{"created":"2020-09-01T15:25:23.968795+00:00","id":4868,"links":{},"metadata":{"_buckets":{"deposit":"8ac7a7a1-af3a-423a-804f-8cf31de4937b"},"_deposit":{"id":"4868","owners":[],"pid":{"revision_id":0,"type":"recid","value":"4868"},"status":"published"},"_oai":{"id":"oai:meral.edu.mm:recid/4868","sets":["1582963302567:1597824273898"]},"communities":["ucsy"],"item_1583103067471":{"attribute_name":"Title","attribute_value_mlt":[{"subitem_1551255647225":"Efficient Action Recognition based on Salient Object Detection","subitem_1551255648112":"en"}]},"item_1583103085720":{"attribute_name":"Description","attribute_value_mlt":[{"interim":"Action recognition has become an importantresearch topic in the computer vision area. Thispaper presents an efficient action recognitionapproach based on salient object detection. Recently,many features were directly extracted from videoframes; as a result, unsatisfying results wereproduced due to intrinsic textural difference betweenforeground and background. Instead of wholeframes, processing only on salient objects suppressesthe interference of background pixels and also makesthe algorithm to be more efficient. So, the maincontribution of this paper is to focus on salient objectdetection to reflect textural difference. Firstly, salientforeground objects are detected in video frames andonly interest features for such objects are detected.Secondly, we extract features using SURF featuredetector and HOG feature descriptor. Finally, we useKNN classifier for achieving better actionrecognition accuracy. Experiments performed onUCF-Sports action dataset show that our proposedapproach outperforms state-of-the-art actionrecognition methods."}]},"item_1583103108160":{"attribute_name":"Keywords","attribute_value":[]},"item_1583103120197":{"attribute_name":"Files","attribute_type":"file","attribute_value_mlt":[{"accessrole":"open_access","date":[{"dateType":"Available","dateValue":"2019-07-11"}],"displaytype":"preview","filename":"proceeding_total-pages-281-287.pdf","filesize":[{"value":"3399 Kb"}],"format":"application/pdf","licensetype":"license_note","mimetype":"application/pdf","url":{"url":"https://meral.edu.mm/record/4868/files/proceeding_total-pages-281-287.pdf"},"version_id":"44c2b3db-d044-441d-b322-5c67a17128b9"}]},"item_1583103131163":{"attribute_name":"Journal articles","attribute_value_mlt":[{"subitem_issue":"","subitem_journal_title":"Fifteenth International Conference on Computer Applications(ICCA 2017)","subitem_pages":"","subitem_volume":""}]},"item_1583103147082":{"attribute_name":"Conference papers","attribute_value_mlt":[{"subitem_acronym":"","subitem_c_date":"","subitem_conference_title":"","subitem_part":"","subitem_place":"","subitem_session":"","subitem_website":""}]},"item_1583103211336":{"attribute_name":"Books/reports/chapters","attribute_value_mlt":[{"subitem_book_title":"","subitem_isbn":"","subitem_pages":"","subitem_place":"","subitem_publisher":""}]},"item_1583103233624":{"attribute_name":"Thesis/dissertations","attribute_value_mlt":[{"subitem_awarding_university":"","subitem_supervisor(s)":[{"subitem_supervisor":""}]}]},"item_1583105942107":{"attribute_name":"Authors","attribute_value_mlt":[{"subitem_authors":[{"subitem_authors_fullname":"Aye, Hnin Mya"},{"subitem_authors_fullname":"Zaw, Sai Maung Maung"}]}]},"item_1583108359239":{"attribute_name":"Upload type","attribute_value_mlt":[{"interim":"Publication"}]},"item_1583108428133":{"attribute_name":"Publication 
type","attribute_value_mlt":[{"interim":"Article"}]},"item_1583159729339":{"attribute_name":"Publication date","attribute_value":"2017-02-16"},"item_1583159847033":{"attribute_name":"Identifier","attribute_value":"http://onlineresource.ucsy.edu.mm/handle/123456789/764"},"item_title":"Efficient Action Recognition based on Salient Object Detection","item_type_id":"21","owner":"1","path":["1597824273898"],"publish_date":"2019-07-11","publish_status":"0","recid":"4868","relation_version_is_last":true,"title":["Efficient Action Recognition based on Salient Object Detection"],"weko_creator_id":"1","weko_shared_id":-1},"updated":"2022-03-24T23:13:26.557856+00:00"}