{"created":"2023-08-02T03:58:04.402763+00:00","id":7210,"links":{},"metadata":{"_buckets":{"deposit":"e93ad459-cc28-4169-a5e6-d6a0013db52e"},"_deposit":{"created_by":10,"id":"7210","owners":[10],"pid":{"revision_id":0,"type":"depid","value":"7210"},"status":"published"},"_oai":{"id":"oai:repository.lib.tottori-u.ac.jp:00007210","sets":["1:10","2:12"]},"author_link":["4786","4671","26721"],"item_10001_biblio_info_7":{"attribute_name":"書誌情報","attribute_value_mlt":[{"bibliographicIssueDates":{"bibliographicIssueDate":"2022-02","bibliographicIssueDateType":"Issued"},"bibliographicIssueNumber":"2","bibliographicPageEnd":"426","bibliographicPageStart":"415","bibliographicVolumeNumber":"E105-D","bibliographic_titles":[{"bibliographic_title":"IEICE TRANSACTIONS ON INFORMATION AND SYSTEMS"},{"bibliographic_title":"IEICE TRANSACTIONS ON INFORMATION AND SYSTEMS","bibliographic_titleLang":"en"}]}]},"item_10001_description_5":{"attribute_name":"抄録","attribute_value_mlt":[{"subitem_description":"We propose an attention mechanism in deep learning networks for gender recognition using the gaze distribution of human observers when they judge the gender of people in pedestrian images. Prevalent attention mechanisms spatially compute the correlation among values of all cells in an input feature map to calculate attention weights. If a large bias in the background of pedestrian images (e.g., test samples and training samples containing different backgrounds) is present, the attention weights learned using the prevalent attention mechanisms are affected by the bias, which in turn reduces the accuracy of gender recognition. To avoid this problem, we incorporate an attention mechanism called gaze-guided self-attention (GSA) that is inspired by human visual attention. Our method assigns spatially suitable attention weights to each input feature map using the gaze distribution of human observers. In particular, GSA yields promising results even when using training samples with the background bias. The results of experiments on publicly available datasets confirm that our GSA, using the gaze distribution, is more accurate in gender recognition than currently available attention-based methods in the case of background bias between training and test samples.","subitem_description_type":"Other"}]},"item_10001_publisher_8":{"attribute_name":"出版者","attribute_value_mlt":[{"subitem_publisher":"IEICE"}]},"item_10001_relation_14":{"attribute_name":"DOI","attribute_value_mlt":[{"subitem_relation_type":"isIdenticalTo","subitem_relation_type_id":{"subitem_relation_type_id_text":"10.1587/transinf.2021edp7117","subitem_relation_type_select":"DOI"}}]},"item_10001_relation_16":{"attribute_name":"情報源","attribute_value_mlt":[{"subitem_relation_name":[{"subitem_relation_name_text":"Nishiyama Masashi, Inoue Michiko, Iwai Yoshio, et al. Gender Recognition Using a Gaze-Guided Self-Attention Mechanism Robust Against Background Bias in Training Samples. IEICE TRANSACTIONS ON INFORMATION AND SYSTEMS. 2022. E105D(2). 415-426. 
doi:10.1587/transinf.2021EDP7117"}]}]},"item_10001_relation_17":{"attribute_name":"関連サイト","attribute_value_mlt":[{"subitem_relation_name":[{"subitem_relation_name_text":"https://doi.org/10.1587/transinf.2021EDP7117"}],"subitem_relation_type_id":{"subitem_relation_type_id_text":"https://doi.org/10.1587/transinf.2021EDP7117","subitem_relation_type_select":"DOI"}}]},"item_10001_rights_15":{"attribute_name":"権利","attribute_value_mlt":[{"subitem_rights":"(C) 2022 The Institute of Electronics, Information and Communication Engineers"}]},"item_10001_source_id_9":{"attribute_name":"ISSN","attribute_value_mlt":[{"subitem_source_identifier":"17451361","subitem_source_identifier_type":"ISSN"}]},"item_10001_text_33":{"attribute_name":"著者所属(英)","attribute_value_mlt":[{"subitem_text_language":"en","subitem_text_value":"Graduate School of Engineering, Tottori University"},{"subitem_text_language":"en","subitem_text_value":"Graduate School of Engineering, Tottori University"},{"subitem_text_language":"en","subitem_text_value":"Graduate School of Engineering, Tottori University"}]},"item_10001_version_type_20":{"attribute_name":"著者版フラグ","attribute_value_mlt":[{"subitem_version_resource":"http://purl.org/coar/version/c_970fb48d4fbd8a85","subitem_version_type":"VoR"}]},"item_creator":{"attribute_name":"著者","attribute_type":"creator","attribute_value_mlt":[{"creatorNames":[{"creatorName":"西山, 正志"},{"creatorName":"ニシヤマ, マサシ","creatorNameLang":"ja-Kana"},{"creatorName":"Nishiyama, Masashi","creatorNameLang":"en"}],"nameIdentifiers":[{"nameIdentifier":"4786","nameIdentifierScheme":"WEKO"},{"nameIdentifier":"20756449","nameIdentifierScheme":"e-Rad","nameIdentifierURI":"https://kaken.nii.ac.jp/ja/search/?qm=20756449"},{"nameIdentifier":"100001512","nameIdentifierScheme":"研究者総覧鳥取大学","nameIdentifierURI":"http://researchers.adm.tottori-u.ac.jp/html/100001512_ja.html"}]},{"creatorNames":[{"creatorName":"岩井, 儀雄"},{"creatorName":"イワイ, ヨシオ","creatorNameLang":"ja-Kana"},{"creatorName":"Iwai, Yoshio","creatorNameLang":"en"}],"nameIdentifiers":[{"nameIdentifier":"4671","nameIdentifierScheme":"WEKO"},{"nameIdentifier":"70294163","nameIdentifierScheme":"e-Rad","nameIdentifierURI":"https://kaken.nii.ac.jp/ja/search/?qm=70294163"},{"nameIdentifier":"100000470","nameIdentifierScheme":"研究者総覧鳥取大学","nameIdentifierURI":"http://researchers.adm.tottori-u.ac.jp/html/100000470_ja.html"}]},{"creatorNames":[{"creatorName":"Inoue, Michiko","creatorNameLang":"en"}],"nameIdentifiers":[{"nameIdentifier":"26721","nameIdentifierScheme":"WEKO"}]}]},"item_files":{"attribute_name":"ファイル情報","attribute_type":"file","attribute_value_mlt":[{"accessrole":"open_date","date":[{"dateType":"Available","dateValue":"2023-03-17"}],"displaytype":"detail","filename":"ieicetise105-d(2)_415.pdf","filesize":[{"value":"2.0 MB"}],"format":"application/pdf","licensefree":"(C) 2022 The Institute of Electronics, Information and Communication Engineers","licensetype":"license_note","mimetype":"application/pdf","url":{"label":"ieicetise105-d(2)_415.pdf","url":"https://repository.lib.tottori-u.ac.jp/record/7210/files/ieicetise105-d(2)_415.pdf"},"version_id":"2cc628f0-ca6f-4d5f-ad17-35cfa831da41"}]},"item_keyword":{"attribute_name":"キーワード","attribute_value_mlt":[{"subitem_subject":"gaze distribution","subitem_subject_scheme":"Other"},{"subitem_subject":"attention mechanism","subitem_subject_scheme":"Other"},{"subitem_subject":"convolutional neural network","subitem_subject_scheme":"Other"},{"subitem_subject":"gender recognition","subitem_subject_scheme":"Other"},{"subitem_subject":"self-attention","subitem_subject_scheme":"Other"},{"subitem_subject":"gaze distribution","subitem_subject_language":"en","subitem_subject_scheme":"Other"},{"subitem_subject":"attention mechanism","subitem_subject_language":"en","subitem_subject_scheme":"Other"},{"subitem_subject":"convolutional neural network","subitem_subject_language":"en","subitem_subject_scheme":"Other"},{"subitem_subject":"gender recognition","subitem_subject_language":"en","subitem_subject_scheme":"Other"},{"subitem_subject":"self-attention","subitem_subject_language":"en","subitem_subject_scheme":"Other"}]},"item_language":{"attribute_name":"言語","attribute_value_mlt":[{"subitem_language":"eng"}]},"item_resource_type":{"attribute_name":"資源タイプ","attribute_value_mlt":[{"resourcetype":"journal article","resourceuri":"http://purl.org/coar/resource_type/c_6501"}]},"item_title":"Gender Recognition Using a Gaze-Guided Self-Attention Mechanism Robust Against Background Bias in Training Samples","item_titles":{"attribute_name":"タイトル","attribute_value_mlt":[{"subitem_title":"Gender Recognition Using a Gaze-Guided Self-Attention Mechanism Robust Against Background Bias in Training Samples","subitem_title_language":"en"}]},"item_type_id":"10001","owner":"10","path":["12","10"],"pubdate":{"attribute_name":"PubDate","attribute_value":"2022-07-13"},"publish_date":"2022-07-13","publish_status":"0","recid":"7210","relation_version_is_last":true,"title":["Gender Recognition Using a Gaze-Guided Self-Attention Mechanism Robust Against Background Bias in Training Samples"],"weko_creator_id":"10","weko_shared_id":-1},"updated":"2023-09-29T00:39:26.167925+00:00"}