<?xml version="1.0" encoding="utf-8"?>
<dublin_core schema="dc">
<dcvalue element="contributor" qualifier="author">Heo,&#x20;Seokhyeon</dcvalue>
<dcvalue element="contributor" qualifier="author">Cho,&#x20;Youngdae</dcvalue>
<dcvalue element="contributor" qualifier="author">Park,&#x20;Jeongwoo</dcvalue>
<dcvalue element="contributor" qualifier="author">Cho,&#x20;Seokhyun</dcvalue>
<dcvalue element="contributor" qualifier="author">Tsoy,&#x20;Ziya</dcvalue>
<dcvalue element="contributor" qualifier="author">Lim,&#x20;Hwasup</dcvalue>
<dcvalue element="contributor" qualifier="author">Cha,&#x20;Youngwoon</dcvalue>
<dcvalue element="date" qualifier="accessioned">2024-10-26T15:00:19Z</dcvalue>
<dcvalue element="date" qualifier="available">2024-10-26T15:00:19Z</dcvalue>
<dcvalue element="date" qualifier="created">2024-10-25</dcvalue>
<dcvalue element="date" qualifier="issued">2024-10</dcvalue>
<dcvalue element="identifier" qualifier="uri">https:&#x2F;&#x2F;pubs.kist.re.kr&#x2F;handle&#x2F;201004&#x2F;150881</dcvalue>
<dcvalue element="description" qualifier="abstract">We&#x20;present&#x20;a&#x20;novel&#x20;dataset&#x20;for&#x20;humanoid&#x20;robot&#x20;pose&#x20;estimation&#x20;from&#x20;images,&#x20;addressing&#x20;the&#x20;critical&#x20;need&#x20;for&#x20;accurate&#x20;pose&#x20;estimation&#x20;to&#x20;enhance&#x20;human-robot&#x20;interaction&#x20;in&#x20;extended&#x20;reality&#x20;(XR)&#x20;applications.&#x20;Despite&#x20;the&#x20;importance&#x20;of&#x20;this&#x20;task,&#x20;large-scale&#x20;pose&#x20;datasets&#x20;for&#x20;diverse&#x20;humanoid&#x20;robots&#x20;remain&#x20;scarce.&#x20;To&#x20;overcome&#x20;this&#x20;limitation,&#x20;we&#x20;collected&#x20;sparse&#x20;pose&#x20;datasets&#x20;for&#x20;commercially&#x20;available&#x20;humanoid&#x20;robots&#x20;and&#x20;augmented&#x20;them&#x20;through&#x20;various&#x20;synthetic&#x20;data&#x20;generation&#x20;techniques,&#x20;including&#x20;AI-assisted&#x20;image&#x20;synthesis,&#x20;foreground&#x20;removal,&#x20;and&#x20;3D&#x20;character&#x20;simulations.&#x20;Our&#x20;dataset&#x20;is&#x20;the&#x20;first&#x20;to&#x20;provide&#x20;full-body&#x20;pose&#x20;annotations&#x20;for&#x20;a&#x20;wide&#x20;range&#x20;of&#x20;humanoid&#x20;robots&#x20;exhibiting&#x20;diverse&#x20;motions,&#x20;including&#x20;side&#x20;and&#x20;back&#x20;movements,&#x20;in&#x20;real-world&#x20;scenarios.&#x20;Furthermore,&#x20;we&#x20;introduce&#x20;a&#x20;new&#x20;benchmark&#x20;method&#x20;for&#x20;real-time&#x20;full-body&#x20;2D&#x20;keypoint&#x20;estimation&#x20;from&#x20;a&#x20;single&#x20;image.&#x20;Extensive&#x20;experiments&#x20;demonstrate&#x20;that&#x20;our&#x20;extended&#x20;dataset-based&#x20;pose&#x20;estimation&#x20;approach&#x20;achieves&#x20;over&#x20;33.9%&#x20;improvement&#x20;in&#x20;accuracy&#x20;compared&#x20;to&#x20;using&#x20;only&#x20;sparse&#x20;datasets.&#x20;Additionally,&#x20;our&#x20;method&#x20;demonstrates&#x20;the&#x20;real-time&#x20;capability&#x20;of&#x20;42&#x20;frames&#x20;per&#x20;second&#x20;(FPS)&#x20;and&#x20;maintains&#x20;full-body&#x20;pose&#x20;estimation&#x20;consistency&#x20;in&#x20;side&#x20;and&#x20;back&#x20;motions&#x20;across&#x20;11&#x20;differently&#x20;shaped&#x20;humanoid&#x20;robots,&#x20;utilizing&#x20;approximately&#x20;350&#x20;training&#x20;images&#x20;per&#x20;robot.</dcvalue>
<dcvalue element="language" qualifier="none">English</dcvalue>
<dcvalue element="publisher" qualifier="none">MDPI</dcvalue>
<dcvalue element="title" qualifier="none">Diverse&#x20;Humanoid&#x20;Robot&#x20;Pose&#x20;Estimation&#x20;from&#x20;Images&#x20;Using&#x20;Only&#x20;Sparse&#x20;Datasets</dcvalue>
<dcvalue element="type" qualifier="none">Article</dcvalue>
<dcvalue element="identifier" qualifier="doi">10.3390&#x2F;app14199042</dcvalue>
<dcvalue element="description" qualifier="journalClass">1</dcvalue>
<dcvalue element="identifier" qualifier="bibliographicCitation">Applied&#x20;Sciences-basel,&#x20;v.14,&#x20;no.19</dcvalue>
<dcvalue element="citation" qualifier="title">Applied&#x20;Sciences-basel</dcvalue>
<dcvalue element="citation" qualifier="volume">14</dcvalue>
<dcvalue element="citation" qualifier="number">19</dcvalue>
<dcvalue element="description" qualifier="isOpenAccess">Y</dcvalue>
<dcvalue element="description" qualifier="journalRegisteredClass">scie</dcvalue>
<dcvalue element="description" qualifier="journalRegisteredClass">scopus</dcvalue>
<dcvalue element="identifier" qualifier="wosid">001332194900001</dcvalue>
<dcvalue element="identifier" qualifier="scopusid">2-s2.0-85206562722</dcvalue>
<dcvalue element="relation" qualifier="journalWebOfScienceCategory">Chemistry,&#x20;Multidisciplinary</dcvalue>
<dcvalue element="relation" qualifier="journalWebOfScienceCategory">Engineering,&#x20;Multidisciplinary</dcvalue>
<dcvalue element="relation" qualifier="journalWebOfScienceCategory">Materials&#x20;Science,&#x20;Multidisciplinary</dcvalue>
<dcvalue element="relation" qualifier="journalWebOfScienceCategory">Physics,&#x20;Applied</dcvalue>
<dcvalue element="relation" qualifier="journalResearchArea">Chemistry</dcvalue>
<dcvalue element="relation" qualifier="journalResearchArea">Engineering</dcvalue>
<dcvalue element="relation" qualifier="journalResearchArea">Materials&#x20;Science</dcvalue>
<dcvalue element="relation" qualifier="journalResearchArea">Physics</dcvalue>
<dcvalue element="type" qualifier="docType">Article</dcvalue>
<dcvalue element="subject" qualifier="keywordPlus">MARKERS</dcvalue>
<dcvalue element="subject" qualifier="keywordAuthor">computer&#x20;vision</dcvalue>
<dcvalue element="subject" qualifier="keywordAuthor">robotics</dcvalue>
<dcvalue element="subject" qualifier="keywordAuthor">deep&#x20;learning</dcvalue>
</dublin_core>
