<?xml version="1.0" encoding="UTF-8"?>
<!-- DSpace Simple Archive Format metadata record (schema "dc").
     Text values are literal UTF-8; only '&' requires escaping in element content. -->
<dublin_core schema="dc">
  <dcvalue element="contributor" qualifier="author">Choi, Wonyoung</dcvalue>
  <dcvalue element="contributor" qualifier="author">Nam, Gi Pyo</dcvalue>
  <dcvalue element="contributor" qualifier="author">Cho, Junghyun</dcvalue>
  <dcvalue element="contributor" qualifier="author">Kim, Ig-Jae</dcvalue>
  <dcvalue element="contributor" qualifier="author">Ko, Hyeong-Seok</dcvalue>
  <dcvalue element="date" qualifier="accessioned">2024-04-11T01:30:17Z</dcvalue>
  <dcvalue element="date" qualifier="available">2024-04-11T01:30:17Z</dcvalue>
  <dcvalue element="date" qualifier="created">2024-04-11</dcvalue>
  <dcvalue element="date" qualifier="issued">2024-03</dcvalue>
  <dcvalue element="identifier" qualifier="uri">https://pubs.kist.re.kr/handle/201004/149611</dcvalue>
  <dcvalue element="description" qualifier="abstract">In the field of face frontalization, the model obtained by training on a particular dataset often underperforms on other datasets. This paper presents the Pre-trained Feature Transformation GAN (PFT-GAN), which is designed to fully utilize diverse facial feature information available from pre-trained face recognition networks. For that purpose, we propose the use of the feature attention transformation (FAT) module that effectively transfers the low-level facial features to the facial generator. On the other hand, in the hope of reducing the pre-trained encoder dependency, we attempt a new FAT module organization that accommodates the features from all pre-trained face recognition networks employed. This paper attempts evaluating the proposed work using the "independent critic" as well as "dependent critic", which enables objective judgments. Experimental results show that the proposed method significantly improves the face frontalization performance and helps overcome the bias associated with each pre-trained face recognition network employed.</dcvalue>
  <dcvalue element="language" qualifier="none">English</dcvalue>
  <dcvalue element="publisher" qualifier="none">Institute of Electrical and Electronics Engineers Inc.</dcvalue>
  <dcvalue element="title" qualifier="none">Integrating Pretrained Encoders for Generalized Face Frontalization</dcvalue>
  <dcvalue element="type" qualifier="none">Article</dcvalue>
  <dcvalue element="identifier" qualifier="doi">10.1109/ACCESS.2024.3377220</dcvalue>
  <dcvalue element="description" qualifier="journalClass">1</dcvalue>
  <dcvalue element="identifier" qualifier="bibliographicCitation">IEEE Access, v.12, pp.43530 - 43539</dcvalue>
  <dcvalue element="citation" qualifier="title">IEEE Access</dcvalue>
  <dcvalue element="citation" qualifier="volume">12</dcvalue>
  <dcvalue element="citation" qualifier="startPage">43530</dcvalue>
  <dcvalue element="citation" qualifier="endPage">43539</dcvalue>
  <dcvalue element="description" qualifier="isOpenAccess">Y</dcvalue>
  <dcvalue element="description" qualifier="journalRegisteredClass">scie</dcvalue>
  <dcvalue element="description" qualifier="journalRegisteredClass">scopus</dcvalue>
  <dcvalue element="identifier" qualifier="wosid">001193865900001</dcvalue>
  <dcvalue element="identifier" qualifier="scopusid">2-s2.0-85188013005</dcvalue>
  <dcvalue element="relation" qualifier="journalWebOfScienceCategory">Computer Science, Information Systems</dcvalue>
  <dcvalue element="relation" qualifier="journalWebOfScienceCategory">Engineering, Electrical &amp; Electronic</dcvalue>
  <dcvalue element="relation" qualifier="journalWebOfScienceCategory">Telecommunications</dcvalue>
  <dcvalue element="relation" qualifier="journalResearchArea">Computer Science</dcvalue>
  <dcvalue element="relation" qualifier="journalResearchArea">Engineering</dcvalue>
  <dcvalue element="relation" qualifier="journalResearchArea">Telecommunications</dcvalue>
  <dcvalue element="type" qualifier="docType">Article</dcvalue>
  <dcvalue element="subject" qualifier="keywordAuthor">Face frontalization</dcvalue>
  <dcvalue element="subject" qualifier="keywordAuthor">face pose normalization</dcvalue>
  <dcvalue element="subject" qualifier="keywordAuthor">face recognition</dcvalue>
  <dcvalue element="subject" qualifier="keywordAuthor">generative modeling</dcvalue>
</dublin_core>