<html xmlns:v="urn:schemas-microsoft-com:vml" xmlns:o="urn:schemas-microsoft-com:office:office" xmlns:w="urn:schemas-microsoft-com:office:word" xmlns:m="http://schemas.microsoft.com/office/2004/12/omml" xmlns="http://www.w3.org/TR/REC-html40"><head>
<META HTTP-EQUIV="Content-Type" CONTENT="text/html; charset=iso-8859-1">
<meta name=Generator content="Microsoft Word 14 (filtered medium)"><style><!--
/* Font Definitions */
/* NOTE(review): stylesheet auto-generated by Word 14 ("filtered medium" export).
   The mso-* properties and panose-1 descriptors are proprietary Word/Outlook
   hints; standards browsers ignore them, but Outlook relies on them for
   round-tripping, so keep declarations and their order intact. */
@font-face
{font-family:Cambria;
panose-1:2 4 5 3 5 4 6 3 2 4;}
@font-face
{font-family:Calibri;
panose-1:2 15 5 2 2 2 4 3 2 4;}
@font-face
{font-family:Consolas;
panose-1:2 11 6 9 2 2 4 3 2 4;}
@font-face
{font-family:"Segoe Print";
panose-1:2 0 6 0 0 0 0 0 0 0;}
@font-face
{font-family:Times-Roman;}
/* The backslash-escaped "@" name is a Word convention; presumably the
   vertical-text (CJK) variant of Times-Roman. TODO confirm. */
@font-face
{font-family:"\@Times-Roman";}
/* Style Definitions */
/* Base style for Word's "Normal" paragraph. The duplicated margin-bottom
   (0cm, then .0001pt) is deliberate: the later declaration wins, giving
   Word's near-zero bottom margin. The same pattern recurs below. */
p.MsoNormal, li.MsoNormal, div.MsoNormal
{margin:0cm;
margin-bottom:.0001pt;
font-size:12.0pt;
font-family:"Calibri","sans-serif";
mso-fareast-language:EN-US;}
/* "Heading 2": carries numbered-list formatting (mso-list l0, level 1) and a
   hanging indent (large margin-left with negative text-indent). */
h2
{mso-style-priority:9;
mso-style-link:"Heading 2 Char";
margin-top:0cm;
margin-right:0cm;
margin-bottom:0cm;
margin-left:159.8pt;
margin-bottom:.0001pt;
text-align:justify;
text-indent:-18.0pt;
page-break-after:avoid;
mso-list:l0 level1 lfo2;
font-size:13.0pt;
font-family:"Cambria","serif";
color:#4F81BD;
mso-fareast-language:EN-US;
font-weight:bold;}
/* "Heading 3": same Cambria/blue treatment as h2, without list formatting. */
h3
{mso-style-priority:9;
mso-style-link:"Heading 3 Char";
margin-top:10.0pt;
margin-right:0cm;
margin-bottom:0cm;
margin-left:0cm;
margin-bottom:.0001pt;
page-break-after:avoid;
font-size:12.0pt;
font-family:"Cambria","serif";
color:#4F81BD;
mso-fareast-language:EN-US;
font-weight:bold;}
/* Hyperlink colors: classic blue/purple for unvisited/visited. */
a:link, span.MsoHyperlink
{mso-style-priority:99;
color:blue;
text-decoration:underline;}
a:visited, span.MsoHyperlinkFollowed
{mso-style-priority:99;
color:purple;
text-decoration:underline;}
/* "Plain Text" paragraph style (used for the divider/heading lines in the body). */
p.MsoPlainText, li.MsoPlainText, div.MsoPlainText
{mso-style-priority:99;
mso-style-link:"Plain Text Char";
margin:0cm;
margin-bottom:.0001pt;
font-size:11.0pt;
font-family:"Calibri","sans-serif";
mso-fareast-language:EN-US;}
/* Generic HTML paragraphs fall back to Times New Roman (Word's web default). */
p
{mso-style-priority:99;
mso-margin-top-alt:auto;
margin-right:0cm;
mso-margin-bottom-alt:auto;
margin-left:0cm;
font-size:12.0pt;
font-family:"Times New Roman","serif";}
p.MsoListParagraph, li.MsoListParagraph, div.MsoListParagraph
{mso-style-priority:34;
margin-top:0cm;
margin-right:0cm;
margin-bottom:0cm;
margin-left:36.0pt;
margin-bottom:.0001pt;
mso-add-space:auto;
font-size:12.0pt;
font-family:"Calibri","sans-serif";
mso-fareast-language:EN-US;}
/* The CxSpFirst/Middle/Last variants are export-only (mso-style-type) clones
   of MsoListParagraph that Word emits for runs of contiguous list paragraphs. */
p.MsoListParagraphCxSpFirst, li.MsoListParagraphCxSpFirst, div.MsoListParagraphCxSpFirst
{mso-style-priority:34;
mso-style-type:export-only;
margin-top:0cm;
margin-right:0cm;
margin-bottom:0cm;
margin-left:36.0pt;
margin-bottom:.0001pt;
mso-add-space:auto;
font-size:12.0pt;
font-family:"Calibri","sans-serif";
mso-fareast-language:EN-US;}
p.MsoListParagraphCxSpMiddle, li.MsoListParagraphCxSpMiddle, div.MsoListParagraphCxSpMiddle
{mso-style-priority:34;
mso-style-type:export-only;
margin-top:0cm;
margin-right:0cm;
margin-bottom:0cm;
margin-left:36.0pt;
margin-bottom:.0001pt;
mso-add-space:auto;
font-size:12.0pt;
font-family:"Calibri","sans-serif";
mso-fareast-language:EN-US;}
p.MsoListParagraphCxSpLast, li.MsoListParagraphCxSpLast, div.MsoListParagraphCxSpLast
{mso-style-priority:34;
mso-style-type:export-only;
margin-top:0cm;
margin-right:0cm;
margin-bottom:0cm;
margin-left:36.0pt;
margin-bottom:.0001pt;
mso-add-space:auto;
font-size:12.0pt;
font-family:"Calibri","sans-serif";
mso-fareast-language:EN-US;}
/* Character-style counterparts of the heading/plain-text paragraph styles
   (linked back via mso-style-link). */
span.Heading2Char
{mso-style-name:"Heading 2 Char";
mso-style-priority:9;
mso-style-link:"Heading 2";
font-family:"Cambria","serif";
color:#4F81BD;
font-weight:bold;}
span.Heading3Char
{mso-style-name:"Heading 3 Char";
mso-style-priority:9;
mso-style-link:"Heading 3";
font-family:"Cambria","serif";
color:#4F81BD;
font-weight:bold;}
span.PlainTextChar
{mso-style-name:"Plain Text Char";
mso-style-priority:99;
mso-style-link:"Plain Text";
font-family:"Calibri","serif";}
p.Default, li.Default, div.Default
{mso-style-name:Default;
mso-style-priority:99;
margin:0cm;
margin-bottom:.0001pt;
text-autospace:none;
font-size:12.0pt;
font-family:"Calibri","sans-serif";
color:black;
mso-fareast-language:EN-US;}
/* Outlook per-message character styles (mso-style-type:personal*). */
span.EmailStyle24
{mso-style-type:personal-compose;
font-family:"Calibri","sans-serif";
color:windowtext;}
span.EmailStyle25
{mso-style-type:personal;
font-family:"Calibri","sans-serif";
color:windowtext;}
.MsoChpDefault
{mso-style-type:export-only;
font-size:10.0pt;
font-family:"Calibri","sans-serif";
mso-fareast-language:EN-US;}
/* Page setup: 612pt x 792pt is 8.5in x 11in (US Letter) at 72pt per inch;
   margins are mixed pt/cm as emitted by Word. */
@page WordSection1
{size:612.0pt 792.0pt;
margin:70.85pt 2.0cm 2.0cm 2.0cm;}
div.WordSection1
{page:WordSection1;}
/* List Definitions */
/* Hybrid multilevel list backing the h2 headings: level 1 is style-linked to
   "Heading 2"; levels 2/5/8 are lower-alpha, 3/6/9 lower-roman, and levels
   with no explicit format use Word's default numbering. */
@list l0
{mso-list-id:1928953472;
mso-list-type:hybrid;
mso-list-template-ids:86912818 -406142884 67698713 67698715 67698703 67698713 67698715 67698703 67698713 67698715;}
@list l0:level1
{mso-level-style-link:"Heading 2";
mso-level-tab-stop:none;
mso-level-number-position:left;
margin-left:159.8pt;
text-indent:-18.0pt;
mso-ansi-language:EN-US;}
@list l0:level2
{mso-level-number-format:alpha-lower;
mso-level-tab-stop:none;
mso-level-number-position:left;
margin-left:54.0pt;
text-indent:-18.0pt;}
@list l0:level3
{mso-level-number-format:roman-lower;
mso-level-tab-stop:none;
mso-level-number-position:right;
margin-left:90.0pt;
text-indent:-9.0pt;}
@list l0:level4
{mso-level-tab-stop:none;
mso-level-number-position:left;
margin-left:126.0pt;
text-indent:-18.0pt;}
@list l0:level5
{mso-level-number-format:alpha-lower;
mso-level-tab-stop:none;
mso-level-number-position:left;
margin-left:162.0pt;
text-indent:-18.0pt;}
@list l0:level6
{mso-level-number-format:roman-lower;
mso-level-tab-stop:none;
mso-level-number-position:right;
margin-left:198.0pt;
text-indent:-9.0pt;}
@list l0:level7
{mso-level-tab-stop:none;
mso-level-number-position:left;
margin-left:234.0pt;
text-indent:-18.0pt;}
@list l0:level8
{mso-level-number-format:alpha-lower;
mso-level-tab-stop:none;
mso-level-number-position:left;
margin-left:270.0pt;
text-indent:-18.0pt;}
@list l0:level9
{mso-level-number-format:roman-lower;
mso-level-tab-stop:none;
mso-level-number-position:right;
margin-left:306.0pt;
text-indent:-9.0pt;}
/* Remove the default bottom margin on lists. */
ol
{margin-bottom:0cm;}
ul
{margin-bottom:0cm;}
--></style><!--[if gte mso 9]><xml>
<o:shapedefaults v:ext="edit" spidmax="1026" />
</xml><![endif]--><!--[if gte mso 9]><xml>
<o:shapelayout v:ext="edit">
<o:idmap v:ext="edit" data="1" />
</o:shapelayout></xml><![endif]--></head><body lang=IT link=blue vlink=purple><div class=WordSection1><p class=MsoNormal><span lang=EN-GB>Apologies for cross-posting<o:p></o:p></span></p><p class=MsoPlainText><span lang=EN-US><o:p> </o:p></span></p><p class=MsoPlainText><span lang=EN-US>==================================================================<o:p></o:p></span></p><p class=MsoPlainText><b><span lang=EN-US style='font-size:12.0pt;font-family:"Cambria","serif"'>PhD Openings </span></b><span lang=EN-US style='font-size:12.0pt;font-family:"Cambria","serif"'>at the<b> Cognitive Robotics and Interaction Lab <br>Robotics, Brain and Cognitive Sciences Department<br>Italian Institute of Technology<o:p></o:p></b></span></p><p class=MsoNormal><span lang=EN-US>===========================================================<o:p></o:p></span></p><p class=MsoNormal><span lang=EN-US style='font-size:11.0pt'><o:p> </o:p></span></p><p class=MsoNormal><span lang=EN-US style='font-size:11.0pt'><o:p> </o:p></span></p><p class=MsoNormal style='text-align:justify'><span lang=EN-GB style='font-family:"Cambria","serif";mso-fareast-language:JA'>In the spirit of the doctoral School on Bioengineering and Robotics</span><span lang=EN-GB style='font-family:"Cambria","serif"'> the PhD Program for the curriculum <b>“Cognitive Robotics, Interaction and Rehabilitation Technologies”</b> provides interdisciplinary training at the interface between technology and life-sciences. The general objective of the program is to form scientists and research technologists capable of working in <b>multidisciplinary teams</b> on projects where <b>human factors</b> play a crucial role in technological development and design. 
</span><span lang=EN-US style='font-family:"Cambria","serif"'>Robotics and neuroscience researchers in RBCS share, as a fundamental scientific objective, the study of <strong><span style='font-family:"Cambria","serif"'>physical and social interaction in humans and machines</span></strong> (</span><a href="http://www.iit.it/rbcs" target="_blank"><span lang=EN-US>www.iit.it/rbcs</span></a><span lang=EN-US style='font-family:"Cambria","serif"'> ). </span><span lang=EN-US style='font-family:"Cambria","serif"'><o:p></o:p></span></p><p class=MsoNormal style='text-align:justify'><span lang=EN-US style='font-family:"Cambria","serif"'><o:p> </o:p></span></p><p class=MsoNormal style='text-align:justify'><span lang=EN-US style='font-family:"Cambria","serif"'>Among the different research themes proposed I would like to advertise these topics:<o:p></o:p></span></p><p class=MsoNormal style='text-align:justify'><span lang=EN-US style='font-family:"Cambria","serif"'><o:p> </o:p></span></p><p class=Default style='page-break-before:always'><span style='font-family:Symbol;mso-fareast-language:IT'>·</span><span lang=EN-US style='font-family:"Times New Roman","serif";mso-fareast-language:IT'> </span><b><span lang=EN-US style='font-size:13.0pt;color:windowtext'>Visual cues for mutual understanding in human-robot interaction <o:p></o:p></span></b></p><p class=Default style='page-break-before:always'><span style='font-family:Symbol;mso-fareast-language:IT'>·</span><span lang=EN-US style='font-family:"Times New Roman","serif";mso-fareast-language:IT'> </span><b><span lang=EN-US style='font-size:13.0pt'>Computational Neuroscience models for auditory aware robots <o:p></o:p></span></b></p><p class=Default style='page-break-before:always'><span style='font-family:Symbol;mso-fareast-language:IT'>·</span><span lang=EN-US style='font-family:"Times New Roman","serif";mso-fareast-language:IT'> </span><b><span lang=EN-US style='font-size:13.0pt;color:windowtext'>Transferring of human-robot 
interaction competencies: towards robot symbiosis in the acquisition of new skills<o:p></o:p></span></b></p><p class=Default><span style='font-family:Symbol;mso-fareast-language:IT'>·</span><span lang=EN-US style='font-family:"Times New Roman","serif";mso-fareast-language:IT'> </span><b><span lang=EN-US style='font-size:13.0pt;color:windowtext'>Cyber-physical social security applied to emergent innovative technologies</span></b><b><span lang=EN-US style='font-size:11.5pt'><o:p></o:p></span></b></p><p class=MsoNormal style='text-align:justify'><span lang=EN-US style='font-family:"Cambria","serif"'><o:p> </o:p></span></p><p class=MsoNormal style='margin-bottom:12.0pt;text-align:justify'><span lang=EN-GB style='font-family:"Cambria","serif";mso-fareast-language:JA'>The ideal candidates are students with a higher level university degree willing to invest extra time and effort in blending into a multidisciplinary team composed of neuroscientists, engineers, psychologists, physicists working together to investigate brain functions and realize intelligent machines, rehabilitation protocols and advanced prosthesis.<o:p></o:p></span></p><p class=MsoNormal style='margin-bottom:12.0pt;text-align:justify'><b><span lang=EN-GB style='font-family:"Cambria","serif";mso-fareast-language:JA'>International applications are encouraged and will receive logistic support with visa issues, relocation, etc.<o:p></o:p></span></b></p><p class=MsoNormal style='text-align:justify'><span lang=EN-US>Below you can find more details related to <i>the positions</i> and the <i>instructions on how to apply</i></span><span lang=EN-US style='font-family:"Cambria","serif"'><o:p></o:p></span></p><p class=MsoNormal style='text-align:justify'><span lang=EN-GB style='font-family:"Cambria","serif"'><o:p> </o:p></span></p><p class=MsoNormal style='text-align:justify'><span lang=EN-US>=======================================================================<br>Application deadline: *****<strong><span 
style='font-family:"Calibri","sans-serif"'>12 June 2018, Noon, Italian time</span></strong>*****</span><span lang=EN-US style='font-family:"Cambria","serif"'><o:p></o:p></span></p><p class=MsoNormal style='text-align:justify'><span lang=EN-US>=======================================================================</span><span lang=EN-GB style='font-family:"Cambria","serif"'><o:p></o:p></span></p><p><span lang=EN-US>===============<br><strong>Themes</strong><b><br></b>===============<o:p></o:p></span></p><p class=Default style='page-break-before:always'><b><span lang=EN-US style='font-size:13.0pt;color:windowtext'>Visual cues for mutual understanding in human-robot interaction <o:p></o:p></span></b></p><p class=Default style='page-break-before:always'><span lang=EN-US style='font-size:13.0pt;color:windowtext'><o:p> </o:p></span></p><p class=Default><b><span style='font-size:11.5pt;color:windowtext'>Tutors</span></b><span style='font-size:11.5pt;color:windowtext'>: Alessandra Sciutti, PhD; Francesco Rea, PhD; Prof. 
Giulio Sandini <o:p></o:p></span></p><p class=Default><b><span style='font-size:11.5pt;color:windowtext'>Institute: IIT (Istituto Italiano di Tecnologia) </span></b><span style='font-size:11.5pt;color:windowtext'><o:p></o:p></span></p><p class=Default><b><span lang=EN-US style='font-size:11.5pt;color:windowtext'>Research Unit: Robotics, Brain and Cognitive Sciences (</span></b><span lang=EN-US style='font-size:11.5pt;color:windowtext'>https://www.iit.it/rbcs<b>) </b><o:p></o:p></span></p><p class=Default><b><span lang=EN-US style='font-size:11.5pt;color:windowtext'>Division: Cognitive Robotics and Human-Human Interaction </span></b><span lang=EN-US style='font-size:11.5pt;color:windowtext'><o:p></o:p></span></p><p class=Default><b><span lang=EN-US style='font-size:11.5pt;color:windowtext'><o:p> </o:p></span></b></p><p class=Default><b><span lang=EN-US style='font-size:11.5pt;color:windowtext'>Description: </span></b><span lang=EN-US style='font-size:11.5pt;color:windowtext'>The ability of humans at interacting and collaborating with each other is based on mutual understanding and is supported by a continuous exchange of information mediated only in minimal part by language. The majority of messages are covertly embedded in the way the two partners move their eyes and their body. This implicit information exchange allows to anticipate the needs and intentions of the partner. The general goal of this project will be to provide the humanoid iCub robot with the ability to perceive these covert visual signals sent by its human partners, as well as becoming able to send analogous cues with its own movements. <o:p></o:p></span></p><p class=Default><span lang=EN-US style='font-size:11.5pt;color:windowtext'>The pipeline of the computational system includes an attentional module able to localize the portion of the scene containing biological motion and to extract the visual properties of the observed movements, as speed, trajectory, acceleration. 
Starting from this analysis, performed by modules already available on the robot, iCub will need to use the derived movement features to understand its partners’ actions and to decode their intentions, needs and emotions. View-invariance will be one of the crucial aspects of the system. Additionally, these movement features will guide the selection of the appropriate robot behavior, to make it intuitively predictable and legible to the partner. Robot motion planning could leverage existing tools to design biologically plausible robot movements for the iCub. <o:p></o:p></span></p><p class=Default><b><span lang=EN-US style='font-size:11.5pt;color:windowtext'><o:p> </o:p></span></b></p><p class=Default><b><span lang=EN-US style='font-size:11.5pt;color:windowtext'>Requirements</span></b><span lang=EN-US style='font-size:11.5pt;color:windowtext'>: degree in robotics, bioengineering, computer science, computer engineering, or related disciplines, attitude for problem solving, c++ programming. A background on computer vision and machine learning is an asset. <o:p></o:p></span></p><p class=Default><b><span lang=EN-US style='font-size:11.5pt;color:windowtext'><o:p> </o:p></span></b></p><p class=Default><b><span lang=EN-US style='font-size:11.5pt;color:windowtext'>References </span></b><span lang=EN-US style='font-size:11.5pt;color:windowtext'><o:p></o:p></span></p><p class=Default style='margin-bottom:.95pt'><span lang=EN-US style='font-size:11.5pt;color:windowtext'>1. Sciutti A., Mara M., Tagliasco V., Sandini G. 2018 Humanizing Human-Robot Interaction: On the Importance of Mutual Understanding, </span><span lang=EN-US style='font-size:11.5pt;font-family:"Cambria","serif";color:windowtext'>IEEE Technology and Society Magazine https://ieeexplore.ieee.org/document/8307144/</span><span lang=EN-US style='font-size:11.5pt;color:windowtext'><o:p></o:p></span></p><p class=Default style='margin-bottom:.95pt'><span lang=EN-US style='font-size:11.5pt;color:windowtext'>2. Sciutti A. 
& Sandini G. 2017 , ‘Interacting with Robots to Investigate the Bases of Social Interaction’, </span><span lang=EN-US style='font-size:11.5pt;font-family:"Cambria","serif";color:windowtext'>IEEE Transactions on Neural Systems and Rehabilitation Engineering, </span><span lang=EN-US style='font-size:11.5pt;color:windowtext'>10.1109/TNSRE.2017.2753879 </span><span lang=EN-US style='font-size:11.5pt;font-family:"Cambria","serif";color:windowtext'>http://ieeexplore.ieee.org/document/8068256/ <o:p></o:p></span></p><p class=Default style='margin-bottom:.95pt'><span lang=EN-US style='font-size:11.5pt;color:windowtext'>3. Vignolo A., Noceti N., Rea F., Sciutti A., Odone F. & Sandini G. 2017, ‘Detecting biological motion for human-robot interaction: a link between perception and action’, Frontiers in Robotics and AI, 4. </span><span lang=EN-US style='font-size:11.5pt;font-family:"Cambria","serif";color:windowtext'>http://journal.frontiersin.org/article/10.3389/frobt.2017.00014/full <o:p></o:p></span></p><p class=Default><span lang=EN-US style='font-size:11.5pt;font-family:"Cambria","serif";color:windowtext'>4. </span><span lang=EN-US style='font-size:11.5pt;color:windowtext'>Sandini G., Sciutti A. & Rea F. (2018) ‘Movement-based communication for humanoid-human interaction’. 
In: </span><span lang=EN-US style='font-size:11.5pt;font-family:"Cambria","serif";color:windowtext'>Section: Human-Humanoid Interaction, Humanoid Robotics: a Reference <o:p></o:p></span></p><p class=Default><span lang=EN-US style='font-size:11.5pt;font-family:"Cambria","serif";color:windowtext'><o:p> </o:p></span></p><p class=Default><b><span lang=EN-US style='font-size:11.5pt;color:windowtext'>Contacts: </span></b><span lang=EN-US style='font-size:11.5pt;color:windowtext'>alessandra.sciutti@iit.it, francesco.rea@iit.it, giulio.sandini@iit.it </span><span lang=EN-US style='font-size:11.5pt;color:windowtext'>- </span><span lang=EN-US style='font-size:11.5pt'>Applicants are strongly encouraged to contact the perspective tutors before they submit their application.</span><span lang=EN-US style='font-family:"Cambria","serif"'> </span><span lang=EN-US style='font-size:11.5pt;font-family:"Cambria","serif";color:windowtext'><o:p></o:p></span></p><p class=Default><span lang=EN-US>=========================================================================================================</span><span lang=EN-US style='color:windowtext'><o:p></o:p></span></p><p class=Default><b><span lang=EN-US style='font-size:13.0pt'><br>Computational Neuroscience models for auditory aware robots </span></b><span lang=EN-US style='font-size:13.0pt'><o:p></o:p></span></p><p class=Default><b><span lang=EN-US style='font-size:11.5pt'><o:p> </o:p></span></b></p><p class=Default><b><span style='font-size:11.5pt'>Tutors</span></b><span style='font-size:11.5pt'>: Francesco Rea, PhD; Prof. 
Giulio Sandini </span> <o:p></o:p></p><p class=Default><b><span style='font-size:11.5pt'>Institute: IIT (Istituto Italiano di Tecnologia) </span></b><span style='font-size:11.5pt'><o:p></o:p></span></p><p class=Default><b><span lang=EN-US style='font-size:11.5pt'>Research Unit: Robotics, Brain and Cognitive Sciences (</span></b><span lang=EN-US style='font-size:11.5pt'>https://www.iit.it/rbcs<b>) </b><o:p></o:p></span></p><p class=Default><b><span lang=EN-US style='font-size:11.5pt'>Division: Cognitive Robotics and Human-Human Interaction </span></b><span lang=EN-US style='font-size:11.5pt'><o:p></o:p></span></p><p class=Default><b><span lang=EN-US style='font-size:11.5pt'><o:p> </o:p></span></b></p><p class=Default><b><span lang=EN-US style='font-size:11.5pt'>Description: </span></b><span lang=EN-US style='font-size:11.5pt'>Selective attention is fundamental to any living being as a prerequisite for action performance and social interaction. The process of visually shifting attention on task-relevant targets has been studied extensively in cognitive neuroscience and implemented computationally for a variety of real-world problems. However, multisensory attention has been only partially exploited despite the evident advantages in attentional orienting and explicit communication (speech) understanding in social realistic contexts. The PhD program will implement a novel computational neuroscience model of auditory processing by endowing the humanoid robot iCub with auditory awareness in an unstructured acoustic world. In detail, iCub will 1) show proactive attention toward relevant salient sound sources, 2) generate motor-control and integrate sensor input across successive movements to reinforce the interpretation of the scene, and 3) provide a reliable and novel speech recognition system [1],[2]. 
The activities carried out in collaboration with the department of computational neuroscience at the University of Lethbridge (Alberta, Canada) aim: a) to consolidate the audio attention system in the existing robotics setup iCub[3]; b) to enable experimentation involving ecological interaction with human subjects; c) to enhance the existing multimodal attention system; d) to provide deep-learning models of speech recognition based on DNNs; e) to integrate the outcome in the iCub cognitive architecture [4]. <o:p></o:p></span></p><p class=Default><b><span lang=EN-US style='font-size:11.5pt'><o:p> </o:p></span></b></p><p class=Default><b><span lang=EN-US style='font-size:11.5pt'>Requirements</span></b><span lang=EN-US style='font-size:11.5pt'>: degree in robotics, bioengineering, computer science, computer engineering, or related disciplines, attitude for problem solving, c++ programming. A background on machine learning is an asset. <o:p></o:p></span></p><p class=Default><b><span lang=EN-US style='font-size:11.5pt'><o:p> </o:p></span></b></p><p class=Default><b><span lang=EN-US style='font-size:11.5pt'>References: </span></b><span lang=EN-US style='font-size:11.5pt'><o:p></o:p></span></p><p class=Default style='margin-bottom:1.1pt'><span lang=EN-US style='font-size:11.5pt'>1. Mosadeghzad M., Rea F., Tata M., Brayda L. & Sandini G. 2015, ‘Saliency Based Sensor Fusion of Broadband Sound Localizer for Humanoids’, 2015 IEEE International Conference on Multisensor Fusion and Information Integration (MFI 2015), San Diego, CA, USA, September 14-16, 2015; <o:p></o:p></span></p><p class=Default style='margin-bottom:1.1pt'><span lang=EN-US style='font-size:11.5pt'>2. 
Ilievski M., Rea F., Sandini G., Tata M., ‘A Binaural Beamforming Approach to Resolve Complex Auditory Scenes for Humanoid Robots’ 2017 IEEE/RSJ International Conference on Intelligent Robots and Systems, Vancouver, BC, Canada, September 24-28, 2017 <o:p></o:p></span></p><p class=Default style='margin-bottom:1.1pt'><span lang=EN-US style='font-size:11.5pt'>3. Rea F., Sandini G., Metta G., ‘Motor biases in visual attention for a humanoid robot’, IEEE-RAS International Conference on Humanoid Robots, vol. 2015-February, pp. 779-786 <o:p></o:p></span></p><p class=Default><span lang=EN-US style='font-size:11.5pt'>4. Mohan V, Sandini G, Morasso P. (2014), ‘A neural framework for organization and flexible utilization of episodic memory in "cumulatively" learning baby humanoids’, Neural Computation 26(12), 2692-2734, MIT Press <o:p></o:p></span></p><p class=Default><span lang=EN-US style='font-size:11.5pt'><o:p> </o:p></span></p><p class=Default><b><span lang=EN-US style='font-size:11.5pt'>Contacts: </span></b><span lang=EN-US style='font-size:11.5pt'>francesco.rea@iit.it , giulio.sandini@iit.it </span><span lang=EN-US style='font-size:11.5pt;color:windowtext'>- </span><span lang=EN-US style='font-size:11.5pt'>Applicants are strongly encouraged to contact the prospective tutors before they submit their application.</span><span lang=EN-US style='font-family:"Cambria","serif"'> </span><span lang=EN-US style='font-size:11.5pt;font-family:"Cambria","serif"'><o:p></o:p></span></p><p class=Default><span lang=EN-US style='color:windowtext'><o:p> </o:p></span></p><p class=Default><span lang=EN-US>=========================================================================================================</span><span lang=EN-US style='color:windowtext'><o:p></o:p></span></p><p class=Default><span lang=EN-US style='color:windowtext'><o:p> </o:p></span></p><p class=Default style='page-break-before:always'><b><span lang=EN-US style='font-size:13.0pt;color:windowtext'>Transferring of 
human-robot interaction competencies: towards robot symbiosis in the acquisition of new skills </span></b><span lang=EN-US style='font-size:13.0pt;color:windowtext'><o:p></o:p></span></p><p class=Default><b><span style='font-size:11.5pt;color:windowtext'><o:p> </o:p></span></b></p><p class=Default><b><span style='font-size:11.5pt;color:windowtext'>Tutor</span></b><span style='font-size:11.5pt;color:windowtext'>: Jacopo Zenzeri, PhD; Alessandra Sciutti, PhD; Francesco Rea, PhD <o:p></o:p></span></p><p class=Default><b><span style='font-size:11.5pt;color:windowtext'>Institute: IIT (Istituto Italiano di Tecnologia) </span></b><span style='font-size:11.5pt;color:windowtext'><o:p></o:p></span></p><p class=Default><b><span lang=EN-US style='font-size:11.5pt;color:windowtext'>Research Unit: Robotics, Brain and Cognitive Sciences (</span></b><span lang=EN-US style='font-size:11.5pt;color:windowtext'>https://www.iit.it/rbcs<b>) </b><o:p></o:p></span></p><p class=Default><b><span lang=EN-US style='font-size:11.5pt;color:windowtext'>Division: Motor Learning, Assistive and Rehabilitation Robotics & Cognitive Robotics and Human-Human Interaction </span></b><span lang=EN-US style='font-size:11.5pt;color:windowtext'><o:p></o:p></span></p><p class=Default><b><span lang=EN-US style='font-size:11.5pt;color:windowtext'><o:p> </o:p></span></b></p><p class=Default><b><span lang=EN-US style='font-size:11.5pt;color:windowtext'>Description: </span></b><span lang=EN-US style='font-size:11.5pt;color:windowtext'>In the recent years many studies focused on how to optimize human-robot collaboration tasks. Here we focus on a fundamental case where a human agent has to learn how to use a tool while collaborating with an expert humanoid robot agent which, in this case, plays the active role of maximizing the learning of the human. 
In this new perspective the process will proceed in two steps: 1) the humanoid robot acquires the knowledge of the collaborative tasks and becomes the “robot teacher” by interacting (through the tool) with an expert human; 2) the naïve human interacts (through the tool) with the “robot teacher” in order to learn the task in an optimal way. <o:p></o:p></span></p><p class=Default><span lang=EN-US style='font-size:11.5pt;color:windowtext'>Behavioral experiments on motor learning will be conducted using haptic interfaces to study motor control mechanisms and how motor control strategies emerge during the interaction with a specific set of tools. This activity is based on recent studies on dyadic interaction [1-2] and will contribute to define human inspired models of interaction. The model is transferred to the humanoid agent to acquire the human expert knowledge (in step 1) and to teach it to the naïve one (in step 2). Behavioral experiments will then be conducted with the humanoid robot iCub to implement real use cases. In this context, the proactive role of the iCub will enrich the existing cognitive framework for human robot interaction. The candidate will also exploit measures of engagement in the task (attentional level, cognitive load and fatigue). <o:p></o:p></span></p><p class=Default><b><span lang=EN-US style='font-size:11.5pt;color:windowtext'><o:p> </o:p></span></b></p><p class=Default><b><span lang=EN-US style='font-size:11.5pt;color:windowtext'>Requirements</span></b><span lang=EN-US style='font-size:11.5pt;color:windowtext'>: a master's degree in Bioengineering, Computer Science or equivalent, with experience in the analysis and modeling of human movements and in robot programming. 
<o:p></o:p></span></p><p class=Default><b><span lang=EN-US style='font-size:11.5pt;color:windowtext'><o:p> </o:p></span></b></p><p class=Default><b><span lang=EN-US style='font-size:11.5pt;color:windowtext'>References: </span></b><span lang=EN-US style='font-size:11.5pt;color:windowtext'><o:p></o:p></span></p><p class=Default style='margin-bottom:1.15pt'><span lang=EN-US style='font-size:11.5pt;color:windowtext'>1. Avila-Mireles, E.J. et al., 2017. Skill learning and skill transfer mediated by cooperative haptic interaction. IEEE Transactions on Neural Systems and Rehabilitation Engineering, 25(7), pp.832–843. <o:p></o:p></span></p><p class=Default style='margin-bottom:1.15pt'><span lang=EN-US style='font-size:11.5pt;color:windowtext'>2. Galofaro, E., Morasso, P. & Zenzeri, J., 2017. Improving motor skill transfer during dyadic robot training through the modulation of the expert role. In IEEE International Conference on Rehabilitation Robotics. London. <o:p></o:p></span></p><p class=Default><span lang=EN-US style='font-size:11.5pt;color:windowtext'>3. Vignolo A., Noceti N., Rea F., Sciutti A., Odone F. & Sandini G. 2017, ‘Detecting biological motion for human-robot interaction: a link between perception and action’, Frontiers in Robotics and AI, 4. 
<o:p></o:p></span></p><p class=Default><span lang=EN-US style='font-size:11.5pt;color:windowtext'><o:p> </o:p></span></p><p class=Default><b><span lang=EN-US style='font-size:11.5pt;color:windowtext'>Contacts: </span></b><span lang=EN-US style='font-size:11.5pt;color:windowtext'>jacopo.zenzeri@iit.it alessandra.sciutti@iit.it francesco.rea@iit.it </span><span lang=EN-US style='font-size:11.5pt'> </span><span lang=EN-US style='font-size:11.5pt;color:windowtext'>- </span><span lang=EN-US style='font-size:11.5pt'>Applicants are strongly encouraged to contact the perspective tutors before they submit their application.</span><span lang=EN-US style='font-size:11.5pt;font-family:"Cambria","serif";color:windowtext'><o:p></o:p></span></p><p class=Default><span lang=EN-US style='color:windowtext'><o:p> </o:p></span></p><p class=Default><span lang=EN-US>=========================================================================================================</span><span lang=EN-US style='color:windowtext'><o:p></o:p></span></p><p class=Default><span lang=EN-US style='color:windowtext'><o:p> </o:p></span></p><p class=Default style='page-break-before:always'><b><span lang=EN-US style='font-size:13.0pt;color:windowtext'>Cyber-physical social security applied to emergent innovative technologies</span></b><span lang=EN-US style='font-size:13.0pt;color:windowtext'><o:p></o:p></span></p><p class=Default><b><span style='font-size:11.5pt;color:windowtext'>Tutor</span></b><span style='font-size:11.5pt;color:windowtext'>: Francesco Rea, Stefano Bencetti <o:p></o:p></span></p><p class=Default><b><span style='font-size:11.5pt;color:windowtext'>Institute: IIT (Istituto Italiano di Tecnologia) </span></b><span style='font-size:11.5pt;color:windowtext'><o:p></o:p></span></p><p class=Default><b><span lang=EN-US style='font-size:11.5pt;color:windowtext'>Department: ICT/ Robotics, Brain and Cognitive Sciences (</span></b><span lang=EN-US 
style='font-size:11.5pt;color:windowtext'>https://www.iit.it/rbcs<b>) </b><o:p></o:p></span></p><p class=Default><b><span lang=EN-US style='font-size:11.5pt;color:windowtext'><o:p> </o:p></span></b></p><p class=Default><b><span lang=EN-US style='font-size:11.5pt;color:windowtext'>Description: </span></b><span lang=EN-US style='font-size:11.5pt;color:windowtext'>The field of cyber security is a fast-growing discipline that impacts on the interaction between people and technology. Even though the effectiveness of security measures to protect personal data is increasing, people remain susceptible to manipulation and thus the human element remains the weakest link. Such weakness is often exploited by the use of various manipulation techniques aiming at the disclosure of sensitive information, namely social engineering. The field of social engineering is still in its early stages; however, the interaction between individuals and new technologies (assistive robotics, robot companion) and new ways of working (smart working) might be exposed to yet unknown risks associated with the misuse of protected data only partially addressed by traditional computer security. <o:p></o:p></span></p><p class=Default><span lang=EN-US style='font-size:11.5pt;color:windowtext'>The overall aim of the project is to investigate how to prevent disclosure of sensitive information applied to the areas where humans use interconnected technologies (e.g.: robotics, IoT, Big Data Analytics systems) especially in the context of human machine interactions (e.g.: robot companion, assistive robotics, home assistance, etc.). The aim unfolds into two goals for the candidate. First, the ideal candidate is required to develop algorithms of human machine interaction that adapt to the person. 
The technology autonomously decides what sensitive information is acquired from the person in relation to the specific objective of the interaction (medical assessment, adaptation to user's needs, etc). For example, the assistive robot autonomously adapts the data acquisition strategy to the goal of improving the provided assistance without the acquisition of personal data, which is irrelevant to the assistance. The second goal is to improve the robustness and high integrity of system architectures (cyber-physical security) adopted for the above-mentioned cutting-edge technologies. The solutions defined by the candidate can also help the security risk management and the analysis of social engineering threats. <o:p></o:p></span></p><p class=Default><span lang=EN-US style='font-size:11.5pt;color:windowtext'>As an outcome of the project, such methodologies will be concretely applied to innovative applications designed at the Istituto Italiano di Tecnologia in collaboration with the robotic labs of the institute to make the applications socially aware and socially acceptable. <o:p></o:p></span></p><p class=Default><b><span lang=EN-US style='font-size:11.5pt;color:windowtext'><o:p> </o:p></span></b></p><p class=Default><b><span lang=EN-US style='font-size:11.5pt;color:windowtext'>Requirements</span></b><span lang=EN-US style='font-size:11.5pt;color:windowtext'>: a degree in Computer Science with high interests in life sciences. 
Programming skills, familiarity with social networks <o:p></o:p></span></p><p class=MsoNormal><b><span lang=EN-US style='font-size:11.5pt'><o:p> </o:p></span></b></p><p class=MsoNormal><b><span lang=EN-US style='font-size:11.5pt'>Contacts: </span></b><span lang=EN-US style='font-size:11.5pt'>francesco.rea@iit.it stefano.bencetti@iit.it - </span><span lang=EN-US style='font-size:11.5pt'>Applicants are strongly encouraged to contact the prospective tutors before they submit their application.</span><span lang=EN-US><o:p></o:p></span></p><p><span lang=EN-US>===============<br><strong>How to apply</strong><br>===============<o:p></o:p></span></p><p><span lang=EN-US>Please note that the positions are available through the PhD course of Bioengineering and Robotics, curriculum on Cognitive Robotics, Interaction and Rehabilitation Technologies, offered jointly by IIT and the University of Genoa.<o:p></o:p></span></p><p><span lang=EN-US>The official calls are available here: </span><a href="https://www.iit.it/phd-school/phd-school-genoa"><span lang=EN-US>https://www.iit.it/phd-school/phd-school-genoa</span></a><span lang=EN-US> under the <i>Curriculum “Cognitive Robotics, interaction and rehabilitation technologies”<em>.</em><o:p></o:p></i></span></p><p><span lang=EN-US>Please have a look at the <strong>"ADMISSION GUIDE"</strong> section, which contains detailed instructions on how to apply and a list of documents you have to present. <o:p></o:p></span></p><p><span lang=EN-US>In particular, note that the preparation of a short project proposal is required and is important in the evaluation of prospective candidates. 
Please find here a template (PDF) : <a href="http://phd.dibris.unige.it/biorob/media/Template%20for%20Research%20Project_v2.0.pdf">http://phd.dibris.unige.it/biorob/media/Template%20for%20Research%20Project_v2.0.pdf</a> <o:p></o:p></span></p><p><span lang=EN-US>The link to the on-line application page is: <a href="https://www.studenti.unige.it/postlaurea/dottorati/XXXIV/EN/">https://www.studenti.unige.it/postlaurea/dottorati/XXXIV/EN/</a> <o:p></o:p></span></p><p><span lang=EN-US>In case of problems or questions related to the application procedure, please contact: </span><a href="mailto:anastasia.bruzzone@iit.it"><span lang=EN-US>anastasia.bruzzone@iit.it</span></a><span lang=EN-US> <o:p></o:p></span></p><p><span lang=EN-US>============================================================<br>Application deadline: *****<strong>12 June 2018, Noon, Italian time</strong>******<br>============================================================<o:p></o:p></span></p><p class=MsoNormal style='text-autospace:none'><span lang=EN-US style='font-size:11.0pt;color:#1F497D;mso-fareast-language:IT'>----------------------------------------<br>Alessandra Sciutti (PhD)<br>Researcher, Robotics Brain and Cognitive Sciences Unit <br>Istituto Italiano di Tecnologia<br>Center for Human Technologies <o:p></o:p></span></p><p class=MsoNormal style='text-autospace:none'><span lang=EN-US style='font-size:11.0pt;color:#1F497D;mso-fareast-language:IT'>Via Enrico Melen 83, Building B<o:p></o:p></span></p><p class=MsoNormal style='text-autospace:none'><span lang=EN-GB style='font-size:11.0pt;color:#1F497D;mso-fareast-language:IT'>16152 Genova, Italy<o:p></o:p></span></p><p class=MsoNormal style='text-autospace:none'><span lang=EN-GB style='font-size:11.0pt;color:#1F497D;mso-fareast-language:IT'>tel: +39 010 8172 210<o:p></o:p></span></p><p class=MsoNormal style='text-autospace:none'><span lang=EN-US style='font-size:11.0pt;color:#1F497D;mso-fareast-language:IT'>email:</span><span lang=EN-US 
style='font-size:11.0pt;color:black;mso-fareast-language:IT'> </span><a href="mailto:alessandra.sciutti@iit.it"><span lang=EN-GB style='font-size:11.0pt;color:blue;mso-fareast-language:IT'>alessandra.sciutti@iit.it</span></a><span style='font-size:11.0pt;color:black;mso-fareast-language:IT'> <span lang=EN-GB><o:p></o:p></span></span></p><p class=MsoNormal style='text-autospace:none'><span lang=EN-US style='font-size:11.0pt;color:#1F497D;mso-fareast-language:IT'>website:</span><span lang=EN-US style='font-size:11.0pt;color:black;mso-fareast-language:IT'> </span><a href="https://www.iit.it/people/alessandra-sciutti"><span lang=EN-GB style='font-size:11.0pt;color:blue;mso-fareast-language:IT'>https://www.iit.it/people/alessandra-sciutti</span></a><span lang=EN-GB style='font-size:11.0pt;font-family:"Segoe Print";color:#1F497D;mso-fareast-language:IT'><o:p></o:p></span></p><p class=MsoNormal><span lang=EN-US style='font-size:11.0pt'><o:p> </o:p></span></p><p><span lang=EN-US><o:p> </o:p></span></p><p class=MsoNormal><span lang=EN-US style='font-size:11.0pt'><o:p> </o:p></span></p></div></body></html>