<html xmlns:v="urn:schemas-microsoft-com:vml" xmlns:o="urn:schemas-microsoft-com:office:office" xmlns:w="urn:schemas-microsoft-com:office:word" xmlns:m="http://schemas.microsoft.com/office/2004/12/omml" xmlns="http://www.w3.org/TR/REC-html40"><head><meta http-equiv=Content-Type content="text/html; charset=us-ascii"><meta name=Generator content="Microsoft Word 15 (filtered medium)"><style><!--
/* Font Definitions */
@font-face
{font-family:"Cambria Math";
panose-1:2 4 5 3 5 4 6 3 2 4;}
@font-face
{font-family:Calibri;
panose-1:2 15 5 2 2 2 4 3 2 4;}
/* Style Definitions */
p.MsoNormal, li.MsoNormal, div.MsoNormal
{margin:0in;
margin-bottom:.0001pt;
font-size:11.0pt;
font-family:"Calibri",sans-serif;}
a:link, span.MsoHyperlink
{mso-style-priority:99;
color:#0563C1;
text-decoration:underline;}
a:visited, span.MsoHyperlinkFollowed
{mso-style-priority:99;
color:#954F72;
text-decoration:underline;}
p.MsoListParagraph, li.MsoListParagraph, div.MsoListParagraph
{mso-style-priority:34;
margin-top:0in;
margin-right:0in;
margin-bottom:0in;
margin-left:.5in;
margin-bottom:.0001pt;
font-size:11.0pt;
font-family:"Calibri",sans-serif;}
p.msonormal0, li.msonormal0, div.msonormal0
{mso-style-name:msonormal;
mso-margin-top-alt:auto;
margin-right:0in;
mso-margin-bottom-alt:auto;
margin-left:0in;
font-size:11.0pt;
font-family:"Calibri",sans-serif;}
span.EmailStyle19
{mso-style-type:personal;
font-family:"Calibri",sans-serif;
color:windowtext;}
span.EmailStyle20
{mso-style-type:personal;
font-family:"Calibri",sans-serif;
color:windowtext;}
span.EmailStyle21
{mso-style-type:personal;
font-family:"Calibri",sans-serif;
color:windowtext;}
span.EmailStyle22
{mso-style-type:personal;
font-family:"Calibri",sans-serif;
color:windowtext;}
span.EmailStyle23
{mso-style-type:personal;
font-family:"Calibri",sans-serif;
color:windowtext;}
span.EmailStyle24
{mso-style-type:personal;
font-family:"Calibri",sans-serif;
color:windowtext;}
span.EmailStyle25
{mso-style-type:personal;
font-family:"Calibri",sans-serif;
color:windowtext;}
span.EmailStyle26
{mso-style-type:personal;
font-family:"Calibri",sans-serif;
color:windowtext;}
span.EmailStyle27
{mso-style-type:personal;
font-family:"Calibri",sans-serif;
color:windowtext;}
span.EmailStyle28
{mso-style-type:personal;
font-family:"Calibri",sans-serif;
color:windowtext;}
span.EmailStyle29
{mso-style-type:personal;
font-family:"Calibri",sans-serif;
color:windowtext;}
span.EmailStyle30
{mso-style-type:personal;
font-family:"Calibri",sans-serif;
color:windowtext;}
span.EmailStyle31
{mso-style-type:personal;
font-family:"Calibri",sans-serif;
color:windowtext;}
span.EmailStyle32
{mso-style-type:personal;
font-family:"Calibri",sans-serif;
color:windowtext;}
span.EmailStyle33
{mso-style-type:personal;
font-family:"Calibri",sans-serif;
color:windowtext;}
span.EmailStyle34
{mso-style-type:personal;
font-family:"Calibri",sans-serif;
color:windowtext;}
span.EmailStyle35
{mso-style-type:personal-reply;
font-family:"Calibri",sans-serif;
color:windowtext;}
.MsoChpDefault
{mso-style-type:export-only;
font-size:10.0pt;}
@page WordSection1
{size:8.5in 11.0in;
margin:1.0in 1.25in 1.0in 1.25in;}
div.WordSection1
{page:WordSection1;}
/* List Definitions */
@list l0
{mso-list-id:1007517054;
mso-list-type:hybrid;
mso-list-template-ids:1594282304 -351641160 67698713 67698715 67698703 67698713 67698715 67698703 67698713 67698715;}
@list l0:level1
{mso-level-tab-stop:none;
mso-level-number-position:left;
margin-left:.75in;
text-indent:-.25in;}
@list l0:level2
{mso-level-number-format:alpha-lower;
mso-level-tab-stop:none;
mso-level-number-position:left;
margin-left:1.25in;
text-indent:-.25in;}
@list l0:level3
{mso-level-number-format:roman-lower;
mso-level-tab-stop:none;
mso-level-number-position:right;
margin-left:1.75in;
text-indent:-9.0pt;}
@list l0:level4
{mso-level-tab-stop:none;
mso-level-number-position:left;
margin-left:2.25in;
text-indent:-.25in;}
@list l0:level5
{mso-level-number-format:alpha-lower;
mso-level-tab-stop:none;
mso-level-number-position:left;
margin-left:2.75in;
text-indent:-.25in;}
@list l0:level6
{mso-level-number-format:roman-lower;
mso-level-tab-stop:none;
mso-level-number-position:right;
margin-left:3.25in;
text-indent:-9.0pt;}
@list l0:level7
{mso-level-tab-stop:none;
mso-level-number-position:left;
margin-left:3.75in;
text-indent:-.25in;}
@list l0:level8
{mso-level-number-format:alpha-lower;
mso-level-tab-stop:none;
mso-level-number-position:left;
margin-left:4.25in;
text-indent:-.25in;}
@list l0:level9
{mso-level-number-format:roman-lower;
mso-level-tab-stop:none;
mso-level-number-position:right;
margin-left:4.75in;
text-indent:-9.0pt;}
ol
{margin-bottom:0in;}
ul
{margin-bottom:0in;}
--></style><!--[if gte mso 9]><xml>
<o:shapedefaults v:ext="edit" spidmax="1026" />
</xml><![endif]--><!--[if gte mso 9]><xml>
<o:shapelayout v:ext="edit">
<o:idmap v:ext="edit" data="1" />
</o:shapelayout></xml><![endif]--></head><body lang=EN-US link="#0563C1" vlink="#954F72"><div class=WordSection1><p class=MsoNormal><a name="_Hlk37689647"><span style='color:black'>Dear Computer Vision/Machine Learning/Autonomous Systems students, engineers, scientists and enthusiasts,<o:p></o:p></span></a></p><p class=MsoNormal><span style='mso-bookmark:_Hlk37689647'><span style='color:black'><o:p> </o:p></span></span></p><p class=MsoNormal><span style='mso-bookmark:_Hlk37689647'><span style='color:black'>Artificial Intelligence and Information analysis (AIIA) Lab, Aristotle University of Thessaloniki, Greece is proud to launch the live CVML Web lecture series</span> <span style='color:black'>that will cover very important topics in computer vision/machine learning. Two lectures will take place on Saturday 2</span></span><span style='mso-bookmark:_Hlk37689647'><span lang=EL>5</span><span style='color:black'>th April 2020:<o:p></o:p></span></span></p><p class=MsoNormal><span style='mso-bookmark:_Hlk37689647'><span style='color:black'><o:p> </o:p></span></span></p><p class=MsoNormal><span style='mso-bookmark:_Hlk37689647'><b><span style='color:black'>1) Introduction to Autonomous Systems<o:p></o:p></span></b></span></p><p class=MsoNormal><span style='mso-bookmark:_Hlk37689647'><b><span style='color:black'>2) Introduction to Computer Vision<o:p></o:p></span></b></span></p><p class=MsoNormal><span style='mso-bookmark:_Hlk37689647'><span style='color:black'><o:p> </o:p></span></span></p><p class=MsoNormal><span style='mso-bookmark:_Hlk37689647'><span style='color:black'>Date/time: <o:p></o:p></span></span></p><p class=MsoNormal><span style='mso-bookmark:_Hlk37689647'><span style='color:black'>a) Saturday 11:00-12:30 EET (17:00-18:30 Beijing time) for audience in Asia and <o:p></o:p></span></span></p><p class=MsoNormal><span style='mso-bookmark:_Hlk37689647'><span style='color:black'>b) Saturday 20:00-21:30 EET (13:00-14:30<i> </i>EST, 10:00-11:30 PST for NY/LA, 
respectively) for audience in the Americas. <o:p></o:p></span></span></p><p class=MsoNormal><span style='mso-bookmark:_Hlk37689647'><span style='color:black'><o:p> </o:p></span></span></p><p class=MsoNormal><span style='mso-bookmark:_Hlk37689647'><span style='color:black'>Registration can be done using the link:</span> </span><a href="http://icarus.csd.auth.gr/cvml-web-lecture-series/"><span style='mso-bookmark:_Hlk37689647'>http://icarus.csd.auth.gr/cvml-web-lecture-series/</span><span style='mso-bookmark:_Hlk37689647'></span></a><span style='mso-bookmark:_Hlk37689647'><o:p></o:p></span></p><p class=MsoNormal><span style='mso-bookmark:_Hlk37689647'><o:p> </o:p></span></p><p class=MsoNormal><span style='mso-bookmark:_Hlk37689647'><b><span style='color:black'>Lectures abstract<o:p></o:p></span></b></span></p><p class=MsoNormal><span style='mso-bookmark:_Hlk37689647'><span style='color:black'>1) Introduction to Autonomous Systems<o:p></o:p></span></span></p><p class=MsoNormal><span style='mso-bookmark:_Hlk37689647'><span style='color:black'>Abstract: Mission planning and control, perception and intelligence, embedded computing, swarm systems, communications and societal technologies. 
<o:p></o:p></span></span></p><p class=MsoNormal><span style='mso-bookmark:_Hlk37689647'><span style='color:black'>a) autonomous cars, b) drones and drone swarms, c) autonomous underwater vehicles d) autonomous marine vessels and e) autonomous robots.<o:p></o:p></span></span></p><p class=MsoNormal><span style='mso-bookmark:_Hlk37689647'><span style='color:black'><o:p> </o:p></span></span></p><p class=MsoNormal><span style='mso-bookmark:_Hlk37689647'><span style='color:black'>2) Introduction to Computer Vision<o:p></o:p></span></span></p><p class=MsoNormal><span style='mso-bookmark:_Hlk37689647'><span style='color:black'>Abstract: image/video sampling, Image and video acquisition, Camera geometry, Stereo and Multiview imaging, Structure from motion, Structure from X, 3D Robot Localization and Mapping, Semantic 3D world mapping, 3D object localization, Multiview object detection and tracking, Object pose estimation.<o:p></o:p></span></span></p><p class=MsoNormal><span style='mso-bookmark:_Hlk37689647'><span style='color:black'><o:p> </o:p></span></span></p><p class=MsoNormal><span style='mso-bookmark:_Hlk37689647'><b><span style='color:black'>Lecturer: Prof. Ioannis Pitas</span></b><span style='color:black'> (IEEE fellow, IEEE Distinguished Lecturer, EURASIP fellow) received the Diploma and PhD degree in Electrical Engineering, both from the Aristotle University of Thessaloniki, Greece. Since 1994, he has been a Professor at the Department of Informatics of the same University. He served as a Visiting Professor at several Universities.<o:p></o:p></span></span></p><p class=MsoNormal><span style='mso-bookmark:_Hlk37689647'><span style='color:black'>His current interests are in the areas of image/video processing, machine learning, computer vision, intelligent digital media, human centered interfaces, affective computing, 3D imaging and biomedical imaging. 
He has published over 1138 papers, contributed in 50 books in his areas of interest and edited or (co-)authored another 11 books. He has also been a member of the program committee of many scientific conferences and workshops. In the past he served as Associate Editor or co-Editor of 9 international journals and General or Technical Chair of 4 international conferences. He participated in 70 R&D projects, primarily funded by the European Union and is/was principal investigator/researcher in 42 such projects. He has 30000+ citations to his work and h-index 81+ (Google Scholar). <o:p></o:p></span></span></p><p class=MsoNormal><span style='mso-bookmark:_Hlk37689647'><span style='color:black'>Prof. Pitas led the big European H2020 R&D project MULTIDRONE:</span> </span><a href="https://multidrone.eu/"><span style='mso-bookmark:_Hlk37689647'>https://multidrone.eu/</span><span style='mso-bookmark:_Hlk37689647'></span></a><span style='mso-bookmark:_Hlk37689647'> <span style='color:black'>and is principal investigator (AUTH) in H2020 projects Aerial Core and AI4Media. He is chair of the Autonomous Systems initiative</span> </span><a href="https://ieeeasi.signalprocessingsociety.org/"><span style='mso-bookmark:_Hlk37689647'>https://ieeeasi.signalprocessingsociety.org/</span><span style='mso-bookmark:_Hlk37689647'></span></a><span style='mso-bookmark:_Hlk37689647'><span style='color:black'>.<o:p></o:p></span></span></p><p class=MsoNormal><span style='mso-bookmark:_Hlk37689647'><span style='color:black'>Prof. I. 
Pitas:</span> </span><a href="https://scholar.google.gr/citations?user=lWmGADwAAAAJ&hl=el"><span style='mso-bookmark:_Hlk37689647'>https://scholar.google.gr/citations?user=lWmGADwAAAAJ&hl=el</span><span style='mso-bookmark:_Hlk37689647'></span></a><span style='mso-bookmark:_Hlk37689647'><o:p></o:p></span></p><p class=MsoNormal><span style='mso-bookmark:_Hlk37689647'><span style='color:black'>AIIA Lab</span> </span><a href="http://www.aiia.csd.auth.gr"><span style='mso-bookmark:_Hlk37689647'>www.aiia.csd.auth.gr</span><span style='mso-bookmark:_Hlk37689647'></span></a><span style='mso-bookmark:_Hlk37689647'><o:p></o:p></span></p><p class=MsoNormal><span style='mso-bookmark:_Hlk37689647'><span style='color:black'>Lectures will consist primarily of live lecture streaming and PPT slides. Attendees (registrants) need no special computer equipment for attending the lecture. They will receive the lecture PDF before each lecture and will have the ability to ask questions real-time. Audience should have basic University-level undergraduate knowledge of any science or engineering department (calculus, probabilities, programming, that are typical e.g., in any ECE, CS, EE undergraduate program). 
More advanced knowledge (signals and systems, optimization theory, machine learning) is very helpful but not required.<o:p></o:p></span></span></p><p class=MsoNormal><span style='mso-bookmark:_Hlk37689647'><span style='color:black'>These two lectures are part of a 14 lecture <b>CVML web course ‘Computer vision and machine learning for autonomous systems’</b> (April-June 2020):<o:p></o:p></span></span></p><p class=MsoNormal style='margin-left:.55in;text-indent:-.25in;mso-list:l0 level1 lfo2'><span style='mso-bookmark:_Hlk37689647'><![if !supportLists]><span lang=EL style='color:black'><span style='mso-list:Ignore'>1.<span style='font:7.0pt "Times New Roman"'> </span></span></span><![endif]><span style='color:black'>Introduction to autonomous systems </span></span><span style='mso-bookmark:_Hlk37689647'><span lang=EL style='color:black'><o:p></o:p></span></span></p><p class=MsoNormal style='margin-left:.55in;text-indent:-.25in;mso-list:l0 level1 lfo2'><span style='mso-bookmark:_Hlk37689647'><![if !supportLists]><span style='color:black'><span style='mso-list:Ignore'>2.<span style='font:7.0pt "Times New Roman"'> </span></span></span><![endif]><span style='color:black'>Introduction to computer vision <o:p></o:p></span></span></p><p class=MsoNormal style='margin-left:.55in;text-indent:-.25in;mso-list:l0 level1 lfo2'><span style='mso-bookmark:_Hlk37689647'><![if !supportLists]><span style='color:black'><span style='mso-list:Ignore'>3.<span style='font:7.0pt "Times New Roman"'> </span></span></span><![endif]><span style='color:black'>Image acquisition, camera geometry <o:p></o:p></span></span></p><p class=MsoNormal style='margin-left:.55in;text-indent:-.25in;mso-list:l0 level1 lfo2'><span style='mso-bookmark:_Hlk37689647'><![if !supportLists]><span style='color:black'><span style='mso-list:Ignore'>4.<span style='font:7.0pt "Times New Roman"'> </span></span></span><![endif]><span style='color:black'>Stereo and Multiview imaging <o:p></o:p></span></span></p><p 
class=MsoNormal style='margin-left:.55in;text-indent:-.25in;mso-list:l0 level1 lfo2'><span style='mso-bookmark:_Hlk37689647'><![if !supportLists]><span style='color:black'><span style='mso-list:Ignore'>5.<span style='font:7.0pt "Times New Roman"'> </span></span></span><![endif]><span style='color:black'>3D object/building/monument reconstruction and modeling <o:p></o:p></span></span></p><p class=MsoNormal style='margin-left:.55in;text-indent:-.25in;mso-list:l0 level1 lfo2'><span style='mso-bookmark:_Hlk37689647'><![if !supportLists]><span style='color:black'><span style='mso-list:Ignore'>6.<span style='font:7.0pt "Times New Roman"'> </span></span></span><![endif]><span style='color:black'>Signals and systems. 2D convolution/correlation <o:p></o:p></span></span></p><p class=MsoNormal style='margin-left:.55in;text-indent:-.25in;mso-list:l0 level1 lfo2'><span style='mso-bookmark:_Hlk37689647'><![if !supportLists]><span style='color:black'><span style='mso-list:Ignore'>7.<span style='font:7.0pt "Times New Roman"'> </span></span></span><![endif]><span style='color:black'>Motion estimation <o:p></o:p></span></span></p><p class=MsoNormal style='margin-left:.55in;text-indent:-.25in;mso-list:l0 level1 lfo2'><span style='mso-bookmark:_Hlk37689647'><![if !supportLists]><span style='color:black'><span style='mso-list:Ignore'>8.<span style='font:7.0pt "Times New Roman"'> </span></span></span><![endif]><span style='color:black'>Introduction to Machine Learning<o:p></o:p></span></span></p><p class=MsoNormal style='margin-left:.55in;text-indent:-.25in;mso-list:l0 level1 lfo2'><span style='mso-bookmark:_Hlk37689647'><![if !supportLists]><span style='color:black'><span style='mso-list:Ignore'>9.<span style='font:7.0pt "Times New Roman"'> </span></span></span><![endif]><span style='color:black'>Introduction to neural networks, Perceptron, backpropagation<o:p></o:p></span></span></p><p class=MsoNormal style='margin-left:.55in;text-indent:-.25in;mso-list:l0 level1 lfo2'><span 
style='mso-bookmark:_Hlk37689647'><![if !supportLists]><span style='color:black'><span style='mso-list:Ignore'>10.<span style='font:7.0pt "Times New Roman"'> </span></span></span><![endif]><span style='color:black'>Deep neural networks, Convolutional NNs<o:p></o:p></span></span></p><p class=MsoNormal style='margin-left:.55in;text-indent:-.25in;mso-list:l0 level1 lfo2'><span style='mso-bookmark:_Hlk37689647'><![if !supportLists]><span style='color:black'><span style='mso-list:Ignore'>11.<span style='font:7.0pt "Times New Roman"'> </span></span></span><![endif]><span style='color:black'>Deep learning for object/target detection<o:p></o:p></span></span></p><p class=MsoNormal style='margin-left:.55in;text-indent:-.25in;mso-list:l0 level1 lfo2'><span style='mso-bookmark:_Hlk37689647'><![if !supportLists]><span style='color:black'><span style='mso-list:Ignore'>12.<span style='font:7.0pt "Times New Roman"'> </span></span></span><![endif]><span style='color:black'>Object tracking <o:p></o:p></span></span></p><p class=MsoNormal style='margin-left:.55in;text-indent:-.25in;mso-list:l0 level1 lfo2'><span style='mso-bookmark:_Hlk37689647'><![if !supportLists]><span style='color:black'><span style='mso-list:Ignore'>13.<span style='font:7.0pt "Times New Roman"'> </span></span></span><![endif]><span style='color:black'>Localization and mapping<o:p></o:p></span></span></p><p class=MsoNormal style='margin-left:.55in;text-indent:-.25in;mso-list:l0 level1 lfo2'><span style='mso-bookmark:_Hlk37689647'><![if !supportLists]><span style='color:black'><span style='mso-list:Ignore'>14.<span style='font:7.0pt "Times New Roman"'> </span></span></span><![endif]><span style='color:black'>Fast convolution algorithms. 
CVML programming tools.<o:p></o:p></span></span></p><p class=MsoNormal><span style='mso-bookmark:_Hlk37689647'><o:p> </o:p></span></p><p class=MsoNormal><span style='mso-bookmark:_Hlk37689647'><o:p> </o:p></span></p><p class=MsoNormal><span style='mso-bookmark:_Hlk37689647'><span style='color:black'>Sincerely yours<o:p></o:p></span></span></p><p class=MsoNormal><span style='mso-bookmark:_Hlk37689647'><span style='color:black'>Prof. Ioannis Pitas, Director of AIIA Lab, Aristotle University of Thessaloniki, Greece</span></span><o:p></o:p></p><p class=MsoNormal><o:p> </o:p></p><p class=MsoNormal><o:p> </o:p></p><p class=MsoNormal><o:p> </o:p></p><p class=MsoNormal><o:p> </o:p></p></div></body></html>