<html xmlns:v="urn:schemas-microsoft-com:vml" xmlns:o="urn:schemas-microsoft-com:office:office" xmlns:w="urn:schemas-microsoft-com:office:word" xmlns:m="http://schemas.microsoft.com/office/2004/12/omml" xmlns="http://www.w3.org/TR/REC-html40"><head><meta http-equiv=Content-Type content="text/html; charset=us-ascii"><meta name=Generator content="Microsoft Word 15 (filtered medium)"><style><!--
/* Font Definitions */
@font-face
{font-family:"Cambria Math";
panose-1:2 4 5 3 5 4 6 3 2 4;}
@font-face
{font-family:Calibri;
panose-1:2 15 5 2 2 2 4 3 2 4;}
@font-face
{font-family:"Calibri Light";
panose-1:2 15 3 2 2 2 4 3 2 4;}
@font-face
{font-family:Consolas;
panose-1:2 11 6 9 2 2 4 3 2 4;}
/* Style Definitions */
p.MsoNormal, li.MsoNormal, div.MsoNormal
{margin-top:0in;
margin-right:0in;
margin-bottom:8.0pt;
margin-left:0in;
line-height:105%;
font-size:11.0pt;
font-family:"Calibri",sans-serif;}
h5
{mso-style-priority:9;
mso-style-link:"\0395\03C0\03B9\03BA\03B5\03C6\03B1\03BB\03AF\03B4\03B1 5 Char";
mso-margin-top-alt:auto;
margin-right:0in;
mso-margin-bottom-alt:auto;
margin-left:0in;
font-size:10.0pt;
font-family:"Calibri",sans-serif;}
a:link, span.MsoHyperlink
{mso-style-priority:99;
color:#0563C1;
text-decoration:underline;}
a:visited, span.MsoHyperlinkFollowed
{mso-style-priority:99;
color:#954F72;
text-decoration:underline;}
p.MsoPlainText, li.MsoPlainText, div.MsoPlainText
{mso-style-priority:99;
mso-style-link:"\0391\03C0\03BB\03CC \03BA\03B5\03AF\03BC\03B5\03BD\03BF Char";
margin-top:0in;
margin-right:0in;
margin-bottom:8.0pt;
margin-left:0in;
line-height:105%;
font-size:11.0pt;
font-family:"Calibri",sans-serif;}
p.MsoNoSpacing, li.MsoNoSpacing, div.MsoNoSpacing
{mso-style-priority:1;
margin:0in;
margin-bottom:.0001pt;
font-size:12.0pt;
font-family:"Times New Roman",serif;}
p.MsoListParagraph, li.MsoListParagraph, div.MsoListParagraph
{mso-style-priority:34;
margin-top:0in;
margin-right:0in;
margin-bottom:0in;
margin-left:.5in;
margin-bottom:.0001pt;
font-size:11.0pt;
font-family:"Calibri",sans-serif;}
span.5Char
{mso-style-name:"\0395\03C0\03B9\03BA\03B5\03C6\03B1\03BB\03AF\03B4\03B1 5 Char";
mso-style-priority:9;
mso-style-link:"\0395\03C0\03B9\03BA\03B5\03C6\03B1\03BB\03AF\03B4\03B1 5";
font-family:"Calibri Light",sans-serif;
color:#2F5496;}
p.msonormal0, li.msonormal0, div.msonormal0
{mso-style-name:msonormal;
mso-margin-top-alt:auto;
margin-right:0in;
mso-margin-bottom-alt:auto;
margin-left:0in;
font-size:11.0pt;
font-family:"Calibri",sans-serif;}
span.Char
{mso-style-name:"\0391\03C0\03BB\03CC \03BA\03B5\03AF\03BC\03B5\03BD\03BF Char";
mso-style-priority:99;
mso-style-link:"\0391\03C0\03BB\03CC \03BA\03B5\03AF\03BC\03B5\03BD\03BF";
font-family:Consolas;}
span.PlainTextChar
{mso-style-name:"Plain Text Char";
mso-style-priority:99;
mso-style-link:"Plain Text";
font-family:"Calibri",sans-serif;
color:black;}
p.PlainText, li.PlainText, div.PlainText
{mso-style-name:"Plain Text";
mso-style-link:"Plain Text Char";
margin-top:0in;
margin-right:0in;
margin-bottom:8.0pt;
margin-left:0in;
line-height:105%;
font-size:11.0pt;
font-family:"Calibri",sans-serif;}
span.Heading5Char
{mso-style-name:"Heading 5 Char";
mso-style-priority:9;
mso-style-link:"Heading 5";
font-family:"Calibri",sans-serif;
font-weight:bold;}
p.Heading5, li.Heading5, div.Heading5
{mso-style-name:"Heading 5";
mso-style-link:"Heading 5 Char";
margin-top:0in;
margin-right:0in;
margin-bottom:8.0pt;
margin-left:0in;
line-height:105%;
font-size:11.0pt;
font-family:"Calibri",sans-serif;}
span.EmailStyle27
{mso-style-type:personal;
font-family:"Calibri",sans-serif;
color:windowtext;}
span.tadv-format-panel
{mso-style-name:tadv-format-panel;}
span.EmailStyle29
{mso-style-type:personal;
font-family:"Calibri",sans-serif;
color:windowtext;}
span.EmailStyle30
{mso-style-type:personal;
font-family:"Calibri",sans-serif;
color:windowtext;}
span.EmailStyle31
{mso-style-type:personal;
font-family:"Calibri",sans-serif;
color:windowtext;}
span.EmailStyle32
{mso-style-type:personal;
font-family:"Calibri",sans-serif;
color:windowtext;}
span.EmailStyle33
{mso-style-type:personal;
font-family:"Calibri",sans-serif;
color:windowtext;}
span.st
{mso-style-name:st;}
span.EmailStyle35
{mso-style-type:personal;
font-family:"Calibri",sans-serif;
color:windowtext;}
span.EmailStyle36
{mso-style-type:personal;
font-family:"Calibri",sans-serif;
color:windowtext;}
span.EmailStyle37
{mso-style-type:personal;
font-family:"Calibri",sans-serif;
color:windowtext;}
span.EmailStyle38
{mso-style-type:personal;
font-family:"Calibri",sans-serif;
color:windowtext;}
span.EmailStyle39
{mso-style-type:personal;
font-family:"Calibri",sans-serif;
color:windowtext;}
span.EmailStyle40
{mso-style-type:personal;
font-family:"Calibri",sans-serif;
color:windowtext;}
span.EmailStyle41
{mso-style-type:personal;
font-family:"Calibri",sans-serif;
color:windowtext;}
span.EmailStyle42
{mso-style-type:personal;
font-family:"Calibri",sans-serif;
color:windowtext;}
span.EmailStyle43
{mso-style-type:personal;
font-family:"Calibri",sans-serif;
color:windowtext;}
span.EmailStyle44
{mso-style-type:personal;
font-family:"Calibri",sans-serif;
color:windowtext;}
span.EmailStyle45
{mso-style-type:personal;
font-family:"Calibri",sans-serif;
color:windowtext;}
span.EmailStyle46
{mso-style-type:personal;
font-family:"Calibri",sans-serif;
color:windowtext;}
span.EmailStyle47
{mso-style-type:personal;
font-family:"Calibri",sans-serif;
color:windowtext;}
span.EmailStyle48
{mso-style-type:personal;
font-family:"Calibri",sans-serif;
color:windowtext;}
span.EmailStyle49
{mso-style-type:personal;
font-family:"Calibri",sans-serif;
color:windowtext;}
span.EmailStyle50
{mso-style-type:personal;
font-family:"Calibri",sans-serif;
color:windowtext;}
span.EmailStyle51
{mso-style-type:personal;
font-family:"Calibri",sans-serif;
color:windowtext;}
span.EmailStyle52
{mso-style-type:personal;
font-family:"Calibri",sans-serif;
color:windowtext;}
span.EmailStyle53
{mso-style-type:personal;
font-family:"Calibri",sans-serif;
color:windowtext;}
span.EmailStyle54
{mso-style-type:personal;
font-family:"Calibri",sans-serif;
color:windowtext;}
span.EmailStyle55
{mso-style-type:personal;
font-family:"Calibri",sans-serif;
color:windowtext;}
span.EmailStyle56
{mso-style-type:personal;
font-family:"Calibri",sans-serif;
color:windowtext;}
span.EmailStyle57
{mso-style-type:personal;
font-family:"Calibri",sans-serif;
color:windowtext;}
span.EmailStyle58
{mso-style-type:personal;
font-family:"Calibri",sans-serif;
color:windowtext;}
span.EmailStyle59
{mso-style-type:personal;
font-family:"Calibri",sans-serif;
color:windowtext;}
span.EmailStyle60
{mso-style-type:personal;
font-family:"Calibri",sans-serif;
color:windowtext;}
span.EmailStyle61
{mso-style-type:personal;
font-family:"Calibri",sans-serif;
color:windowtext;}
span.EmailStyle62
{mso-style-type:personal;
font-family:"Calibri",sans-serif;
color:windowtext;}
span.EmailStyle63
{mso-style-type:personal;
font-family:"Calibri",sans-serif;
color:windowtext;}
span.EmailStyle64
{mso-style-type:personal;
font-family:"Calibri",sans-serif;
color:windowtext;}
span.EmailStyle65
{mso-style-type:personal;
font-family:"Calibri",sans-serif;
color:windowtext;}
span.tadv-color
{mso-style-name:tadv-color;}
span.EmailStyle67
{mso-style-type:personal;
font-family:"Calibri",sans-serif;
color:windowtext;}
span.EmailStyle68
{mso-style-type:personal;
font-family:"Calibri",sans-serif;
color:windowtext;}
span.EmailStyle69
{mso-style-type:personal;
font-family:"Calibri",sans-serif;
color:windowtext;}
span.EmailStyle70
{mso-style-type:personal;
font-family:"Calibri",sans-serif;
color:windowtext;}
span.EmailStyle71
{mso-style-type:personal;
font-family:"Calibri",sans-serif;
color:windowtext;}
span.EmailStyle72
{mso-style-type:personal;
font-family:"Calibri",sans-serif;
color:windowtext;}
span.EmailStyle73
{mso-style-type:personal;
font-family:"Calibri",sans-serif;
color:windowtext;}
span.EmailStyle74
{mso-style-type:personal;
font-family:"Calibri",sans-serif;
color:windowtext;}
span.EmailStyle75
{mso-style-type:personal;
font-family:"Calibri",sans-serif;
color:windowtext;}
span.EmailStyle76
{mso-style-type:personal;
font-family:"Calibri",sans-serif;
color:windowtext;}
span.EmailStyle79
{mso-style-type:personal-reply;
font-family:"Calibri",sans-serif;
color:windowtext;}
.MsoChpDefault
{mso-style-type:export-only;
font-size:10.0pt;}
@page WordSection1
{size:8.5in 11.0in;
margin:1.0in 1.25in 1.0in 1.25in;}
div.WordSection1
{page:WordSection1;}
--></style><!--[if gte mso 9]><xml>
<o:shapedefaults v:ext="edit" spidmax="1026" />
</xml><![endif]--><!--[if gte mso 9]><xml>
<o:shapelayout v:ext="edit">
<o:idmap v:ext="edit" data="1" />
Dear Computer Vision/Machine Learning/Autonomous Systems students, engineers, scientists and enthusiasts,

The Artificial Intelligence and Information Analysis (AIIA) Lab, Aristotle University of Thessaloniki, Greece, is proud to launch its live CVML web lecture series, which will cover very important topics in computer vision and machine learning. Two lectures will take place on Saturday, 2nd May 2020:

1) Image acquisition, camera geometry
2) Stereo and Multiview imaging

Date/time:
a) Saturday 11:00-12:30 EET (17:00-18:30 Beijing time) for audiences in Asia, and
b) Saturday 20:00-21:30 EET (13:00-14:30 EST, 10:00-11:30 PST for NY/LA, respectively) for audiences in the Americas.

Registration can be done using the link: http://icarus.csd.auth.gr/cvml-web-lecture-series/

Lecture abstracts

1) Image acquisition, camera geometry
Abstract: After a brief introduction to image acquisition and light reflection, the building blocks of modern cameras will be surveyed, along with geometric camera modeling. Several camera models, such as the pinhole and the weak-perspective camera models, will subsequently be presented. Projective geometry will be reviewed, with the most commonly used camera calibration techniques closing the lecture.
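To give a flavor of the geometric camera modeling mentioned above, here is a minimal, purely illustrative Python/NumPy sketch of pinhole projection (the intrinsic parameter values and the 3D point are hypothetical, chosen only for this example; they are not taken from the lecture material):

    import numpy as np

    # Pinhole intrinsics (hypothetical): focal lengths fx, fy and
    # principal point (cx, cy), all in pixels.
    K = np.array([[800.0,   0.0, 320.0],
                  [  0.0, 800.0, 240.0],
                  [  0.0,   0.0,   1.0]])

    # A 3D point in camera coordinates (X, Y, Z), with Z > 0 in front of the camera.
    X = np.array([0.2, -0.1, 2.0])

    # Perspective projection: multiply by K, then divide by the depth coordinate.
    x_h = K @ X
    u, v = x_h[:2] / x_h[2]
    print(f"Projected pixel: ({u:.1f}, {v:.1f})")   # (400.0, 200.0)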
2) Stereo and Multiview imaging
Abstract: Stereoscopic and multiview imaging will be explored in depth. The fundamentals of stereopsis will be reviewed. Stereoscopic vision geometry will be presented, focusing on epipolar geometry, the fundamental/essential matrices and camera rectification. Stereo camera technologies will be surveyed. Subsequently, the main methods of 3D scene reconstruction from stereoscopic video will be described, based on feature detection and matching. Classical and neural disparity estimation methods will be presented. 3D depth estimation in parallel, converging and arbitrary camera geometries will also be presented, along with the basics of multiview imaging.
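As a tiny illustration of the disparity-to-depth relation that underlies stereoscopic depth estimation, here is a minimal sketch assuming a rectified, parallel stereo pair; the focal length, baseline and disparity values are hypothetical:

    import numpy as np

    # For a rectified, parallel stereo pair: Z = f * B / d, where f is the focal
    # length (pixels), B the baseline (meters) and d the disparity (pixels).
    f = 800.0                                  # focal length in pixels (hypothetical)
    B = 0.12                                   # stereo baseline in meters (hypothetical)
    disparity = np.array([40.0, 20.0, 8.0])    # example disparities in pixels

    depth = f * B / disparity                  # depth in meters
    print(depth)                               # [ 2.4  4.8 12. ]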
Lecturer: Prof. Ioannis Pitas (IEEE fellow, IEEE Distinguished Lecturer, EURASIP fellow) received the Diploma and PhD degree in Electrical Engineering, both from the Aristotle University of Thessaloniki, Greece. Since 1994, he has been a Professor at the Department of Informatics of the same university. He has served as a Visiting Professor at several universities.

His current interests are in the areas of image/video processing, machine learning, computer vision, intelligent digital media, human-centered interfaces, affective computing, 3D imaging and biomedical imaging. He has published over 1138 papers, contributed to 50 books in his areas of interest and edited or (co-)authored another 11 books. He has also been a member of the program committees of many scientific conferences and workshops. In the past he served as Associate Editor or Co-Editor of 9 international journals and General or Technical Chair of 4 international conferences. He has participated in 70 R&D projects, primarily funded by the European Union, and is/was principal investigator/researcher in 42 such projects. He has 30,000+ citations to his work and an h-index of 81+ (Google Scholar).

Prof. Pitas led the large European H2020 R&D project MULTIDRONE (https://multidrone.eu/) and is principal investigator (AUTH) in the H2020 projects Aerial Core and AI4Media. He is chair of the Autonomous Systems initiative: https://ieeeasi.signalprocessingsociety.org/

Prof. I. Pitas: https://scholar.google.gr/citations?user=lWmGADwAAAAJ&hl=el
AIIA Lab: http://www.aiia.csd.auth.gr

Lectures will consist primarily of live lecture streaming and PPT slides. Attendees (registrants) need no special computer equipment to attend a lecture. They will receive the lecture PDF before each lecture and will be able to ask questions in real time. The audience should have basic university-level undergraduate knowledge of any science or engineering discipline (calculus, probability, programming, as is typical, e.g., in any ECE, CS or EE undergraduate program). More advanced knowledge (signals and systems, optimization theory, machine learning) is very helpful but not required.

These two lectures are part of the 14-lecture CVML web course 'Computer vision and machine learning for autonomous systems' (April-June 2020):

Introduction to autonomous systems (delivered 25th April 2020)
Introduction to computer vision (delivered 25th April 2020)
Image acquisition, camera geometry (scheduled 2nd May 2020)
Stereo and Multiview imaging (scheduled 2nd May 2020)
3D object/building/monument reconstruction and modeling
Signals and systems. 2D convolution/correlation
Motion estimation
Introduction to Machine Learning
Introduction to neural networks, Perceptron, backpropagation
Deep neural networks, Convolutional NNs
Deep learning for object/target detection
Object tracking
Localization and mapping
Fast convolution algorithms. CVML programming tools

Sincerely yours,
Prof. Ioannis Pitas
Director of AIIA Lab, Aristotle University of Thessaloniki, Greece