Andrea Ierardi 2020-04-12 22:00:42 +02:00
parent dfea12e4dc
commit 17930de645
7 changed files with 298 additions and 794 deletions


@@ -1,34 +1,3 @@
\relax
\@nameuse{bbl@beforestart}
\babel@aux{english}{}
\@writefile{toc}{\contentsline {section}{\numberline {1}Lecture 1 - 09-03-2020}{3}\protected@file@percent }
\@writefile{toc}{\contentsline {subsection}{\numberline {1.1}Introduction}{3}\protected@file@percent }
\@writefile{toc}{\contentsline {section}{\numberline {2}Lecture 2 - 07-04-2020}{6}\protected@file@percent }
\@writefile{toc}{\contentsline {subsection}{\numberline {2.1}Argomento}{6}\protected@file@percent }
\@writefile{toc}{\contentsline {subsection}{\numberline {2.2}Loss}{6}\protected@file@percent }
\@writefile{toc}{\contentsline {subsubsection}{\numberline {2.2.1}Absolute Loss}{6}\protected@file@percent }
\@writefile{toc}{\contentsline {subsubsection}{\numberline {2.2.2}Square Loss}{7}\protected@file@percent }
\@writefile{toc}{\contentsline {subsubsection}{\numberline {2.2.3}Example of information of square loss}{7}\protected@file@percent }
\@writefile{toc}{\contentsline {subsubsection}{\numberline {2.2.4}labels and losses}{9}\protected@file@percent }
\@writefile{toc}{\contentsline {subsubsection}{\numberline {2.2.5}Example TF(idf) documents encoding}{10}\protected@file@percent }
\@writefile{toc}{\contentsline {section}{\numberline {3}Lecture 3 - 07-04-2020}{12}\protected@file@percent }
\@writefile{toc}{\contentsline {subsection}{\numberline {3.1}Overfitting}{14}\protected@file@percent }
\@writefile{toc}{\contentsline {subsubsection}{\numberline {3.1.1}Noise in the data}{14}\protected@file@percent }
\@writefile{toc}{\contentsline {subsection}{\numberline {3.2}Underfitting}{16}\protected@file@percent }
\@writefile{toc}{\contentsline {subsection}{\numberline {3.3}Nearest neighbour}{16}\protected@file@percent }
\@writefile{toc}{\contentsline {section}{\numberline {4}Lecture 4 - 07-04-2020}{18}\protected@file@percent }
\@writefile{toc}{\contentsline {subsection}{\numberline {4.1}Computing $h_{NN}$}{18}\protected@file@percent }
\@writefile{toc}{\contentsline {subsection}{\numberline {4.2}Tree Predictor}{19}\protected@file@percent }
\@writefile{toc}{\contentsline {section}{\numberline {5}Lecture 5 - 07-04-2020}{22}\protected@file@percent }
\@writefile{toc}{\contentsline {subsection}{\numberline {5.1}Tree Classifier}{22}\protected@file@percent }
\@writefile{toc}{\contentsline {subsection}{\numberline {5.2}Jensens inequality}{23}\protected@file@percent }
\@writefile{toc}{\contentsline {subsection}{\numberline {5.3}Tree Predictor}{25}\protected@file@percent }
\@writefile{toc}{\contentsline {subsection}{\numberline {5.4}Statistical model for Machine Learning}{26}\protected@file@percent }
\@writefile{toc}{\contentsline {section}{\numberline {6}Lecture 6 - 07-04-2020}{28}\protected@file@percent }
\@writefile{toc}{\contentsline {section}{\numberline {7}Lecture 7 - 07-04-2020}{29}\protected@file@percent }
\@writefile{toc}{\contentsline {section}{\numberline {8}Lecture 8 - 07-04-2020}{30}\protected@file@percent }
\@writefile{toc}{\contentsline {section}{\numberline {9}Lecture 9 - 07-04-2020}{31}\protected@file@percent }
\@writefile{toc}{\contentsline {section}{\numberline {10}Lecture 10 - 07-04-2020}{32}\protected@file@percent }
\@writefile{toc}{\contentsline {subsection}{\numberline {10.1}TO BE DEFINE}{32}\protected@file@percent }
\bibstyle{abbrv}
\bibdata{main}
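
Note: the \bibstyle{abbrv} and \bibdata{main} lines are the part of the .aux file that BibTeX reads to pick the citation style and the bibliography database; they are written automatically by the bibliography commands in the document source. A minimal sketch of the .tex side that produces them (assuming the database is a file named main.bib):

    \bibliographystyle{abbrv}  % written to the .aux file as \bibstyle{abbrv}
    \bibliography{main}        % written to the .aux file as \bibdata{main}; prints the reference list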


@@ -7,7 +7,7 @@
\usepackage[dvipsnames]{xcolor}
\usepackage{graphicx}
%\graphicspath{ {./img/} }
\graphicspath{ {./img/} }
\definecolor{mypink}{cmyk}{0, 0.7808, 0.4429, 0.1412}
\definecolor{mygray}{gray}{0.6}
\usepackage{framed}
@@ -45,7 +45,7 @@
\vspace{0.8cm}
\includegraphics[width=0.4\textwidth]{frontpage}
\includegraphics[width=.7\linewidth]{frontpage.jpg}
\Large
Università degli Studi di Milano
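
Note: these hunks activate the previously commented-out \graphicspath{ {./img/} } and switch the title-page image to frontpage.jpg at 70% of the line width; with \graphicspath set, \includegraphics resolves bare file names against the img/ subdirectory. A minimal sketch of how the two commands interact (assuming img/frontpage.jpg exists next to the main .tex file):

    \documentclass{article}
    \usepackage{graphicx}
    \graphicspath{ {./img/} }   % search ./img/ when resolving image file names
    \begin{document}
    \includegraphics[width=.7\linewidth]{frontpage.jpg}   % finds ./img/frontpage.jpg
    \end{document}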


@@ -1,30 +0,0 @@
\babel@toc {english}{}
\contentsline {section}{\numberline {1}Lecture 1 - 09-03-2020}{3}%
\contentsline {subsection}{\numberline {1.1}Introduction}{3}%
\contentsline {section}{\numberline {2}Lecture 2 - 07-04-2020}{6}%
\contentsline {subsection}{\numberline {2.1}Argomento}{6}%
\contentsline {subsection}{\numberline {2.2}Loss}{6}%
\contentsline {subsubsection}{\numberline {2.2.1}Absolute Loss}{6}%
\contentsline {subsubsection}{\numberline {2.2.2}Square Loss}{7}%
\contentsline {subsubsection}{\numberline {2.2.3}Example of information of square loss}{7}%
\contentsline {subsubsection}{\numberline {2.2.4}labels and losses}{9}%
\contentsline {subsubsection}{\numberline {2.2.5}Example TF(idf) documents encoding}{10}%
\contentsline {section}{\numberline {3}Lecture 3 - 07-04-2020}{12}%
\contentsline {subsection}{\numberline {3.1}Overfitting}{14}%
\contentsline {subsubsection}{\numberline {3.1.1}Noise in the data}{14}%
\contentsline {subsection}{\numberline {3.2}Underfitting}{16}%
\contentsline {subsection}{\numberline {3.3}Nearest neighbour}{16}%
\contentsline {section}{\numberline {4}Lecture 4 - 07-04-2020}{18}%
\contentsline {subsection}{\numberline {4.1}Computing $h_{NN}$}{18}%
\contentsline {subsection}{\numberline {4.2}Tree Predictor}{19}%
\contentsline {section}{\numberline {5}Lecture 5 - 07-04-2020}{22}%
\contentsline {subsection}{\numberline {5.1}Tree Classifier}{22}%
\contentsline {subsection}{\numberline {5.2}Jensens inequality}{23}%
\contentsline {subsection}{\numberline {5.3}Tree Predictor}{25}%
\contentsline {subsection}{\numberline {5.4}Statistical model for Machine Learning}{26}%
\contentsline {section}{\numberline {6}Lecture 6 - 07-04-2020}{28}%
\contentsline {section}{\numberline {7}Lecture 7 - 07-04-2020}{29}%
\contentsline {section}{\numberline {8}Lecture 8 - 07-04-2020}{30}%
\contentsline {section}{\numberline {9}Lecture 9 - 07-04-2020}{31}%
\contentsline {section}{\numberline {10}Lecture 10 - 07-04-2020}{32}%
\contentsline {subsection}{\numberline {10.1}TO BE DEFINE}{32}%
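
Note: the deleted .toc file is build output rather than source: \tableofcontents reads it at the start of a run, and each sectioning command rewrites a \contentsline entry with the page it lands on, so the file reappears on the next LaTeX pass (and the printed contents are up to date after a second pass). A minimal sketch of the .tex side that regenerates entries like the ones above (titles come from the notes themselves; page numbers are filled in by LaTeX):

    \tableofcontents
    \section{Lecture 1 - 09-03-2020}   % emits \contentsline{section}{...}{<page>} to the .toc
    \subsection{Introduction}          % emits \contentsline{subsection}{...}{<page>}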