Master-DataScience-Notes/1year/3trimester/Machine Learning, Statistical Learning, Deep Learning and Artificial Intelligence/Machine Learning/main.toc
Andreaierardi 1b9d0e6f97 up imgs
2020-04-20 15:25:33 +02:00

55 lines
3.6 KiB
TeX
Raw Blame History

This file contains ambiguous Unicode characters

This file contains Unicode characters that might be confused with other characters. If you think that this is intentional, you can safely ignore this warning. Use the Escape button to reveal them.

\babel@toc {english}{}
\contentsline {chapter}{\numberline {1}Lecture 1 - 09-03-2020}{5}%
\contentsline {section}{\numberline {1.1}Introduction of the course}{5}%
\contentsline {section}{\numberline {1.2}Examples}{5}%
\contentsline {subsection}{\numberline {1.2.1}Spam filtering}{8}%
\contentsline {chapter}{\numberline {2}Lecture 2 - 07-04-2020}{9}%
\contentsline {section}{\numberline {2.1}Argomento}{9}%
\contentsline {section}{\numberline {2.2}Loss}{9}%
\contentsline {subsection}{\numberline {2.2.1}Absolute Loss}{9}%
\contentsline {subsection}{\numberline {2.2.2}Square Loss}{10}%
\contentsline {subsection}{\numberline {2.2.3}Example of information of square loss}{11}%
\contentsline {subsection}{\numberline {2.2.4}Labels and losses}{12}%
\contentsline {subsection}{\numberline {2.2.5}Example TF(idf) documents encoding}{14}%
\contentsline {chapter}{\numberline {3}Lecture 3 - 07-04-2020}{16}%
\contentsline {section}{\numberline {3.1}Overfitting}{18}%
\contentsline {subsection}{\numberline {3.1.1}Noise in the data}{18}%
\contentsline {section}{\numberline {3.2}Underfitting}{20}%
\contentsline {section}{\numberline {3.3}Nearest neighbour}{20}%
\contentsline {chapter}{\numberline {4}Lecture 4 - 07-04-2020}{23}%
\contentsline {section}{\numberline {4.1}Computing $h_{NN}$}{23}%
\contentsline {section}{\numberline {4.2}Tree Predictor}{25}%
\contentsline {chapter}{\numberline {5}Lecture 5 - 07-04-2020}{29}%
\contentsline {section}{\numberline {5.1}Tree Classifier}{29}%
\contentsline {section}{\numberline {5.2}Jensen's inequality}{31}%
\contentsline {section}{\numberline {5.3}Tree Predictor}{35}%
\contentsline {section}{\numberline {5.4}Statistical model for Machine Learning}{36}%
\contentsline {chapter}{\numberline {6}Lecture 6 - 07-04-2020}{38}%
\contentsline {section}{\numberline {6.1}Bayes Optimal Predictor}{38}%
\contentsline {subsection}{\numberline {6.1.1}Square Loss}{39}%
\contentsline {subsection}{\numberline {6.1.2}Zero-one loss for binary classification}{40}%
\contentsline {section}{\numberline {6.2}Bayes Risk}{43}%
\contentsline {chapter}{\numberline {7}Lecture 7 - 07-04-2020}{45}%
\contentsline {section}{\numberline {7.1}Chernoff-Hoeffding bound}{45}%
\contentsline {section}{\numberline {7.2}Union Bound}{46}%
\contentsline {section}{\numberline {7.3}Studying overfitting of a ERM}{50}%
\contentsline {chapter}{\numberline {8}Lecture 8 - 07-04-2020}{52}%
\contentsline {section}{\numberline {8.1}The problem of estimating risk in practice}{53}%
\contentsline {section}{\numberline {8.2}Cross-validation}{55}%
\contentsline {section}{\numberline {8.3}Nested cross validation}{57}%
\contentsline {chapter}{\numberline {9}Lecture 9 - 07-04-2020}{58}%
\contentsline {section}{\numberline {9.1}Tree predictors}{58}%
\contentsline {subsection}{\numberline {9.1.1}Catalan Number}{60}%
\contentsline {chapter}{\numberline {10}Lecture 10 - 07-04-2020}{64}%
\contentsline {section}{\numberline {10.1}TO BE DEFINED}{64}%
\contentsline {section}{\numberline {10.2}MANCANO 20 MINUTI DI LEZIONE}{64}%
\contentsline {section}{\numberline {10.3}Compare risk for zero-one loss}{66}%
\contentsline {chapter}{\numberline {11}Lecture 11 - 20-04-2020}{68}%
\contentsline {section}{\numberline {11.1}Analysis of $K_{NN}$}{68}%
\contentsline {subsection}{\numberline {11.1.1}Study of $K_{NN}$}{71}%
\contentsline {subsection}{\numberline {11.1.2}Study of trees}{72}%
\contentsline {section}{\numberline {11.2}Non-parametric Algorithms}{73}%
\contentsline {subsection}{\numberline {11.2.1}Example of parametric algorithms}{74}%
\contentsline {chapter}{\numberline {12}Lecture 12 - 21-04-2020}{75}%
\contentsline {section}{\numberline {12.1}Non-parametric algorithms}{75}%