Mirror of https://github.com/Andreaierardi/Master-DataScience-Notes.git, synced 2025-02-10 02:16:46 +01:00

Commit 1de9dac481 (parent 44250ae388): up main after lect 22

@@ -1,221 +1,228 @@
\relax
\@nameuse{bbl@beforestart}
\babel@aux{english}{}
\@writefile{toc}{\contentsline {chapter}{\numberline {1}Lecture 1 - 09-03-2020}{7}\protected@file@percent }
\@writefile{lof}{\addvspace {10\p@ }}
\@writefile{lot}{\addvspace {10\p@ }}
\@writefile{toc}{\contentsline {section}{\numberline {1.1}Introduction of the course}{7}\protected@file@percent }
\@writefile{toc}{\contentsline {section}{\numberline {1.2}Examples}{7}\protected@file@percent }
\@writefile{toc}{\contentsline {subsection}{\numberline {1.2.1}Spam filtering}{10}\protected@file@percent }
\@writefile{toc}{\contentsline {chapter}{\numberline {2}Lecture 2 - 10-03-2020}{11}\protected@file@percent }
\@writefile{lof}{\addvspace {10\p@ }}
\@writefile{lot}{\addvspace {10\p@ }}
\@writefile{toc}{\contentsline {section}{\numberline {2.1}Argomento}{11}\protected@file@percent }
\@writefile{toc}{\contentsline {section}{\numberline {2.2}Loss}{11}\protected@file@percent }
\@writefile{toc}{\contentsline {subsection}{\numberline {2.2.1}Absolute Loss}{11}\protected@file@percent }
\@writefile{lof}{\contentsline {figure}{\numberline {2.1}{\ignorespaces Example of domain of $K_{NN}$}}{12}\protected@file@percent }
\@writefile{toc}{\contentsline {subsection}{\numberline {2.2.2}Square Loss}{12}\protected@file@percent }
\@writefile{lof}{\contentsline {figure}{\numberline {2.2}{\ignorespaces Example of domain of $K_{NN}$}}{12}\protected@file@percent }
\@writefile{toc}{\contentsline {subsection}{\numberline {2.2.3}Example of information of square loss}{13}\protected@file@percent }
\@writefile{toc}{\contentsline {subsection}{\numberline {2.2.4}labels and losses}{14}\protected@file@percent }
\@writefile{toc}{\contentsline {subsection}{\numberline {2.2.5}Example TF(idf) documents encoding}{16}\protected@file@percent }
\@writefile{toc}{\contentsline {chapter}{\numberline {3}Lecture 3 - 16-03-2020}{18}\protected@file@percent }
\@writefile{lof}{\addvspace {10\p@ }}
\@writefile{lot}{\addvspace {10\p@ }}
\@writefile{lof}{\contentsline {figure}{\numberline {3.1}{\ignorespaces Example of domain of $K_{NN}$}}{18}\protected@file@percent }
\@writefile{toc}{\contentsline {section}{\numberline {3.1}Overfitting}{20}\protected@file@percent }
\@writefile{toc}{\contentsline {subsection}{\numberline {3.1.1}Noise in the data}{20}\protected@file@percent }
\@writefile{toc}{\contentsline {section}{\numberline {3.2}Underfitting}{22}\protected@file@percent }
\@writefile{toc}{\contentsline {section}{\numberline {3.3}Nearest neighbour}{22}\protected@file@percent }
\@writefile{lof}{\contentsline {figure}{\numberline {3.2}{\ignorespaces Example of domain of $K_{NN}$}}{23}\protected@file@percent }
\@writefile{lof}{\contentsline {figure}{\numberline {3.3}{\ignorespaces Example of domain of $K_{NN}$}}{23}\protected@file@percent }
\@writefile{toc}{\contentsline {chapter}{\numberline {4}Lecture 4 - 17-03-2020}{25}\protected@file@percent }
\@writefile{lof}{\addvspace {10\p@ }}
\@writefile{lot}{\addvspace {10\p@ }}
\@writefile{toc}{\contentsline {section}{\numberline {4.1}Computing $h_{NN}$}{25}\protected@file@percent }
\@writefile{lof}{\contentsline {figure}{\numberline {4.1}{\ignorespaces Example of domain of $K_{NN}$}}{26}\protected@file@percent }
\@writefile{toc}{\contentsline {section}{\numberline {4.2}Tree Predictor}{27}\protected@file@percent }
\@writefile{lof}{\contentsline {figure}{\numberline {4.2}{\ignorespaces Example of domain of $K_{NN}$}}{27}\protected@file@percent }
\@writefile{lof}{\contentsline {figure}{\numberline {4.3}{\ignorespaces Example of domain of $K_{NN}$}}{28}\protected@file@percent }
\@writefile{lof}{\contentsline {figure}{\numberline {4.4}{\ignorespaces Example of domain of $K_{NN}$}}{29}\protected@file@percent }
\@writefile{lof}{\contentsline {figure}{\numberline {4.5}{\ignorespaces Example of domain of $K_{NN}$}}{29}\protected@file@percent }
\@writefile{lof}{\contentsline {figure}{\numberline {4.6}{\ignorespaces Example of domain of $K_{NN}$}}{29}\protected@file@percent }
\@writefile{lof}{\contentsline {figure}{\numberline {4.7}{\ignorespaces Example of domain of $K_{NN}$}}{30}\protected@file@percent }
\@writefile{toc}{\contentsline {chapter}{\numberline {5}Lecture 5 - 23-03-2020}{31}\protected@file@percent }
\@writefile{lof}{\addvspace {10\p@ }}
\@writefile{lot}{\addvspace {10\p@ }}
\@writefile{toc}{\contentsline {section}{\numberline {5.1}Tree Classifier}{31}\protected@file@percent }
\@writefile{lof}{\contentsline {figure}{\numberline {5.1}{\ignorespaces Example of domain of $K_{NN}$}}{31}\protected@file@percent }
\@writefile{lof}{\contentsline {figure}{\numberline {5.2}{\ignorespaces Example of domain of $K_{NN}$}}{32}\protected@file@percent }
\@writefile{toc}{\contentsline {section}{\numberline {5.2}Jensen’s inequality}{33}\protected@file@percent }
\@writefile{lof}{\contentsline {figure}{\numberline {5.3}{\ignorespaces Example of domain of $K_{NN}$}}{33}\protected@file@percent }
\@writefile{lof}{\contentsline {figure}{\numberline {5.4}{\ignorespaces Example of domain of $K_{NN}$}}{33}\protected@file@percent }
\@writefile{lof}{\contentsline {figure}{\numberline {5.5}{\ignorespaces Example of domain of $K_{NN}$}}{34}\protected@file@percent }
\@writefile{lof}{\contentsline {figure}{\numberline {5.6}{\ignorespaces Example of domain of $K_{NN}$}}{35}\protected@file@percent }
\@writefile{lof}{\contentsline {figure}{\numberline {5.7}{\ignorespaces Example of domain of $K_{NN}$}}{36}\protected@file@percent }
\@writefile{lof}{\contentsline {figure}{\numberline {5.8}{\ignorespaces Example of domain of $K_{NN}$}}{36}\protected@file@percent }
\@writefile{lof}{\contentsline {figure}{\numberline {5.9}{\ignorespaces Example of domain of $K_{NN}$}}{36}\protected@file@percent }
\@writefile{toc}{\contentsline {section}{\numberline {5.3}Tree Predictor}{37}\protected@file@percent }
\@writefile{lof}{\contentsline {figure}{\numberline {5.10}{\ignorespaces Example of domain of $K_{NN}$}}{37}\protected@file@percent }
\@writefile{toc}{\contentsline {section}{\numberline {5.4}Statistical model for Machine Learning}{38}\protected@file@percent }
\@writefile{toc}{\contentsline {chapter}{\numberline {6}Lecture 6 - 24-03-2020}{40}\protected@file@percent }
\@writefile{lof}{\addvspace {10\p@ }}
\@writefile{lot}{\addvspace {10\p@ }}
\@writefile{toc}{\contentsline {section}{\numberline {6.1}Bayes Optimal Predictor}{40}\protected@file@percent }
\@writefile{toc}{\contentsline {subsection}{\numberline {6.1.1}Square Loss}{41}\protected@file@percent }
\@writefile{lof}{\contentsline {figure}{\numberline {6.1}{\ignorespaces Example of domain of $K_{NN}$}}{41}\protected@file@percent }
\@writefile{toc}{\contentsline {subsection}{\numberline {6.1.2}Zero-one loss for binary classification}{42}\protected@file@percent }
\@writefile{lof}{\contentsline {figure}{\numberline {6.2}{\ignorespaces Example of domain of $K_{NN}$}}{43}\protected@file@percent }
\@writefile{lof}{\contentsline {figure}{\numberline {6.3}{\ignorespaces Example of domain of $K_{NN}$}}{43}\protected@file@percent }
\@writefile{lof}{\contentsline {figure}{\numberline {6.4}{\ignorespaces Example of domain of $K_{NN}$}}{44}\protected@file@percent }
\@writefile{toc}{\contentsline {section}{\numberline {6.2}Bayes Risk}{45}\protected@file@percent }
\@writefile{lof}{\contentsline {figure}{\numberline {6.5}{\ignorespaces Example of domain of $K_{NN}$}}{45}\protected@file@percent }
\@writefile{toc}{\contentsline {chapter}{\numberline {7}Lecture 7 - 30-03-2020}{47}\protected@file@percent }
\@writefile{lof}{\addvspace {10\p@ }}
\@writefile{lot}{\addvspace {10\p@ }}
\@writefile{toc}{\contentsline {section}{\numberline {7.1}Chernoff-Hoffding bound}{47}\protected@file@percent }
\@writefile{toc}{\contentsline {section}{\numberline {7.2}Union Bound}{48}\protected@file@percent }
\@writefile{lof}{\contentsline {figure}{\numberline {7.1}{\ignorespaces Example}}{48}\protected@file@percent }
\@writefile{lof}{\contentsline {figure}{\numberline {7.2}{\ignorespaces Example}}{49}\protected@file@percent }
\@writefile{lof}{\contentsline {figure}{\numberline {7.3}{\ignorespaces Example}}{49}\protected@file@percent }
\@writefile{lof}{\contentsline {figure}{\numberline {7.4}{\ignorespaces Example}}{50}\protected@file@percent }
\@writefile{lof}{\contentsline {figure}{\numberline {7.5}{\ignorespaces Example}}{50}\protected@file@percent }
\@writefile{lof}{\contentsline {figure}{\numberline {7.6}{\ignorespaces Draw of how $\hat {h}$, $h^*$ and $f^*$ are represented}}{51}\protected@file@percent }
\@writefile{toc}{\contentsline {section}{\numberline {7.3}Studying overfitting of a ERM}{52}\protected@file@percent }
\@writefile{toc}{\contentsline {chapter}{\numberline {8}Lecture 8 - 31-03-2020}{54}\protected@file@percent }
\@writefile{lof}{\addvspace {10\p@ }}
\@writefile{lot}{\addvspace {10\p@ }}
\@writefile{lof}{\contentsline {figure}{\numberline {8.1}{\ignorespaces Representation of $\hat {h}$, $h^*$ and $f^*$ }}{54}\protected@file@percent }
\@writefile{lof}{\contentsline {figure}{\numberline {8.2}{\ignorespaces Example}}{55}\protected@file@percent }
\@writefile{toc}{\contentsline {section}{\numberline {8.1}The problem of estimating risk in practise}{55}\protected@file@percent }
\@writefile{toc}{\contentsline {section}{\numberline {8.2}Cross-validation}{57}\protected@file@percent }
\@writefile{lof}{\contentsline {figure}{\numberline {8.3}{\ignorespaces Splitting test and training set}}{57}\protected@file@percent }
\@writefile{lof}{\contentsline {figure}{\numberline {8.4}{\ignorespaces K-folds}}{58}\protected@file@percent }
\@writefile{toc}{\contentsline {section}{\numberline {8.3}Nested cross validation}{59}\protected@file@percent }
\@writefile{lof}{\contentsline {figure}{\numberline {8.5}{\ignorespaces Nested Cross Validation}}{59}\protected@file@percent }
\@writefile{toc}{\contentsline {chapter}{\numberline {9}Lecture 9 - 06-04-2020}{60}\protected@file@percent }
\@writefile{lof}{\addvspace {10\p@ }}
\@writefile{lot}{\addvspace {10\p@ }}
\@writefile{toc}{\contentsline {section}{\numberline {9.1}Tree predictors}{60}\protected@file@percent }
\@writefile{lof}{\contentsline {figure}{\numberline {9.1}{\ignorespaces Tree building}}{60}\protected@file@percent }
\@writefile{lof}{\contentsline {figure}{\numberline {9.2}{\ignorespaces Tree with at most N node}}{61}\protected@file@percent }
\@writefile{toc}{\contentsline {subsection}{\numberline {9.1.1}Catalan Number}{62}\protected@file@percent }
\@writefile{lof}{\contentsline {figure}{\numberline {9.3}{\ignorespaces Algorithm for tree predictors}}{64}\protected@file@percent }
\@writefile{toc}{\contentsline {chapter}{\numberline {10}Lecture 10 - 07-04-2020}{66}\protected@file@percent }
\@writefile{lof}{\addvspace {10\p@ }}
\@writefile{lot}{\addvspace {10\p@ }}
\@writefile{toc}{\contentsline {section}{\numberline {10.1}TO BE DEFINE}{66}\protected@file@percent }
\@writefile{toc}{\contentsline {section}{\numberline {10.2}MANCANO 20 MINUTI DI LEZIONE}{66}\protected@file@percent }
\@writefile{lof}{\contentsline {figure}{\numberline {10.1}{\ignorespaces Point (2) - where \hskip 1em\relax $y = cx +q$ \hskip 2em\relax $y = -cx +q $}}{68}\protected@file@percent }
\@writefile{toc}{\contentsline {section}{\numberline {10.3}Compare risk for zero-one loss}{68}\protected@file@percent }
\@writefile{lof}{\contentsline {figure}{\numberline {10.2}{\ignorespaces Point}}{69}\protected@file@percent }
\@writefile{toc}{\contentsline {chapter}{\numberline {11}Lecture 11 - 20-04-2020}{70}\protected@file@percent }
\@writefile{lof}{\addvspace {10\p@ }}
\@writefile{lot}{\addvspace {10\p@ }}
\@writefile{toc}{\contentsline {section}{\numberline {11.1}Analysis of $K_{NN}$}{70}\protected@file@percent }
\@writefile{lof}{\contentsline {figure}{\numberline {11.1}{\ignorespaces Example of domain of $K_{NN}$}}{70}\protected@file@percent }
\@writefile{lof}{\contentsline {figure}{\numberline {11.2}{\ignorespaces Diagonal length}}{71}\protected@file@percent }
\@writefile{lof}{\contentsline {figure}{\numberline {11.3}{\ignorespaces Shape of the function}}{72}\protected@file@percent }
\@writefile{toc}{\contentsline {subsection}{\numberline {11.1.1}Study of $K_{NN}$}{73}\protected@file@percent }
\@writefile{toc}{\contentsline {subsection}{\numberline {11.1.2}study of trees}{74}\protected@file@percent }
\@writefile{toc}{\contentsline {section}{\numberline {11.2}Non-parametric Algorithms}{75}\protected@file@percent }
\@writefile{lof}{\contentsline {figure}{\numberline {11.4}{\ignorespaces Parametric and non parametric growing as training set getting larger}}{76}\protected@file@percent }
\@writefile{toc}{\contentsline {subsection}{\numberline {11.2.1}Example of parametric algorithms}{76}\protected@file@percent }
\@writefile{toc}{\contentsline {chapter}{\numberline {12}Lecture 12 - 21-04-2020}{77}\protected@file@percent }
\@writefile{lof}{\addvspace {10\p@ }}
\@writefile{lot}{\addvspace {10\p@ }}
\@writefile{toc}{\contentsline {section}{\numberline {12.1}Non parametrics algorithms}{77}\protected@file@percent }
\@writefile{toc}{\contentsline {subsection}{\numberline {12.1.1}Theorem: No free lunch}{77}\protected@file@percent }
\@writefile{lof}{\contentsline {figure}{\numberline {12.1}{\ignorespaces Tree building}}{78}\protected@file@percent }
\@writefile{toc}{\contentsline {section}{\numberline {12.2}Highly Parametric Learning Algorithm}{79}\protected@file@percent }
\@writefile{toc}{\contentsline {subsection}{\numberline {12.2.1}Linear Predictors}{79}\protected@file@percent }
\@writefile{lof}{\contentsline {figure}{\numberline {12.2}{\ignorespaces Dot product}}{79}\protected@file@percent }
\@writefile{lof}{\contentsline {figure}{\numberline {12.3}{\ignorespaces Dot product}}{80}\protected@file@percent }
\@writefile{lof}{\contentsline {figure}{\numberline {12.4}{\ignorespaces Hyperplane}}{80}\protected@file@percent }
\@writefile{lof}{\contentsline {figure}{\numberline {12.5}{\ignorespaces Hyperplane}}{81}\protected@file@percent }
\@writefile{lof}{\contentsline {figure}{\numberline {12.6}{\ignorespaces Hyperplane}}{81}\protected@file@percent }
\@writefile{lof}{\contentsline {figure}{\numberline {12.7}{\ignorespaces Example of one dimensional hyperplane}}{82}\protected@file@percent }
\@writefile{toc}{\contentsline {subsection}{\numberline {12.2.2}MinDisagreement}{83}\protected@file@percent }
\@writefile{toc}{\contentsline {chapter}{\numberline {13}Lecture 13 - 27-04-2020}{84}\protected@file@percent }
\@writefile{lof}{\addvspace {10\p@ }}
\@writefile{lot}{\addvspace {10\p@ }}
\@writefile{toc}{\contentsline {section}{\numberline {13.1}Linear prediction}{84}\protected@file@percent }
\@writefile{toc}{\contentsline {subsection}{\numberline {13.1.1}MinDisOpt}{84}\protected@file@percent }
\@writefile{lof}{\contentsline {figure}{\numberline {13.1}{\ignorespaces Tree building}}{85}\protected@file@percent }
\@writefile{lof}{\contentsline {figure}{\numberline {13.2}{\ignorespaces Tree building}}{85}\protected@file@percent }
\@writefile{lof}{\contentsline {figure}{\numberline {13.3}{\ignorespaces Tree building}}{86}\protected@file@percent }
\@writefile{lof}{\contentsline {figure}{\numberline {13.4}{\ignorespaces Feasibilty problem}}{86}\protected@file@percent }
\@writefile{toc}{\contentsline {section}{\numberline {13.2}The Perception Algorithm}{87}\protected@file@percent }
\@writefile{lof}{\contentsline {figure}{\numberline {13.5}{\ignorespaces }}{87}\protected@file@percent }
\@writefile{toc}{\contentsline {subsection}{\numberline {13.2.1}Perception convergence Theorem}{88}\protected@file@percent }
\@writefile{lof}{\contentsline {figure}{\numberline {13.6}{\ignorespaces }}{88}\protected@file@percent }
\@writefile{toc}{\contentsline {chapter}{\numberline {14}Lecture 14 - 28-04-2020}{91}\protected@file@percent }
\@writefile{lof}{\addvspace {10\p@ }}
\@writefile{lot}{\addvspace {10\p@ }}
\@writefile{toc}{\contentsline {section}{\numberline {14.1}Linear Regression}{91}\protected@file@percent }
\@writefile{toc}{\contentsline {subsection}{\numberline {14.1.1}The problem of linear regression}{91}\protected@file@percent }
\@writefile{toc}{\contentsline {subsection}{\numberline {14.1.2}Ridge regression}{92}\protected@file@percent }
\@writefile{lof}{\contentsline {figure}{\numberline {14.1}{\ignorespaces }}{92}\protected@file@percent }
\@writefile{toc}{\contentsline {section}{\numberline {14.2}Percetron}{93}\protected@file@percent }
\@writefile{toc}{\contentsline {subsection}{\numberline {14.2.1}Online Learning }{94}\protected@file@percent }
\@writefile{lof}{\contentsline {figure}{\numberline {14.2}{\ignorespaces }}{94}\protected@file@percent }
\@writefile{lof}{\contentsline {figure}{\numberline {14.3}{\ignorespaces }}{95}\protected@file@percent }
\@writefile{toc}{\contentsline {subsection}{\numberline {14.2.2}Online Gradiant Descent (OGD)}{96}\protected@file@percent }
\@writefile{lof}{\contentsline {figure}{\numberline {14.4}{\ignorespaces }}{96}\protected@file@percent }
\@writefile{toc}{\contentsline {chapter}{\numberline {15}Lecture 15 - 04-05-2020}{97}\protected@file@percent }
\@writefile{lof}{\addvspace {10\p@ }}
\@writefile{lot}{\addvspace {10\p@ }}
\@writefile{toc}{\contentsline {section}{\numberline {15.1}Regret analysis of OGD}{97}\protected@file@percent }
\@writefile{toc}{\contentsline {subsection}{\numberline {15.1.1}Projected OGD}{98}\protected@file@percent }
\@writefile{lof}{\contentsline {figure}{\numberline {15.1}{\ignorespaces }}{98}\protected@file@percent }
\@writefile{lof}{\contentsline {figure}{\numberline {15.2}{\ignorespaces }}{98}\protected@file@percent }
\@writefile{lof}{\contentsline {figure}{\numberline {15.3}{\ignorespaces }}{99}\protected@file@percent }
\@writefile{lof}{\contentsline {figure}{\numberline {15.4}{\ignorespaces }}{100}\protected@file@percent }
\@writefile{toc}{\contentsline {chapter}{\numberline {16}Lecture 16 - 05-05-2020}{102}\protected@file@percent }
\@writefile{lof}{\addvspace {10\p@ }}
\@writefile{lot}{\addvspace {10\p@ }}
\@writefile{toc}{\contentsline {section}{\numberline {16.1}Analysis of Perceptron in the non-separable case using OGD framework.}{102}\protected@file@percent }
\@writefile{lof}{\contentsline {figure}{\numberline {16.1}{\ignorespaces }}{102}\protected@file@percent }
\@writefile{lof}{\contentsline {figure}{\numberline {16.2}{\ignorespaces Hinge loss}}{103}\protected@file@percent }
\@writefile{lof}{\contentsline {figure}{\numberline {16.3}{\ignorespaces }}{104}\protected@file@percent }
\@writefile{toc}{\contentsline {subsection}{\numberline {16.1.1}Strongly convex loss functions}{106}\protected@file@percent }
\@writefile{lof}{\contentsline {figure}{\numberline {16.4}{\ignorespaces Example of more type of convex function}}{106}\protected@file@percent }
\@writefile{toc}{\contentsline {chapter}{\numberline {17}Lecture 17 - 11-05-2020}{108}\protected@file@percent }
\@writefile{lof}{\addvspace {10\p@ }}
\@writefile{lot}{\addvspace {10\p@ }}
\@writefile{toc}{\contentsline {section}{\numberline {17.1}Strongly convex loss functions}{108}\protected@file@percent }
\@writefile{toc}{\contentsline {subsection}{\numberline {17.1.1}OGD for Strongly Convex losses}{108}\protected@file@percent }
\@writefile{toc}{\contentsline {subsection}{\numberline {17.1.2}Relate sequential risk and statistical risk}{109}\protected@file@percent }
\@writefile{toc}{\contentsline {chapter}{\numberline {18}Lecture 18 - 12-05-2020}{112}\protected@file@percent }
\@writefile{lof}{\addvspace {10\p@ }}
\@writefile{lot}{\addvspace {10\p@ }}
\@writefile{toc}{\contentsline {section}{\numberline {18.1}Kernel functions}{112}\protected@file@percent }
\@writefile{toc}{\contentsline {subsection}{\numberline {18.1.1}Feature expansion}{112}\protected@file@percent }
\@writefile{lof}{\contentsline {figure}{\numberline {18.1}{\ignorespaces }}{112}\protected@file@percent }
\@writefile{toc}{\contentsline {subsection}{\numberline {18.1.2}Kernels implements feature expansion (Efficiently}{113}\protected@file@percent }
\@writefile{toc}{\contentsline {section}{\numberline {18.2}Gaussian Kernel}{114}\protected@file@percent }
\@writefile{lof}{\contentsline {figure}{\numberline {18.2}{\ignorespaces }}{115}\protected@file@percent }
\@writefile{lof}{\contentsline {figure}{\numberline {18.3}{\ignorespaces }}{115}\protected@file@percent }
\@writefile{toc}{\contentsline {chapter}{\numberline {19}Lecture 19 - 18-05-2020}{117}\protected@file@percent }
\@writefile{lof}{\addvspace {10\p@ }}
\@writefile{lot}{\addvspace {10\p@ }}
\@writefile{toc}{\contentsline {section}{\numberline {19.1}Support Vector Machine (SVM)}{120}\protected@file@percent }
\@writefile{lof}{\contentsline {figure}{\numberline {19.1}{\ignorespaces Draw of SVG}}{120}\protected@file@percent }
\@writefile{toc}{\contentsline {chapter}{\numberline {20}Lecture 20 - 19-05-2020}{121}\protected@file@percent }
\@writefile{lof}{\addvspace {10\p@ }}
\@writefile{lot}{\addvspace {10\p@ }}
\@writefile{toc}{\contentsline {section}{\numberline {20.1}Support Vector Machine Analysis}{121}\protected@file@percent }
\@writefile{toc}{\contentsline {subsection}{\numberline {20.1.1}Fritz John Optimality Conditions}{121}\protected@file@percent }
\@writefile{toc}{\contentsline {subsection}{\numberline {20.1.2}Non-separable case}{122}\protected@file@percent }
\@writefile{lof}{\contentsline {figure}{\numberline {20.1}{\ignorespaces }}{122}\protected@file@percent }
\@writefile{lof}{\contentsline {figure}{\numberline {20.2}{\ignorespaces }}{123}\protected@file@percent }
\@writefile{lof}{\contentsline {figure}{\numberline {20.3}{\ignorespaces }}{123}\protected@file@percent }
\@writefile{toc}{\contentsline {section}{\numberline {20.2}Pegasos: OGD to solve SVM}{124}\protected@file@percent }
\@writefile{toc}{\contentsline {chapter}{\numberline {21}Lecture 21 - 25-05-2020}{126}\protected@file@percent }
\@writefile{lof}{\addvspace {10\p@ }}
\@writefile{lot}{\addvspace {10\p@ }}
\@writefile{toc}{\contentsline {section}{\numberline {21.1}Pegasos in Kernel space}{126}\protected@file@percent }
\@writefile{toc}{\contentsline {section}{\numberline {21.2}Stability}{126}\protected@file@percent }
-\bibstyle{abbrv}
-\bibdata{main}
\@writefile{lof}{\contentsline {figure}{\numberline {21.1}{\ignorespaces }}{130}\protected@file@percent }
+\@writefile{toc}{\contentsline {chapter}{\numberline {22}Lecture 22 - 26-05-2020}{131}\protected@file@percent }
+\@writefile{lof}{\addvspace {10\p@ }}
+\@writefile{lot}{\addvspace {10\p@ }}
+\@writefile{toc}{\contentsline {section}{\numberline {22.1}Continous of Pegasos}{131}\protected@file@percent }
+\@writefile{lof}{\contentsline {figure}{\numberline {22.1}{\ignorespaces }}{131}\protected@file@percent }
+\@writefile{toc}{\contentsline {section}{\numberline {22.2}Boosting and ensemble predictors }{132}\protected@file@percent }
+\@writefile{toc}{\contentsline {subsection}{\numberline {22.2.1}Bagging}{134}\protected@file@percent }
+\@writefile{toc}{\contentsline {subsection}{\numberline {22.2.2}Random Forest}{134}\protected@file@percent }
+\@writefile{lof}{\contentsline {figure}{\numberline {22.2}{\ignorespaces }}{134}\protected@file@percent }

@@ -1,102 +1,105 @@
\babel@toc {english}{}
\addvspace {10\p@ }
\addvspace {10\p@ }
\contentsline {figure}{\numberline {2.1}{\ignorespaces Example of domain of $K_{NN}$}}{12}%
\contentsline {figure}{\numberline {2.2}{\ignorespaces Example of domain of $K_{NN}$}}{12}%
\addvspace {10\p@ }
\contentsline {figure}{\numberline {3.1}{\ignorespaces Example of domain of $K_{NN}$}}{18}%
\contentsline {figure}{\numberline {3.2}{\ignorespaces Example of domain of $K_{NN}$}}{23}%
\contentsline {figure}{\numberline {3.3}{\ignorespaces Example of domain of $K_{NN}$}}{23}%
\addvspace {10\p@ }
\contentsline {figure}{\numberline {4.1}{\ignorespaces Example of domain of $K_{NN}$}}{26}%
\contentsline {figure}{\numberline {4.2}{\ignorespaces Example of domain of $K_{NN}$}}{27}%
\contentsline {figure}{\numberline {4.3}{\ignorespaces Example of domain of $K_{NN}$}}{28}%
\contentsline {figure}{\numberline {4.4}{\ignorespaces Example of domain of $K_{NN}$}}{29}%
\contentsline {figure}{\numberline {4.5}{\ignorespaces Example of domain of $K_{NN}$}}{29}%
\contentsline {figure}{\numberline {4.6}{\ignorespaces Example of domain of $K_{NN}$}}{29}%
\contentsline {figure}{\numberline {4.7}{\ignorespaces Example of domain of $K_{NN}$}}{30}%
\addvspace {10\p@ }
\contentsline {figure}{\numberline {5.1}{\ignorespaces Example of domain of $K_{NN}$}}{31}%
\contentsline {figure}{\numberline {5.2}{\ignorespaces Example of domain of $K_{NN}$}}{32}%
\contentsline {figure}{\numberline {5.3}{\ignorespaces Example of domain of $K_{NN}$}}{33}%
\contentsline {figure}{\numberline {5.4}{\ignorespaces Example of domain of $K_{NN}$}}{33}%
\contentsline {figure}{\numberline {5.5}{\ignorespaces Example of domain of $K_{NN}$}}{34}%
\contentsline {figure}{\numberline {5.6}{\ignorespaces Example of domain of $K_{NN}$}}{35}%
\contentsline {figure}{\numberline {5.7}{\ignorespaces Example of domain of $K_{NN}$}}{36}%
\contentsline {figure}{\numberline {5.8}{\ignorespaces Example of domain of $K_{NN}$}}{36}%
\contentsline {figure}{\numberline {5.9}{\ignorespaces Example of domain of $K_{NN}$}}{36}%
\contentsline {figure}{\numberline {5.10}{\ignorespaces Example of domain of $K_{NN}$}}{37}%
\addvspace {10\p@ }
\contentsline {figure}{\numberline {6.1}{\ignorespaces Example of domain of $K_{NN}$}}{41}%
\contentsline {figure}{\numberline {6.2}{\ignorespaces Example of domain of $K_{NN}$}}{43}%
\contentsline {figure}{\numberline {6.3}{\ignorespaces Example of domain of $K_{NN}$}}{43}%
\contentsline {figure}{\numberline {6.4}{\ignorespaces Example of domain of $K_{NN}$}}{44}%
\contentsline {figure}{\numberline {6.5}{\ignorespaces Example of domain of $K_{NN}$}}{45}%
\addvspace {10\p@ }
\contentsline {figure}{\numberline {7.1}{\ignorespaces Example}}{48}%
\contentsline {figure}{\numberline {7.2}{\ignorespaces Example}}{49}%
\contentsline {figure}{\numberline {7.3}{\ignorespaces Example}}{49}%
\contentsline {figure}{\numberline {7.4}{\ignorespaces Example}}{50}%
\contentsline {figure}{\numberline {7.5}{\ignorespaces Example}}{50}%
\contentsline {figure}{\numberline {7.6}{\ignorespaces Draw of how $\hat {h}$, $h^*$ and $f^*$ are represented}}{51}%
\addvspace {10\p@ }
\contentsline {figure}{\numberline {8.1}{\ignorespaces Representation of $\hat {h}$, $h^*$ and $f^*$ }}{54}%
\contentsline {figure}{\numberline {8.2}{\ignorespaces Example}}{55}%
\contentsline {figure}{\numberline {8.3}{\ignorespaces Splitting test and training set}}{57}%
\contentsline {figure}{\numberline {8.4}{\ignorespaces K-folds}}{58}%
\contentsline {figure}{\numberline {8.5}{\ignorespaces Nested Cross Validation}}{59}%
\addvspace {10\p@ }
\contentsline {figure}{\numberline {9.1}{\ignorespaces Tree building}}{60}%
\contentsline {figure}{\numberline {9.2}{\ignorespaces Tree with at most N node}}{61}%
\contentsline {figure}{\numberline {9.3}{\ignorespaces Algorithm for tree predictors}}{64}%
\addvspace {10\p@ }
\contentsline {figure}{\numberline {10.1}{\ignorespaces Point (2) - where \hskip 1em\relax $y = cx +q$ \hskip 2em\relax $y = -cx +q $}}{68}%
\contentsline {figure}{\numberline {10.2}{\ignorespaces Point}}{69}%
\addvspace {10\p@ }
\contentsline {figure}{\numberline {11.1}{\ignorespaces Example of domain of $K_{NN}$}}{70}%
\contentsline {figure}{\numberline {11.2}{\ignorespaces Diagonal length}}{71}%
\contentsline {figure}{\numberline {11.3}{\ignorespaces Shape of the function}}{72}%
\contentsline {figure}{\numberline {11.4}{\ignorespaces Parametric and non parametric growing as training set getting larger}}{76}%
\addvspace {10\p@ }
\contentsline {figure}{\numberline {12.1}{\ignorespaces Tree building}}{78}%
\contentsline {figure}{\numberline {12.2}{\ignorespaces Dot product}}{79}%
\contentsline {figure}{\numberline {12.3}{\ignorespaces Dot product}}{80}%
\contentsline {figure}{\numberline {12.4}{\ignorespaces Hyperplane}}{80}%
\contentsline {figure}{\numberline {12.5}{\ignorespaces Hyperplane}}{81}%
\contentsline {figure}{\numberline {12.6}{\ignorespaces Hyperplane}}{81}%
\contentsline {figure}{\numberline {12.7}{\ignorespaces Example of one dimensional hyperplane}}{82}%
\addvspace {10\p@ }
\contentsline {figure}{\numberline {13.1}{\ignorespaces Tree building}}{85}%
\contentsline {figure}{\numberline {13.2}{\ignorespaces Tree building}}{85}%
\contentsline {figure}{\numberline {13.3}{\ignorespaces Tree building}}{86}%
\contentsline {figure}{\numberline {13.4}{\ignorespaces Feasibilty problem}}{86}%
\contentsline {figure}{\numberline {13.5}{\ignorespaces }}{87}%
\contentsline {figure}{\numberline {13.6}{\ignorespaces }}{88}%
\addvspace {10\p@ }
\contentsline {figure}{\numberline {14.1}{\ignorespaces }}{92}%
\contentsline {figure}{\numberline {14.2}{\ignorespaces }}{94}%
\contentsline {figure}{\numberline {14.3}{\ignorespaces }}{95}%
\contentsline {figure}{\numberline {14.4}{\ignorespaces }}{96}%
\addvspace {10\p@ }
\contentsline {figure}{\numberline {15.1}{\ignorespaces }}{98}%
\contentsline {figure}{\numberline {15.2}{\ignorespaces }}{98}%
\contentsline {figure}{\numberline {15.3}{\ignorespaces }}{99}%
\contentsline {figure}{\numberline {15.4}{\ignorespaces }}{100}%
\addvspace {10\p@ }
\contentsline {figure}{\numberline {16.1}{\ignorespaces }}{102}%
\contentsline {figure}{\numberline {16.2}{\ignorespaces Hinge loss}}{103}%
\contentsline {figure}{\numberline {16.3}{\ignorespaces }}{104}%
\contentsline {figure}{\numberline {16.4}{\ignorespaces Example of more type of convex function}}{106}%
\addvspace {10\p@ }
\addvspace {10\p@ }
\contentsline {figure}{\numberline {18.1}{\ignorespaces }}{112}%
\contentsline {figure}{\numberline {18.2}{\ignorespaces }}{115}%
\contentsline {figure}{\numberline {18.3}{\ignorespaces }}{115}%
\addvspace {10\p@ }
\contentsline {figure}{\numberline {19.1}{\ignorespaces Draw of SVG}}{120}%
\addvspace {10\p@ }
\contentsline {figure}{\numberline {20.1}{\ignorespaces }}{122}%
\contentsline {figure}{\numberline {20.2}{\ignorespaces }}{123}%
\contentsline {figure}{\numberline {20.3}{\ignorespaces }}{123}%
\addvspace {10\p@ }
\contentsline {figure}{\numberline {21.1}{\ignorespaces }}{130}%
|
||||
\babel@toc {english}{}
\addvspace {10\p@ }
\addvspace {10\p@ }
\contentsline {figure}{\numberline {2.1}{\ignorespaces Example of domain of $K_{NN}$}}{12}%
\contentsline {figure}{\numberline {2.2}{\ignorespaces Example of domain of $K_{NN}$}}{12}%
\addvspace {10\p@ }
\contentsline {figure}{\numberline {3.1}{\ignorespaces Example of domain of $K_{NN}$}}{18}%
\contentsline {figure}{\numberline {3.2}{\ignorespaces Example of domain of $K_{NN}$}}{23}%
\contentsline {figure}{\numberline {3.3}{\ignorespaces Example of domain of $K_{NN}$}}{23}%
\addvspace {10\p@ }
\contentsline {figure}{\numberline {4.1}{\ignorespaces Example of domain of $K_{NN}$}}{26}%
\contentsline {figure}{\numberline {4.2}{\ignorespaces Example of domain of $K_{NN}$}}{27}%
\contentsline {figure}{\numberline {4.3}{\ignorespaces Example of domain of $K_{NN}$}}{28}%
\contentsline {figure}{\numberline {4.4}{\ignorespaces Example of domain of $K_{NN}$}}{29}%
\contentsline {figure}{\numberline {4.5}{\ignorespaces Example of domain of $K_{NN}$}}{29}%
\contentsline {figure}{\numberline {4.6}{\ignorespaces Example of domain of $K_{NN}$}}{29}%
\contentsline {figure}{\numberline {4.7}{\ignorespaces Example of domain of $K_{NN}$}}{30}%
\addvspace {10\p@ }
\contentsline {figure}{\numberline {5.1}{\ignorespaces Example of domain of $K_{NN}$}}{31}%
\contentsline {figure}{\numberline {5.2}{\ignorespaces Example of domain of $K_{NN}$}}{32}%
\contentsline {figure}{\numberline {5.3}{\ignorespaces Example of domain of $K_{NN}$}}{33}%
\contentsline {figure}{\numberline {5.4}{\ignorespaces Example of domain of $K_{NN}$}}{33}%
\contentsline {figure}{\numberline {5.5}{\ignorespaces Example of domain of $K_{NN}$}}{34}%
\contentsline {figure}{\numberline {5.6}{\ignorespaces Example of domain of $K_{NN}$}}{35}%
\contentsline {figure}{\numberline {5.7}{\ignorespaces Example of domain of $K_{NN}$}}{36}%
\contentsline {figure}{\numberline {5.8}{\ignorespaces Example of domain of $K_{NN}$}}{36}%
\contentsline {figure}{\numberline {5.9}{\ignorespaces Example of domain of $K_{NN}$}}{36}%
\contentsline {figure}{\numberline {5.10}{\ignorespaces Example of domain of $K_{NN}$}}{37}%
\addvspace {10\p@ }
\contentsline {figure}{\numberline {6.1}{\ignorespaces Example of domain of $K_{NN}$}}{41}%
\contentsline {figure}{\numberline {6.2}{\ignorespaces Example of domain of $K_{NN}$}}{43}%
\contentsline {figure}{\numberline {6.3}{\ignorespaces Example of domain of $K_{NN}$}}{43}%
\contentsline {figure}{\numberline {6.4}{\ignorespaces Example of domain of $K_{NN}$}}{44}%
\contentsline {figure}{\numberline {6.5}{\ignorespaces Example of domain of $K_{NN}$}}{45}%
\addvspace {10\p@ }
\contentsline {figure}{\numberline {7.1}{\ignorespaces Example}}{48}%
\contentsline {figure}{\numberline {7.2}{\ignorespaces Example}}{49}%
\contentsline {figure}{\numberline {7.3}{\ignorespaces Example}}{49}%
\contentsline {figure}{\numberline {7.4}{\ignorespaces Example}}{50}%
\contentsline {figure}{\numberline {7.5}{\ignorespaces Example}}{50}%
\contentsline {figure}{\numberline {7.6}{\ignorespaces Draw of how $\hat {h}$, $h^*$ and $f^*$ are represented}}{51}%
\addvspace {10\p@ }
\contentsline {figure}{\numberline {8.1}{\ignorespaces Representation of $\hat {h}$, $h^*$ and $f^*$ }}{54}%
\contentsline {figure}{\numberline {8.2}{\ignorespaces Example}}{55}%
\contentsline {figure}{\numberline {8.3}{\ignorespaces Splitting test and training set}}{57}%
\contentsline {figure}{\numberline {8.4}{\ignorespaces K-folds}}{58}%
\contentsline {figure}{\numberline {8.5}{\ignorespaces Nested Cross Validation}}{59}%
\addvspace {10\p@ }
\contentsline {figure}{\numberline {9.1}{\ignorespaces Tree building}}{60}%
\contentsline {figure}{\numberline {9.2}{\ignorespaces Tree with at most N node}}{61}%
\contentsline {figure}{\numberline {9.3}{\ignorespaces Algorithm for tree predictors}}{64}%
\addvspace {10\p@ }
\contentsline {figure}{\numberline {10.1}{\ignorespaces Point (2) - where \hskip 1em\relax $y = cx +q$ \hskip 2em\relax $y = -cx +q $}}{68}%
\contentsline {figure}{\numberline {10.2}{\ignorespaces Point}}{69}%
\addvspace {10\p@ }
\contentsline {figure}{\numberline {11.1}{\ignorespaces Example of domain of $K_{NN}$}}{70}%
\contentsline {figure}{\numberline {11.2}{\ignorespaces Diagonal length}}{71}%
\contentsline {figure}{\numberline {11.3}{\ignorespaces Shape of the function}}{72}%
\contentsline {figure}{\numberline {11.4}{\ignorespaces Parametric and non parametric growing as training set getting larger}}{76}%
\addvspace {10\p@ }
\contentsline {figure}{\numberline {12.1}{\ignorespaces Tree building}}{78}%
\contentsline {figure}{\numberline {12.2}{\ignorespaces Dot product}}{79}%
\contentsline {figure}{\numberline {12.3}{\ignorespaces Dot product}}{80}%
\contentsline {figure}{\numberline {12.4}{\ignorespaces Hyperplane}}{80}%
\contentsline {figure}{\numberline {12.5}{\ignorespaces Hyperplane}}{81}%
\contentsline {figure}{\numberline {12.6}{\ignorespaces Hyperplane}}{81}%
\contentsline {figure}{\numberline {12.7}{\ignorespaces Example of one dimensional hyperplane}}{82}%
\addvspace {10\p@ }
\contentsline {figure}{\numberline {13.1}{\ignorespaces Tree building}}{85}%
\contentsline {figure}{\numberline {13.2}{\ignorespaces Tree building}}{85}%
\contentsline {figure}{\numberline {13.3}{\ignorespaces Tree building}}{86}%
\contentsline {figure}{\numberline {13.4}{\ignorespaces Feasibilty problem}}{86}%
\contentsline {figure}{\numberline {13.5}{\ignorespaces }}{87}%
\contentsline {figure}{\numberline {13.6}{\ignorespaces }}{88}%
\addvspace {10\p@ }
\contentsline {figure}{\numberline {14.1}{\ignorespaces }}{92}%
\contentsline {figure}{\numberline {14.2}{\ignorespaces }}{94}%
\contentsline {figure}{\numberline {14.3}{\ignorespaces }}{95}%
\contentsline {figure}{\numberline {14.4}{\ignorespaces }}{96}%
\addvspace {10\p@ }
\contentsline {figure}{\numberline {15.1}{\ignorespaces }}{98}%
\contentsline {figure}{\numberline {15.2}{\ignorespaces }}{98}%
\contentsline {figure}{\numberline {15.3}{\ignorespaces }}{99}%
\contentsline {figure}{\numberline {15.4}{\ignorespaces }}{100}%
\addvspace {10\p@ }
\contentsline {figure}{\numberline {16.1}{\ignorespaces }}{102}%
\contentsline {figure}{\numberline {16.2}{\ignorespaces Hinge loss}}{103}%
\contentsline {figure}{\numberline {16.3}{\ignorespaces }}{104}%
\contentsline {figure}{\numberline {16.4}{\ignorespaces Example of more type of convex function}}{106}%
\addvspace {10\p@ }
\addvspace {10\p@ }
\contentsline {figure}{\numberline {18.1}{\ignorespaces }}{112}%
\contentsline {figure}{\numberline {18.2}{\ignorespaces }}{115}%
\contentsline {figure}{\numberline {18.3}{\ignorespaces }}{115}%
\addvspace {10\p@ }
\contentsline {figure}{\numberline {19.1}{\ignorespaces Draw of SVG}}{120}%
\addvspace {10\p@ }
\contentsline {figure}{\numberline {20.1}{\ignorespaces }}{122}%
\contentsline {figure}{\numberline {20.2}{\ignorespaces }}{123}%
\contentsline {figure}{\numberline {20.3}{\ignorespaces }}{123}%
\addvspace {10\p@ }
\contentsline {figure}{\numberline {21.1}{\ignorespaces }}{130}%
\addvspace {10\p@ }
\contentsline {figure}{\numberline {22.1}{\ignorespaces }}{131}%
\contentsline {figure}{\numberline {22.2}{\ignorespaces }}{134}%
@ -1,4 +1,4 @@
This is pdfTeX, Version 3.14159265-2.6-1.40.21 (MiKTeX 2.9.7300 64-bit) (preloaded format=pdflatex 2020.4.13) 25 MAY 2020 10:28
This is pdfTeX, Version 3.14159265-2.6-1.40.21 (MiKTeX 2.9.7300 64-bit) (preloaded format=pdflatex 2020.4.13) 26 MAY 2020 10:29
entering extended mode
**./main.tex
(main.tex
@ -3174,52 +3174,98 @@ Package pdftex.def Info: lectures/../img/lez21-img1.JPG used on input line 181

LaTeX Warning: `h' float specifier changed to `ht'.

[129] (main.bbl [130 <./lectures/../img/lez21-img1.JPG>]
[129] (lectures/lecture22.tex [130 <./lectures/../img/lez21-img1.JPG>]
Chapter 22.
<lectures/../img/lez22-img1.JPG, id=527, 164.86594pt x 105.39375pt>
File: lectures/../img/lez22-img1.JPG Graphic file (type jpg)
<use lectures/../img/lez22-img1.JPG>
Package pdftex.def Info: lectures/../img/lez22-img1.JPG used on input line 16.

LaTeX Warning: Empty `thebibliography' environment on input line 3.
(pdftex.def) Requested size: 117.00119pt x 74.79791pt.
[131

) [131
<./lectures/../img/lez22-img1.JPG>]
Underfull \hbox (badness 10000) in paragraph at lines 58--67

] (main.aux) )
[]

Underfull \hbox (badness 10000) in paragraph at lines 69--70

[]

Underfull \hbox (badness 10000) in paragraph at lines 100--105

[]

[132]
Underfull \hbox (badness 10000) in paragraph at lines 111--116

[]

[133]
Underfull \hbox (badness 10000) in paragraph at lines 143--148

[]

Underfull \hbox (badness 10000) in paragraph at lines 158--161

[]

Underfull \hbox (badness 10000) in paragraph at lines 165--166

[]

<lectures/../img/lez22-img2.JPG, id=538, 216.05719pt x 155.07938pt>
File: lectures/../img/lez22-img2.JPG Graphic file (type jpg)
<use lectures/../img/lez22-img2.JPG>
Package pdftex.def Info: lectures/../img/lez22-img2.JPG used on input line 169
.
(pdftex.def) Requested size: 117.00119pt x 83.98059pt.
[134 <./lectures/../img/lez22-img2.JPG>]) [135] (main.aux) )
Here is how much of TeX's memory you used:
5724 strings out of 480934
88274 string characters out of 2909670
5736 strings out of 480934
88839 string characters out of 2909670
336249 words of memory out of 3000000
21307 multiletter control sequences out of 15000+200000
21317 multiletter control sequences out of 15000+200000
561784 words of font info for 96 fonts, out of 3000000 for 9000
1141 hyphenation exceptions out of 8191
34i,13n,42p,348b,361s stack positions out of 5000i,500n,10000p,200000b,50000s
<C:\Users\AndreDany\AppData\Local\MiKTeX\2.9\fonts/pk/ljfour/jkn
appen/ec/dpi600\ecbi1200.pk> <C:\Users\AndreDany\AppData\Local\MiKTeX\2.9\fonts
/pk/ljfour/jknappen/ec/dpi600\tcbx1200.pk> <C:\Users\AndreDany\AppData\Local\Mi
KTeX\2.9\fonts/pk/ljfour/jknappen/ec/dpi600\tcti1200.pk> <C:\Users\AndreDany\Ap
pData\Local\MiKTeX\2.9\fonts/pk/ljfour/jknappen/ec/dpi600\ecti1200.pk> <C:\User
s\AndreDany\AppData\Local\MiKTeX\2.9\fonts/pk/ljfour/jknappen/ec/dpi600\ecbx144
0.pk> <C:\Users\AndreDany\AppData\Local\MiKTeX\2.9\fonts/pk/ljfour/jknappen/ec/
dpi600\tcrm1200.pk> <C:\Users\AndreDany\AppData\Local\MiKTeX\2.9\fonts/pk/ljfou
r/jknappen/ec/dpi600\ecbx1728.pk> <C:\Users\AndreDany\AppData\Local\MiKTeX\2.9\
fonts/pk/ljfour/jknappen/ec/dpi600\ecrm1200.pk> <C:\Users\AndreDany\AppData\Loc
al\MiKTeX\2.9\fonts/pk/ljfour/jknappen/ec/dpi600\ecbx1200.pk> <C:\Users\AndreDa
ny\AppData\Local\MiKTeX\2.9\fonts/pk/ljfour/jknappen/ec/dpi600\ecbx2074.pk> <C:
\Users\AndreDany\AppData\Local\MiKTeX\2.9\fonts/pk/ljfour/jknappen/ec/dpi600\ec
rm2074.pk> <C:\Users\AndreDany\AppData\Local\MiKTeX\2.9\fonts/pk/ljfour/jknappe
n/ec/dpi600\ecbx2488.pk><C:/Program Files/MiKTeX 2.9/fonts/type1/public/amsfont
s/cm/cmbx6.pfb><C:/Program Files/MiKTeX 2.9/fonts/type1/public/amsfonts/cm/cmbx
8.pfb><C:/Program Files/MiKTeX 2.9/fonts/type1/public/amsfonts/cm/cmex10.pfb><C
:/Program Files/MiKTeX 2.9/fonts/type1/public/amsfonts/cm/cmmi10.pfb><C:/Progra
m Files/MiKTeX 2.9/fonts/type1/public/amsfonts/cm/cmmi12.pfb><C:/Program Files/
MiKTeX 2.9/fonts/type1/public/amsfonts/cm/cmmi6.pfb><C:/Program Files/MiKTeX 2.
9/fonts/type1/public/amsfonts/cm/cmmi8.pfb><C:/Program Files/MiKTeX 2.9/fonts/t
ype1/public/amsfonts/cm/cmr12.pfb><C:/Program Files/MiKTeX 2.9/fonts/type1/publ
ic/amsfonts/cm/cmr6.pfb><C:/Program Files/MiKTeX 2.9/fonts/type1/public/amsfont
s/cm/cmr8.pfb><C:/Program Files/MiKTeX 2.9/fonts/type1/public/amsfonts/cm/cmsy1
0.pfb><C:/Program Files/MiKTeX 2.9/fonts/type1/public/amsfonts/cm/cmsy6.pfb><C:
/Program Files/MiKTeX 2.9/fonts/type1/public/amsfonts/cm/cmsy8.pfb><C:/Program 
Files/MiKTeX 2.9/fonts/type1/public/amsfonts/symbols/msam10.pfb><C:/Program Fil
es/MiKTeX 2.9/fonts/type1/public/amsfonts/symbols/msbm10.pfb>
Output written on main.pdf (132 pages, 2720089 bytes).
<C:\Users\AndreDan
y\AppData\Local\MiKTeX\2.9\fonts/pk/ljfour/jknappen/ec/dpi600\ecbi1200.pk> <C:\
Users\AndreDany\AppData\Local\MiKTeX\2.9\fonts/pk/ljfour/jknappen/ec/dpi600\tcb
x1200.pk> <C:\Users\AndreDany\AppData\Local\MiKTeX\2.9\fonts/pk/ljfour/jknappen
/ec/dpi600\tcti1200.pk> <C:\Users\AndreDany\AppData\Local\MiKTeX\2.9\fonts/pk/l
jfour/jknappen/ec/dpi600\ecti1200.pk> <C:\Users\AndreDany\AppData\Local\MiKTeX\
2.9\fonts/pk/ljfour/jknappen/ec/dpi600\ecbx1440.pk> <C:\Users\AndreDany\AppData
\Local\MiKTeX\2.9\fonts/pk/ljfour/jknappen/ec/dpi600\tcrm1200.pk> <C:\Users\And
reDany\AppData\Local\MiKTeX\2.9\fonts/pk/ljfour/jknappen/ec/dpi600\ecbx1728.pk>
<C:\Users\AndreDany\AppData\Local\MiKTeX\2.9\fonts/pk/ljfour/jknappen/ec/dpi60
0\ecrm1200.pk> <C:\Users\AndreDany\AppData\Local\MiKTeX\2.9\fonts/pk/ljfour/jkn
appen/ec/dpi600\ecbx1200.pk> <C:\Users\AndreDany\AppData\Local\MiKTeX\2.9\fonts
/pk/ljfour/jknappen/ec/dpi600\ecbx2074.pk> <C:\Users\AndreDany\AppData\Local\Mi
KTeX\2.9\fonts/pk/ljfour/jknappen/ec/dpi600\ecrm2074.pk> <C:\Users\AndreDany\Ap
pData\Local\MiKTeX\2.9\fonts/pk/ljfour/jknappen/ec/dpi600\ecbx2488.pk><C:/Progr
am Files/MiKTeX 2.9/fonts/type1/public/amsfonts/cm/cmbx6.pfb><C:/Program Files/
MiKTeX 2.9/fonts/type1/public/amsfonts/cm/cmbx8.pfb><C:/Program Files/MiKTeX 2.
9/fonts/type1/public/amsfonts/cm/cmex10.pfb><C:/Program Files/MiKTeX 2.9/fonts/
type1/public/amsfonts/cm/cmmi10.pfb><C:/Program Files/MiKTeX 2.9/fonts/type1/pu
blic/amsfonts/cm/cmmi12.pfb><C:/Program Files/MiKTeX 2.9/fonts/type1/public/ams
fonts/cm/cmmi6.pfb><C:/Program Files/MiKTeX 2.9/fonts/type1/public/amsfonts/cm/
cmmi8.pfb><C:/Program Files/MiKTeX 2.9/fonts/type1/public/amsfonts/cm/cmr12.pfb
><C:/Program Files/MiKTeX 2.9/fonts/type1/public/amsfonts/cm/cmr6.pfb><C:/Progr
am Files/MiKTeX 2.9/fonts/type1/public/amsfonts/cm/cmr8.pfb><C:/Program Files/M
iKTeX 2.9/fonts/type1/public/amsfonts/cm/cmsy10.pfb><C:/Program Files/MiKTeX 2.
9/fonts/type1/public/amsfonts/cm/cmsy6.pfb><C:/Program Files/MiKTeX 2.9/fonts/t
ype1/public/amsfonts/cm/cmsy8.pfb><C:/Program Files/MiKTeX 2.9/fonts/type1/publ
ic/amsfonts/symbols/msam10.pfb><C:/Program Files/MiKTeX 2.9/fonts/type1/public/
amsfonts/symbols/msbm10.pfb>
Output written on main.pdf (136 pages, 2756325 bytes).
PDF statistics:
1045 PDF objects out of 1200 (max. 8388607)
1058 PDF objects out of 1200 (max. 8388607)
0 named destinations out of 1000 (max. 500000)
411 words of extra memory for PDF output out of 10000 (max. 10000000)
421 words of extra memory for PDF output out of 10000 (max. 10000000)
Binary file not shown.
Binary file not shown.
@ -1,160 +1,159 @@
\documentclass[a4paper,12pt]{report}
\usepackage{amsmath}
\usepackage{systeme}
\usepackage{amssymb}
\usepackage{subfiles}
\usepackage[english]{babel}
\usepackage[dvipsnames]{xcolor}
\usepackage{graphicx}
\usepackage{xcolor}
\usepackage{sectsty}
\usepackage{framed}
\usepackage{titlesec}
\usepackage[T1]{fontenc}
%Options: Sonny, Lenny, Glenn, Conny, Rejne, Bjarne, Bjornstrup
%\usepackage[Glenn]{fncychap}

\graphicspath{ {./img/} }
\definecolor{mypink}{cmyk}{0, 0.7808, 0.4429, 0.1412}
\definecolor{mygray}{gray}{0.6}
\definecolor{DarkGreen}{RGB}{0,100,0}
\definecolor{GoodGreen}{RGB}{46,139,87}

\newcommand\barra[1]{\mathbb{#1}}
\newcommand\hnn{h_{NN}}
\newcommand\hknn{h_{k-NN}}
\newcommand\knn{K_{NN}}
\newcommand\nl{N_{\ell}}
\newcommand\sll{S_{\ell}}
\newcommand\red[1]{\textcolor{BrickRed}{#1}}
\newcommand\bred[1]{\textcolor{Red}{\textbf{#1}}}
\newcommand\blue[1]{\textcolor{Blue}{\textbf{#1}}}
\newcommand\gray[1]{\textcolor{mygray}{#1}}
\newcommand\col[2]{\textcolor{#2}{#1}}
\newcommand\expt[1]{$\barra{E}\left[ \,{#1}\, \right] $}

%\titleformat{\chapter}[display]
% {\normalfont\bfseries}{}{0pt}{\Large}
\titleformat{\chapter}[display]
{\normalfont\bfseries}{}{0pt}{\Huge\color{Blue}}
\titlespacing*{\chapter}{0pt}{-80pt}{40pt}
\chapterfont{\color{Blue}}
\sectionfont{\color{DarkGreen}}
\subsectionfont{\color{red}}


\begin{document}

\begin{titlepage}
\begin{center}
\vspace*{0.1cm}

\Huge
\textbf{\col{Statistical Methods for \\ Machine Learning}{Blue}} \vspace{0.5cm}

\includegraphics[width=.5\linewidth]{unimi}

\vspace{0.5cm}
\LARGE
Data Science and Economics \\
Università degli Studi di Milano\\
\vspace{0.5cm}

\vspace{1.5cm}

\textbf{Andrea Ierardi}

\vfill

Lecture notes

\vspace{1cm}

\includegraphics[width=0.9\linewidth]{front}

\end{center}
\end{titlepage}


\newpage
\tableofcontents
%\listoftables
\listoffigures

\newpage

\subfile{lectures/lecture1}


\newpage
\subfile{lectures/lecture2}

\newpage
\subfile{lectures/lecture3}

\newpage
\subfile{lectures/lecture4}


\newpage
\subfile{lectures/lecture5}


\newpage
\subfile{lectures/lecture6}


\newpage
\subfile{lectures/lecture7}


\newpage
\subfile{lectures/lecture8}


\newpage
\subfile{lectures/lecture9}


\newpage
\subfile{lectures/lecture10}

\newpage
\subfile{lectures/lecture11}

\newpage
\subfile{lectures/lecture12}

\newpage
\subfile{lectures/lecture13}

\newpage
\subfile{lectures/lecture14}
\newpage
\subfile{lectures/lecture15}
\newpage
\subfile{lectures/lecture16}

\newpage
\subfile{lectures/lecture17}

\newpage
\subfile{lectures/lecture18}

\newpage
\subfile{lectures/lecture19}

\newpage
\subfile{lectures/lecture20}
\newpage

\newpage
\subfile{lectures/lecture21}
\newpage
\bibliographystyle{abbrv}
\bibliography{main}

\end{document}
\documentclass[a4paper,12pt]{report}
\usepackage{amsmath}
\usepackage{systeme}
\usepackage{amssymb}
\usepackage{subfiles}
\usepackage[english]{babel}
\usepackage[dvipsnames]{xcolor}
\usepackage{graphicx}
\usepackage{xcolor}
\usepackage{sectsty}
\usepackage{framed}
\usepackage{titlesec}
\usepackage[T1]{fontenc}
%Options: Sonny, Lenny, Glenn, Conny, Rejne, Bjarne, Bjornstrup
%\usepackage[Glenn]{fncychap}

\graphicspath{ {./img/} }
\definecolor{mypink}{cmyk}{0, 0.7808, 0.4429, 0.1412}
\definecolor{mygray}{gray}{0.6}
\definecolor{DarkGreen}{RGB}{0,100,0}
\definecolor{GoodGreen}{RGB}{46,139,87}

\newcommand\barra[1]{\mathbb{#1}}
\newcommand\hnn{h_{NN}}
\newcommand\hknn{h_{k-NN}}
\newcommand\knn{K_{NN}}
\newcommand\nl{N_{\ell}}
\newcommand\sll{S_{\ell}}
\newcommand\red[1]{\textcolor{BrickRed}{#1}}
\newcommand\bred[1]{\textcolor{Red}{\textbf{#1}}}
\newcommand\blue[1]{\textcolor{Blue}{\textbf{#1}}}
\newcommand\gray[1]{\textcolor{mygray}{#1}}
\newcommand\col[2]{\textcolor{#2}{#1}}
\newcommand\expt[1]{$\barra{E}\left[ \,{#1}\, \right] $}

%\titleformat{\chapter}[display]
% {\normalfont\bfseries}{}{0pt}{\Large}
\titleformat{\chapter}[display]
{\normalfont\bfseries}{}{0pt}{\Huge\color{Blue}}
\titlespacing*{\chapter}{0pt}{-80pt}{40pt}
\chapterfont{\color{Blue}}
\sectionfont{\color{DarkGreen}}
\subsectionfont{\color{red}}


\begin{document}

\begin{titlepage}
\begin{center}
\vspace*{0.1cm}

\Huge
\textbf{\col{Statistical Methods for \\ Machine Learning}{Blue}} \vspace{0.5cm}

\includegraphics[width=.5\linewidth]{unimi}

\vspace{0.5cm}
\LARGE
Data Science and Economics \\
Università degli Studi di Milano\\
\vspace{0.5cm}

\vspace{1.5cm}

\textbf{Andrea Ierardi}

\vfill

Lecture notes

\vspace{1cm}

\includegraphics[width=0.9\linewidth]{front}

\end{center}
\end{titlepage}


\newpage
\tableofcontents
%\listoftables
\listoffigures

\newpage

\subfile{lectures/lecture1}


\newpage
\subfile{lectures/lecture2}

\newpage
\subfile{lectures/lecture3}

\newpage
\subfile{lectures/lecture4}


\newpage
\subfile{lectures/lecture5}


\newpage
\subfile{lectures/lecture6}


\newpage
\subfile{lectures/lecture7}


\newpage
\subfile{lectures/lecture8}


\newpage
\subfile{lectures/lecture9}


\newpage
\subfile{lectures/lecture10}

\newpage
\subfile{lectures/lecture11}

\newpage
\subfile{lectures/lecture12}

\newpage
\subfile{lectures/lecture13}

\newpage
\subfile{lectures/lecture14}
\newpage
\subfile{lectures/lecture15}
\newpage
\subfile{lectures/lecture16}

\newpage
\subfile{lectures/lecture17}

\newpage
\subfile{lectures/lecture18}

\newpage
\subfile{lectures/lecture19}

\newpage
\subfile{lectures/lecture20}
\newpage

\newpage
\subfile{lectures/lecture21}
\newpage
\subfile{lectures/lecture22}
\newpage
\end{document}
%This is never printed
@ -1,95 +1,100 @@
\babel@toc {english}{}
\contentsline {chapter}{\numberline {1}Lecture 1 - 09-03-2020}{7}%
\contentsline {section}{\numberline {1.1}Introduction of the course}{7}%
\contentsline {section}{\numberline {1.2}Examples}{7}%
\contentsline {subsection}{\numberline {1.2.1}Spam filtering}{10}%
\contentsline {chapter}{\numberline {2}Lecture 2 - 10-03-2020}{11}%
\contentsline {section}{\numberline {2.1}Argomento}{11}%
\contentsline {section}{\numberline {2.2}Loss}{11}%
\contentsline {subsection}{\numberline {2.2.1}Absolute Loss}{11}%
\contentsline {subsection}{\numberline {2.2.2}Square Loss}{12}%
\contentsline {subsection}{\numberline {2.2.3}Example of information of square loss}{13}%
\contentsline {subsection}{\numberline {2.2.4}labels and losses}{14}%
\contentsline {subsection}{\numberline {2.2.5}Example TF(idf) documents encoding}{16}%
\contentsline {chapter}{\numberline {3}Lecture 3 - 16-03-2020}{18}%
\contentsline {section}{\numberline {3.1}Overfitting}{20}%
\contentsline {subsection}{\numberline {3.1.1}Noise in the data}{20}%
\contentsline {section}{\numberline {3.2}Underfitting}{22}%
\contentsline {section}{\numberline {3.3}Nearest neighbour}{22}%
\contentsline {chapter}{\numberline {4}Lecture 4 - 17-03-2020}{25}%
\contentsline {section}{\numberline {4.1}Computing $h_{NN}$}{25}%
\contentsline {section}{\numberline {4.2}Tree Predictor}{27}%
\contentsline {chapter}{\numberline {5}Lecture 5 - 23-03-2020}{31}%
\contentsline {section}{\numberline {5.1}Tree Classifier}{31}%
\contentsline {section}{\numberline {5.2}Jensen’s inequality}{33}%
\contentsline {section}{\numberline {5.3}Tree Predictor}{37}%
\contentsline {section}{\numberline {5.4}Statistical model for Machine Learning}{38}%
\contentsline {chapter}{\numberline {6}Lecture 6 - 24-03-2020}{40}%
\contentsline {section}{\numberline {6.1}Bayes Optimal Predictor}{40}%
\contentsline {subsection}{\numberline {6.1.1}Square Loss}{41}%
\contentsline {subsection}{\numberline {6.1.2}Zero-one loss for binary classification}{42}%
\contentsline {section}{\numberline {6.2}Bayes Risk}{45}%
\contentsline {chapter}{\numberline {7}Lecture 7 - 30-03-2020}{47}%
\contentsline {section}{\numberline {7.1}Chernoff-Hoffding bound}{47}%
\contentsline {section}{\numberline {7.2}Union Bound}{48}%
\contentsline {section}{\numberline {7.3}Studying overfitting of a ERM}{52}%
\contentsline {chapter}{\numberline {8}Lecture 8 - 31-03-2020}{54}%
\contentsline {section}{\numberline {8.1}The problem of estimating risk in practise}{55}%
\contentsline {section}{\numberline {8.2}Cross-validation}{57}%
\contentsline {section}{\numberline {8.3}Nested cross validation}{59}%
\contentsline {chapter}{\numberline {9}Lecture 9 - 06-04-2020}{60}%
\contentsline {section}{\numberline {9.1}Tree predictors}{60}%
\contentsline {subsection}{\numberline {9.1.1}Catalan Number}{62}%
\contentsline {chapter}{\numberline {10}Lecture 10 - 07-04-2020}{66}%
\contentsline {section}{\numberline {10.1}TO BE DEFINE}{66}%
\contentsline {section}{\numberline {10.2}MANCANO 20 MINUTI DI LEZIONE}{66}%
\contentsline {section}{\numberline {10.3}Compare risk for zero-one loss}{68}%
\contentsline {chapter}{\numberline {11}Lecture 11 - 20-04-2020}{70}%
\contentsline {section}{\numberline {11.1}Analysis of $K_{NN}$}{70}%
\contentsline {subsection}{\numberline {11.1.1}Study of $K_{NN}$}{73}%
\contentsline {subsection}{\numberline {11.1.2}study of trees}{74}%
\contentsline {section}{\numberline {11.2}Non-parametric Algorithms}{75}%
\contentsline {subsection}{\numberline {11.2.1}Example of parametric algorithms}{76}%
\contentsline {chapter}{\numberline {12}Lecture 12 - 21-04-2020}{77}%
\contentsline {section}{\numberline {12.1}Non parametrics algorithms}{77}%
\contentsline {subsection}{\numberline {12.1.1}Theorem: No free lunch}{77}%
\contentsline {section}{\numberline {12.2}Highly Parametric Learning Algorithm}{79}%
\contentsline {subsection}{\numberline {12.2.1}Linear Predictors}{79}%
\contentsline {subsection}{\numberline {12.2.2}MinDisagreement}{83}%
\contentsline {chapter}{\numberline {13}Lecture 13 - 27-04-2020}{84}%
\contentsline {section}{\numberline {13.1}Linear prediction}{84}%
\contentsline {subsection}{\numberline {13.1.1}MinDisOpt}{84}%
\contentsline {section}{\numberline {13.2}The Perception Algorithm}{87}%
\contentsline {subsection}{\numberline {13.2.1}Perception convergence Theorem}{88}%
\contentsline {chapter}{\numberline {14}Lecture 14 - 28-04-2020}{91}%
\contentsline {section}{\numberline {14.1}Linear Regression}{91}%
\contentsline {subsection}{\numberline {14.1.1}The problem of linear regression}{91}%
\contentsline {subsection}{\numberline {14.1.2}Ridge regression}{92}%
\contentsline {section}{\numberline {14.2}Percetron}{93}%
\contentsline {subsection}{\numberline {14.2.1}Online Learning }{94}%
\contentsline {subsection}{\numberline {14.2.2}Online Gradiant Descent (OGD)}{96}%
\contentsline {chapter}{\numberline {15}Lecture 15 - 04-05-2020}{97}%
\contentsline {section}{\numberline {15.1}Regret analysis of OGD}{97}%
\contentsline {subsection}{\numberline {15.1.1}Projected OGD}{98}%
\contentsline {chapter}{\numberline {16}Lecture 16 - 05-05-2020}{102}%
\contentsline {section}{\numberline {16.1}Analysis of Perceptron in the non-separable case using OGD framework.}{102}%
\contentsline {subsection}{\numberline {16.1.1}Strongly convex loss functions}{106}%
\contentsline {chapter}{\numberline {17}Lecture 17 - 11-05-2020}{108}%
\contentsline {section}{\numberline {17.1}Strongly convex loss functions}{108}%
\contentsline {subsection}{\numberline {17.1.1}OGD for Strongly Convex losses}{108}%
\contentsline {subsection}{\numberline {17.1.2}Relate sequential risk and statistical risk}{109}%
\contentsline {chapter}{\numberline {18}Lecture 18 - 12-05-2020}{112}%
\contentsline {section}{\numberline {18.1}Kernel functions}{112}%
\contentsline {subsection}{\numberline {18.1.1}Feature expansion}{112}%
\contentsline {subsection}{\numberline {18.1.2}Kernels implements feature expansion (Efficiently}{113}%
\contentsline {section}{\numberline {18.2}Gaussian Kernel}{114}%
\contentsline {chapter}{\numberline {19}Lecture 19 - 18-05-2020}{117}%
\contentsline {section}{\numberline {19.1}Support Vector Machine (SVM)}{120}%
\contentsline {chapter}{\numberline {20}Lecture 20 - 19-05-2020}{121}%
\contentsline {section}{\numberline {20.1}Support Vector Machine Analysis}{121}%
\contentsline {subsection}{\numberline {20.1.1}Fritz John Optimality Conditions}{121}%
\contentsline {subsection}{\numberline {20.1.2}Non-separable case}{122}%
\contentsline {section}{\numberline {20.2}Pegasos: OGD to solve SVM}{124}%
\contentsline {chapter}{\numberline {21}Lecture 21 - 25-05-2020}{126}%
\contentsline {section}{\numberline {21.1}Pegasos in Kernel space}{126}%
\contentsline {section}{\numberline {21.2}Stability}{126}%
\contentsline {chapter}{\numberline {22}Lecture 22 - 26-05-2020}{131}%
\contentsline {section}{\numberline {22.1}Continous of Pegasos}{131}%
\contentsline {section}{\numberline {22.2}Boosting and ensemble predictors }{132}%
\contentsline {subsection}{\numberline {22.2.1}Bagging}{134}%
\contentsline {subsection}{\numberline {22.2.2}Random Forest}{134}%