completed lecture 6

This commit is contained in:
Andreaierardi 2020-04-13 15:18:17 +02:00
parent b5aa522402
commit 1a394639b2
13 changed files with 720 additions and 197 deletions

View File

@ -1,6 +1,10 @@
\relax
\@nameuse{bbl@beforestart}
\babel@aux{english}{}
\@writefile{toc}{\contentsline {chapter}{\numberline {1}6 - 07-04-2020}{1}\protected@file@percent }
\@writefile{toc}{\contentsline {chapter}{\numberline {1}Lecture 6 - 07-04-2020}{1}\protected@file@percent }
\@writefile{lof}{\addvspace {10\p@ }}
\@writefile{lot}{\addvspace {10\p@ }}
\@writefile{toc}{\contentsline {section}{\numberline {1.1}Bayes Optimal Predictor}{1}\protected@file@percent }
\@writefile{toc}{\contentsline {subsection}{\numberline {1.1.1}Square Loss}{2}\protected@file@percent }
\@writefile{toc}{\contentsline {subsection}{\numberline {1.1.2}Zero-one loss for binary classification}{3}\protected@file@percent }
\@writefile{toc}{\contentsline {section}{\numberline {1.2}Bayes Risk}{5}\protected@file@percent }

View File

@ -1,4 +1,4 @@
This is pdfTeX, Version 3.14159265-2.6-1.40.21 (MiKTeX 2.9.7300 64-bit) (preloaded format=pdflatex 2020.4.13) 13 APR 2020 12:29
This is pdfTeX, Version 3.14159265-2.6-1.40.21 (MiKTeX 2.9.7300 64-bit) (preloaded format=pdflatex 2020.4.13) 13 APR 2020 15:14
entering extended mode
**./lecture6.tex
(lecture6.tex
@ -238,7 +238,7 @@ File: l3backend-pdfmode.def 2020-03-12 L3 backend support: PDF mode
\l__kernel_color_stack_int=\count193
\l__pdf_internal_box=\box48
)
No file lecture6.aux.
(lecture6.aux)
\openout1 = `lecture6.aux'.
LaTeX Font Info: Checking defaults for OML/cmm/m/it on input line 2.
@ -255,7 +255,7 @@ LaTeX Font Info: Checking defaults for OMX/cmex/m/n on input line 2.
LaTeX Font Info: ... okay on input line 2.
LaTeX Font Info: Checking defaults for U/cmr/m/n on input line 2.
LaTeX Font Info: ... okay on input line 2.
("C:\Program Files\MiKTeX 2.9\tex/context/base/mkii\supp-pdf.mkii"
("C:\Program Files\MiKTeX 2.9\tex/context/base/mkii\supp-pdf.mkii"
[Loading MPS to PDF converter (version 2006.09.02).]
\scratchcounter=\count194
\scratchdimen=\dimen156
@ -308,25 +308,187 @@ G,.JBIG2,.JB2,.eps]
(grfext) \AppendGraphicsExtensions on input line 504.
)
Chapter 1.
LaTeX Font Info: Trying to load font information for U+msa on input line 5.
("C:\Program Files\MiKTeX 2.9\tex/latex/amsfonts\umsa.fd"
File: umsa.fd 2013/01/14 v3.01 AMS symbols A
)
LaTeX Font Info: Trying to load font information for U+msb on input line 5.
("C:\Program Files\MiKTeX 2.9\tex/latex/amsfonts\umsb.fd"
File: umsb.fd 2013/01/14 v3.01 AMS symbols B
)
Underfull \hbox (badness 10000) in paragraph at lines 5--17
[]
Underfull \hbox (badness 10000) in paragraph at lines 21--27
[]
Underfull \hbox (badness 10000) in paragraph at lines 40--46
[]
Underfull \hbox (badness 10000) in paragraph at lines 40--46
[]
Underfull \hbox (badness 10000) in paragraph at lines 48--52
[]
Underfull \hbox (badness 10000) in paragraph at lines 48--52
[]
[1
{C:/Users/AndreDany/AppData/Local/MiKTeX/2.9/pdftex/config/pdftex.map}]
(lecture6.aux) )
Underfull \hbox (badness 10000) in paragraph at lines 53--57
[]
Overfull \hbox (22.00105pt too wide) detected at line 62
[] \U/msb/m/n/12 E [] \OT1/cmr/m/n/12 = \U/msb/m/n/12 E\OT1/cmr/m/n/12 [\OML/cm
m/m/it/12 X\OT1/cmr/m/n/12 ] + \U/msb/m/n/12 E\OT1/cmr/m/n/12 [\OML/cmm/m/it/12
Y\OT1/cmr/m/n/12 ] = \OML/cmm/m/it/12 argmin \U/msb/m/n/12 E [] \OT1/cmr/m/n/1
2 =
[]
Underfull \hbox (badness 10000) in paragraph at lines 62--66
[]
Underfull \hbox (badness 10000) in paragraph at lines 81--85
[]
Underfull \hbox (badness 10000) in paragraph at lines 81--85
[]
Underfull \hbox (badness 10000) in paragraph at lines 87--93
[]
[2]
Underfull \hbox (badness 10000) in paragraph at lines 98--100
[]
Underfull \hbox (badness 10000) in paragraph at lines 111--115
[]
Overfull \hbox (13.10564pt too wide) detected at line 126
\OML/cmm/m/it/12 X \U/msa/m/n/12 v \OML/cmm/m/it/12 D[] [][]\OMS/cmsy/m/n/12 !
[][]
[]
Underfull \hbox (badness 10000) in paragraph at lines 132--150
[]
Underfull \hbox (badness 10000) in paragraph at lines 132--150
[]
[3]
Underfull \hbox (badness 10000) in paragraph at lines 165--169
[]
Overfull \hbox (55.32307pt too wide) detected at line 171
\OT1/cmr/m/n/12 = \OML/cmm/m/it/12 argmin \OT1/cmr/m/n/12 ( \OML/cmm/m/it/12 I\
OMS/cmsy/m/n/12 f[] \OT1/cmr/m/n/12 = 1\OMS/cmsy/m/n/12 g  \U/msb/m/n/12 E []
\OT1/cmr/m/n/12 + \OML/cmm/m/it/12 I\OMS/cmsy/m/n/12 f[] \OT1/cmr/m/n/12 = \OMS
/cmsy/m/n/12 \OT1/cmr/m/n/12 1\OMS/cmsy/m/n/12 g  \U/msb/m/n/12 E [] \OT1/cmr
/m/n/12 ) = [][]
[]
Underfull \hbox (badness 10000) in paragraph at lines 176--177
[]
Underfull \hbox (badness 10000) in paragraph at lines 179--185
[]
[4]
Underfull \hbox (badness 10000) in paragraph at lines 206--210
[]
LaTeX Warning: File `bayesrisk.jpg' not found on input line 220.
! Package pdftex.def Error: File `bayesrisk.jpg' not found: using draft setting
.
See the pdftex.def package documentation for explanation.
Type H <return> for immediate help.
...
l.220 \includegraphics{bayesrisk.jpg}
Try typing <return> to proceed.
If that doesn't work, type X <return> to quit.
LaTeX Font Info: Trying to load font information for T1+cmtt on input line 2
20.
("C:\Program Files\MiKTeX 2.9\tex/latex/base\t1cmtt.fd"
File: t1cmtt.fd 2019/12/16 v2.5j Standard LaTeX font definitions
) [5] (lecture6.aux) )
Here is how much of TeX's memory you used:
5023 strings out of 480934
67601 string characters out of 2909670
328093 words of memory out of 3000000
20808 multiletter control sequences out of 15000+200000
534946 words of font info for 28 fonts, out of 3000000 for 9000
5153 strings out of 480934
69770 string characters out of 2909670
334037 words of memory out of 3000000
20886 multiletter control sequences out of 15000+200000
547608 words of font info for 60 fonts, out of 3000000 for 9000
1141 hyphenation exceptions out of 8191
42i,5n,50p,332b,113s stack positions out of 5000i,500n,10000p,200000b,50000s
<C:\Users\AndreDany\AppData\Local\MiKTeX\2.9\fonts/pk/ljfour/j
knappen/ec/dpi600\ecrm1200.pk> <C:\Users\AndreDany\AppData\Local\MiKTeX\2.9\fon
ts/pk/ljfour/jknappen/ec/dpi600\ecbx2488.pk>
Output written on lecture6.pdf (1 page, 8852 bytes).
42i,7n,50p,332b,168s stack positions out of 5000i,500n,10000p,200000b,50000s
<C:\Users\AndreDany\AppData\Local\MiKTeX\2.9\fonts/pk/lj
four/jknappen/ec/dpi600\ectt1200.pk> <C:\Users\AndreDany\AppData\Local\MiKTeX\2
.9\fonts/pk/ljfour/jknappen/ec/dpi600\tcbx1200.pk> <C:\Users\AndreDany\AppData\
Local\MiKTeX\2.9\fonts/pk/ljfour/jknappen/ec/dpi600\ecbx1200.pk> <C:\Users\Andr
eDany\AppData\Local\MiKTeX\2.9\fonts/pk/ljfour/jknappen/ec/dpi600\tcrm1200.pk>
<C:\Users\AndreDany\AppData\Local\MiKTeX\2.9\fonts/pk/ljfour/jknappen/ec/dpi600
\ecbx1440.pk> <C:\Users\AndreDany\AppData\Local\MiKTeX\2.9\fonts/pk/ljfour/jkna
ppen/ec/dpi600\ecti1200.pk> <C:\Users\AndreDany\AppData\Local\MiKTeX\2.9\fonts/
pk/ljfour/jknappen/ec/dpi600\ecbx1728.pk> <C:\Users\AndreDany\AppData\Local\MiK
TeX\2.9\fonts/pk/ljfour/jknappen/ec/dpi600\ecrm1200.pk> <C:\Users\AndreDany\App
Data\Local\MiKTeX\2.9\fonts/pk/ljfour/jknappen/ec/dpi600\ecbx2488.pk><C:/Progra
m Files/MiKTeX 2.9/fonts/type1/public/amsfonts/cm/cmex10.pfb><C:/Program Files/
MiKTeX 2.9/fonts/type1/public/amsfonts/cm/cmmi12.pfb><C:/Program Files/MiKTeX 2
.9/fonts/type1/public/amsfonts/cm/cmmi8.pfb><C:/Program Files/MiKTeX 2.9/fonts/
type1/public/amsfonts/cm/cmr12.pfb><C:/Program Files/MiKTeX 2.9/fonts/type1/pub
lic/amsfonts/cm/cmr8.pfb><C:/Program Files/MiKTeX 2.9/fonts/type1/public/amsfon
ts/cm/cmsy10.pfb><C:/Program Files/MiKTeX 2.9/fonts/type1/public/amsfonts/cm/cm
sy8.pfb><C:/Program Files/MiKTeX 2.9/fonts/type1/public/amsfonts/symbols/msam10
.pfb><C:/Program Files/MiKTeX 2.9/fonts/type1/public/amsfonts/symbols/msbm10.pf
b>
Output written on lecture6.pdf (5 pages, 159989 bytes).
PDF statistics:
29 PDF objects out of 1000 (max. 8388607)
265 PDF objects out of 1000 (max. 8388607)
0 named destinations out of 1000 (max. 500000)
1 words of extra memory for PDF output out of 10000 (max. 10000000)

View File

@ -2,6 +2,225 @@
\begin{document}
\chapter{Lecture 6 - 07-04-2020}
$(X, Y)$ are random variables drawn i.i.d. from $D$ on $X \times Y$ $\longrightarrow$ where $D$ is fixed but unknown\\\\
In practice independence often does not hold: we do not collect data points through an independent
process.\\
Example: I want to classify news articles into categories. The feed depends heavily
on what is happening in the world, and some news items are highly
correlated. So why do we make an assumption that does not follow reality?
Because it is very convenient in mathematical terms:
if you assume independence you can make a lot of progress, mathematically, in designing the algorithm.\\
If you have enough data, the points look independent enough. Statistical learning is
not the only way of analysing algorithms $\longrightarrow$ we will see this with linear ML algorithms,
and in the end you can use both statistical models.
\section{Bayes Optimal Predictor}
$$ f^* : X \rightarrow Y$$
$$ f^*(x) = argmin \, \barra{E}\left[ \, \ell(y,\hat{y})| X=x \, \right] \qquad \hat{y} \in Y$$
\\
In general, $Y$ given $X = x$ has distribution $D_{Y|X=x}$.
\\
Clearly, for every $h: X\rightarrow Y$,
\\
$$
\barra{E} \left[ \, \ell(y, f^*(x)) | X=x \, \right] \leq \barra{E}\left[ \, \ell(y,h(x)) | X = x \, \right]
$$
$$
X,Y \qquad \barra{E} \left[ \, Y|X = x \, \right] = F(x) \quad \longrightarrow \quad \red{\textit{Conditional Expectation}}
$$
$$
\barra{E} \left[ \, \barra{E} \left[ \, Y|X \, \right] \, \right] = \barra{E}(Y)
$$
\\
Now take the expectation with respect to the distribution of $X$ on both sides:
$$
\barra{E} \left[ \, \ell(y, f^*(x))\, \right] \leq \barra{E} \left[ \, \ell(y, h(x)) \, \right]
$$
\\ that is, the \red{risk of $f^*$ is the smallest}
\\
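Indeed, combining the conditional inequality with the tower rule above: for any $h : X \rightarrow Y$,
$$
\barra{E} \left[ \, \ell(y, h(x)) \, \right] = \barra{E} \Big[ \, \barra{E} \left[ \, \ell(y, h(x)) \, | \, X \, \right] \, \Big] \geq \barra{E} \Big[ \, \barra{E} \left[ \, \ell(y, f^*(x)) \, | \, X \, \right] \, \Big] = \barra{E} \left[ \, \ell(y, f^*(x)) \, \right]
$$
\\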
The risk of $f^*$ is the smallest achievable, so I can look at this quantity:\\
$\ell_D(f^*)$ is called the \red{Bayes risk} $\longrightarrow$ the smallest possible risk given a learning problem
\\\\
$$
\ell_D(f^*) > 0 \qquad \textit{because $Y$ is still stochastic given $X$}
$$
\\
A learning problem can be complex $\rightarrow$ large Bayes risk
\\\\
\subsection{Square Loss}
$$\ell(y,\hat{y}) = (y - \hat{y})^2$$
I want to compute the Bayes optimal predictor, where\\
$\hat{y}, y \in \barra{R}$
\\
$$
f^*(x) = argmin \, \barra{E} \left[ \, (y-\hat{y})^2 | X = x \, \right] = \qquad \hat{y} \in \barra{R}
$$\
$$
\textit{we use } \barra{E}\left[\,X+Y\,\right] = \barra{E}[X] + \barra{E}[Y]: \qquad = argmin \, \barra{E}\left[\,\red{y^2} + \hat{y}^2 - 2\cdot y \cdot \hat{y} \, | \, X = x \, \right] =
$$
\\
Dropping $\red{y^2}$, I remove a term that does not depend on $\hat{y}$, so it does not affect the argmin.
\\
$$
= argmin ( \barra{E} \left[\, y^2 | X = x\, \right] + \hat{y}^2 - 2 \cdot \hat{y} \cdot \barra{E} \left[ \, y | X = x \, \right] ) =
$$
$$
= argmin (\hat{y}^2 - 2 \cdot \hat{y} \cdot \barra{E} \left[ \, y | X = x \, \right] ) =
$$
\\ The conditional expectation is a number, so it is a \red{constant} with respect to $\hat{y}$
\\
Write $ \boxdot = \barra{E} \left[ \, y^2 \, | \, X = x \, \right] $, a constant with respect to $\hat{y}$:
$$
argmin \, \left[\, \boxdot + \hat{y}^2 - 2 \cdot \hat{y} \cdot \barra{E} \left[\, Y|X =x\,\right] \right]
$$
where \red{$G(\hat{y})$ is the expression between $\left[\dots\right]$}
$$
\frac{d G(\hat{y})}{d\hat{y}} = 2 \cdot \hat{y}- 2 \cdot \barra{E} \left[ \, y | X= x \, \right] = 0 \quad \longrightarrow \quad \red{\textit{So setting derivative to 0}}
$$
\\ --- DRAWING: optimization curve (parabola with minimum at $\hat{y} = b$) ---\\\\
Up to the constant $\boxdot$, \quad $G(\hat{y}) = \hat{y}^2 - 2\cdot b \cdot \hat{y}$ \quad with $b = \barra{E} \left[ \, y \, | \, X = x \, \right]$, a parabola minimized at $\hat{y} = b$:
\\
$$
\hat{y} = \barra{E} \left[ \, y| X= x \, \right] \qquad f^*(x) = \barra{E} \left[ \, y | X = x \, \right]
$$
\\
Square loss is nice because the optimal prediction is simply the conditional expectation of the label.\\
In order to predict as well as possible, we have to estimate $\barra{E} \left[ \, Y \, | \, X = x \, \right]$ from the data
points.
\\
$$
\barra{E} \left[ \, (y- f^*(x))^2 | X = x \, \right] =
$$
$$
= \barra{E} \left[ \, (y- \barra{E} \left[ \, y | X = x \,\right] )^2 | X = x \, \right] = Var \left[ \, Y | X = x \, \right]
$$
\\
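As a quick sanity check, here is a minimal Python sketch (not from the lecture) under the assumed toy model $Y = X + \varepsilon$ with Gaussian noise $\varepsilon$, so that $\barra{E} \left[ \, Y \, | \, X = x \, \right] = x$; the competing predictor $h(x) = 0.9x$ is an arbitrary illustrative choice.
\begin{verbatim}
# Minimal sketch (assumed toy model): Y = X + Gaussian noise with std 0.5,
# so f*(x) = E[Y | X = x] = x and the conditional risk is Var[Y | X = x] = 0.25.
import numpy as np

rng = np.random.default_rng(0)
n = 1_000_000
X = rng.uniform(-1.0, 1.0, size=n)
Y = X + rng.normal(0.0, 0.5, size=n)

risk_bayes = np.mean((Y - X) ** 2)        # risk of f*(x) = x
risk_other = np.mean((Y - 0.9 * X) ** 2)  # risk of an arbitrary h(x) = 0.9x

print(risk_bayes)   # ~ 0.25
print(risk_other)   # strictly larger, ~ 0.2533
\end{verbatim}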
\subsection{Zero-one loss for binary classification}
$ Y = \{-1,1\}
$
$$
\ell(y,\hat{y}) = I \{ \hat{y} \neq y \}
\qquad I_A (x) =
\begin{cases}
1 \quad x \in A
\\
0 \quad x \not\in A
\end{cases}
$$
\\
\red{If $\hat{y} \neq y$ is true, the indicator function gives us 1, otherwise it gives 0}
\\
$$
D \quad \textit{on} \quad X \times Y \qquad \leadsto \qquad D_x, \quad D_{y|x}
$$
$$
\eta: X \longrightarrow \left[ \, 0,1 \, \right] \qquad \eta(x) = \barra{P} \,(Y = 1 \, | \, X = x )
$$
$$
D \leadsto (D_x, \eta) \quad \longrightarrow \quad \red{\textit{equivalent description of the distribution for binary classification with 0-1 loss}}
$$
$$
X \sim D_x \quad \longrightarrow \quad \red{ \textit{where $\sim$ means ``drawn from'' and $D_x$ is the marginal distribution} }
$$
$$
Y = 1 \qquad \textit{ with probability } \eta(x)
$$
$$
D_{y|x} = \{ \eta(x), 1- \eta(x) \}
$$
\\
Suppose we have a learning domain\\
--- DRAWING ---
\\
where $\eta$ is a function of $x$, so I can plot it\\
$\eta(x)$ tells me the probability that the label of $x$ is $1$
\\
$\eta$ tells me a lot about how hard the learning problem is in the domain
\\
$\eta(x)$ is not necessarily continuous
\\
--- DRAWING ---
\\\\
If $\eta(x) \in \{0,1\} $ \quad then $y$ is always determined by $x$
\\
How to get $f^*$ from the graph?
\\
$$
f^* : X \rightarrow \{-1,1\}
$$
$$
Y = \{-1, +1 \}
$$
--- DRAWING ---\\
===============================\\
MISSING MATERIAL\\
==============================
$$
f^*(x) = argmin \, \barra{E} \left[ \, \ell(y, \hat{y}) | X= x\, \right] = \qquad \hat{y} \in \{-1,+1 \}
$$
$$
= argmin \, \barra{E} \left[ \, I\{\hat{y} = 1\} \cdot I\{Y=-1\} + I\{\hat{y}=-1\} \cdot I\{y=1\} \, | \, X = x \, \right] =
$$
\\
we are splitting the loss into the two wrong-prediction cases
\\
$$
= argmin \, ( \, I\{\hat{y} = 1\} \cdot \barra{E} \left[ \, I\{Y=-1\} |\, X = x\, \right] + I\{\hat{y}=-1\} \cdot \barra{E} \left[ \, I\{y=1\} \, | \, X = x \, \right] \, ) = \quad \red{\divideontimes}
$$\\
We know that: $$ \barra{E} \left[ \, I \{Y = -1 \} \, | \, X = x \, \right] = 1 \cdot \barra{P} \, (Y = -1 \, | \, X = x ) + 0 \cdot \barra{P} (Y = 1 \, | \, X= x) =
$$
$$
= \barra{P} (Y = -1 \, | \, X=x ) = \, \red{ 1- \eta(x) }
$$\\
$$
\red{\divideontimes} = argmin \, ( \, \col{I\{\hat{y} = 1\} \cdot (1 - \eta(x))}{Blue} + \col{I \{ \hat{y} = -1\} \cdot \eta(x)}{Orange} \, )
$$
where the \col{Blue}{Blue} part is the first term and the \col{Orange}{Orange} part is the second term.
\\\\
I have to choose \red{$-1$ or $+1$}, so exactly \textbf{one of the two terms (first or second) is removed}.
\\
It depends on $\eta(x)$:
\begin{itemize}
\item If $\eta(x) < \frac{1}{2}$ \quad $\longrightarrow$ \quad kill the first term (predict $\hat{y} = -1$)
\item If $\eta(x) \geq \frac{1}{2}$ \quad $\longrightarrow$ \quad kill the second term (predict $\hat{y} = +1$)
\end{itemize}
$$
f^*(x) =
\begin{cases}
+1 \qquad \text{if } \eta(x) \geq \frac{1}{2}\\
-1 \qquad \text{if } \eta(x) < \frac{1}{2}
\end{cases}
$$
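As a small empirical check (not from the lecture), this minimal Python sketch assumes the toy choice $\eta(x) = x$ with $X$ uniform on $[0,1]$ and compares the Bayes threshold $\frac{1}{2}$ with an arbitrary alternative threshold.
\begin{verbatim}
# Minimal sketch (assumed toy model): X uniform on [0,1], eta(x) = x,
# so f*(x) = +1 iff eta(x) >= 1/2.  The threshold 0.7 is an arbitrary comparison.
import numpy as np

rng = np.random.default_rng(0)
n = 1_000_000
X = rng.uniform(0.0, 1.0, size=n)
eta = X                                         # P(Y = +1 | X = x)
Y = np.where(rng.uniform(size=n) < eta, 1, -1)  # label +1 with probability eta(x)

def zero_one_risk(threshold):
    pred = np.where(eta >= threshold, 1, -1)
    return np.mean(pred != Y)

print(zero_one_risk(0.5))   # ~ 0.25 = E[ min{eta(X), 1 - eta(X)} ]
print(zero_one_risk(0.7))   # larger, ~ 0.29
\end{verbatim}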
\section{Bayes Risk}
$$
\barra{E} \left[ \, I \{ y \neq f^*(x) \}\, | \, X = x \, \right] = \barra{P}(y \neq f^*(x)|X= x)
$$\
$$
\eta(x) \geq \frac{1}{2} \quad \Rightarrow \quad \hat{y} = 1 \quad \Rightarrow \quad \barra{P} (y \neq 1 | X= x) = 1-\eta(x)
$$\
$$
\eta(x) < \frac{1}{2} \quad \Rightarrow \quad \hat{y} = -1 \quad \Rightarrow \quad \barra{P} (y \neq -1 | X= x) = \eta(x) \quad
$$
\\
The conditional risk for the 0-1 loss is:
\\
$$
\barra{E} \left[ \, \ell (y, f^*(x)) \, | \, X = x \, \right]
\quad = \quad I \{ \eta(x) \geq \frac{1}{2}\} \cdot(1-\eta(x)) + I \{ \eta(x) <\frac{1}{2}\} \cdot \eta(x) =
$$
$$
= \min \, \{ \eta(x), 1- \eta(x) \}
$$\
$$
\barra{E} \left[ \, \ell (Y, f^*(X)) \, \right] = \barra{E} \left[ \, \min \, \{ \eta(X) , 1- \eta(X) \} \, \right]
$$
\includegraphics[width=\textwidth]{bayesrisk.jpg}
\\
The conditional risk is high around $\eta(x) = \frac{1}{2}$: the minimum of the two terms is close to
$\frac{1}{2}$, and since the labels there are essentially random I will get an error near $50\%$.\\
The conditional risk is $0$ in the regions where $\eta(x)$ is $0$ or $1$, since there the labels are
deterministic.
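For example, if $\eta(x) = x$ and $X$ is uniform on $[0,1]$ (an arbitrary illustrative choice, matching the sketch above), the Bayes risk is
$$
\barra{E} \left[ \, \min \, \{ \eta(X), 1- \eta(X) \} \, \right] = \int_0^{1/2} x \, dx + \int_{1/2}^{1} (1-x) \, dx = \frac{1}{8} + \frac{1}{8} = \frac{1}{4}
$$
so even the Bayes optimal predictor is wrong $25\%$ of the time under this $\eta$.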
\end{document}

View File

@ -1,54 +1,58 @@
\relax
\@nameuse{bbl@beforestart}
\babel@aux{english}{}
\@writefile{toc}{\contentsline {chapter}{\numberline {1}Lecture 1 - 09-03-2020}{2}\protected@file@percent }
\@writefile{toc}{\contentsline {chapter}{\numberline {1}Lecture 1 - 09-03-2020}{3}\protected@file@percent }
\@writefile{lof}{\addvspace {10\p@ }}
\@writefile{lot}{\addvspace {10\p@ }}
\@writefile{toc}{\contentsline {section}{\numberline {1.1}Introduction}{2}\protected@file@percent }
\@writefile{toc}{\contentsline {chapter}{\numberline {2}Lecture 2 - 07-04-2020}{5}\protected@file@percent }
\@writefile{toc}{\contentsline {section}{\numberline {1.1}Introduction}{3}\protected@file@percent }
\@writefile{toc}{\contentsline {chapter}{\numberline {2}Lecture 2 - 07-04-2020}{6}\protected@file@percent }
\@writefile{lof}{\addvspace {10\p@ }}
\@writefile{lot}{\addvspace {10\p@ }}
\@writefile{toc}{\contentsline {section}{\numberline {2.1}Argomento}{5}\protected@file@percent }
\@writefile{toc}{\contentsline {section}{\numberline {2.2}Loss}{5}\protected@file@percent }
\@writefile{toc}{\contentsline {subsection}{\numberline {2.2.1}Absolute Loss}{5}\protected@file@percent }
\@writefile{toc}{\contentsline {subsection}{\numberline {2.2.2}Square Loss}{6}\protected@file@percent }
\@writefile{toc}{\contentsline {subsection}{\numberline {2.2.3}Example of information of square loss}{6}\protected@file@percent }
\@writefile{toc}{\contentsline {subsection}{\numberline {2.2.4}labels and losses}{7}\protected@file@percent }
\@writefile{toc}{\contentsline {subsection}{\numberline {2.2.5}Example TF(idf) documents encoding}{9}\protected@file@percent }
\@writefile{toc}{\contentsline {chapter}{\numberline {3}Lecture 3 - 07-04-2020}{11}\protected@file@percent }
\@writefile{toc}{\contentsline {section}{\numberline {2.1}Argomento}{6}\protected@file@percent }
\@writefile{toc}{\contentsline {section}{\numberline {2.2}Loss}{6}\protected@file@percent }
\@writefile{toc}{\contentsline {subsection}{\numberline {2.2.1}Absolute Loss}{6}\protected@file@percent }
\@writefile{toc}{\contentsline {subsection}{\numberline {2.2.2}Square Loss}{7}\protected@file@percent }
\@writefile{toc}{\contentsline {subsection}{\numberline {2.2.3}Example of information of square loss}{7}\protected@file@percent }
\@writefile{toc}{\contentsline {subsection}{\numberline {2.2.4}labels and losses}{8}\protected@file@percent }
\@writefile{toc}{\contentsline {subsection}{\numberline {2.2.5}Example TF(idf) documents encoding}{10}\protected@file@percent }
\@writefile{toc}{\contentsline {chapter}{\numberline {3}Lecture 3 - 07-04-2020}{12}\protected@file@percent }
\@writefile{lof}{\addvspace {10\p@ }}
\@writefile{lot}{\addvspace {10\p@ }}
\@writefile{toc}{\contentsline {section}{\numberline {3.1}Overfitting}{13}\protected@file@percent }
\@writefile{toc}{\contentsline {subsection}{\numberline {3.1.1}Noise in the data}{13}\protected@file@percent }
\@writefile{toc}{\contentsline {section}{\numberline {3.2}Underfitting}{14}\protected@file@percent }
\@writefile{toc}{\contentsline {section}{\numberline {3.3}Nearest neighbour}{15}\protected@file@percent }
\@writefile{toc}{\contentsline {chapter}{\numberline {4}Lecture 4 - 07-04-2020}{17}\protected@file@percent }
\@writefile{toc}{\contentsline {section}{\numberline {3.1}Overfitting}{14}\protected@file@percent }
\@writefile{toc}{\contentsline {subsection}{\numberline {3.1.1}Noise in the data}{14}\protected@file@percent }
\@writefile{toc}{\contentsline {section}{\numberline {3.2}Underfitting}{15}\protected@file@percent }
\@writefile{toc}{\contentsline {section}{\numberline {3.3}Nearest neighbour}{16}\protected@file@percent }
\@writefile{toc}{\contentsline {chapter}{\numberline {4}Lecture 4 - 07-04-2020}{18}\protected@file@percent }
\@writefile{lof}{\addvspace {10\p@ }}
\@writefile{lot}{\addvspace {10\p@ }}
\@writefile{toc}{\contentsline {section}{\numberline {4.1}Computing $h_{NN}$}{17}\protected@file@percent }
\@writefile{toc}{\contentsline {section}{\numberline {4.2}Tree Predictor}{18}\protected@file@percent }
\@writefile{toc}{\contentsline {chapter}{\numberline {5}Lecture 5 - 07-04-2020}{21}\protected@file@percent }
\@writefile{toc}{\contentsline {section}{\numberline {4.1}Computing $h_{NN}$}{18}\protected@file@percent }
\@writefile{toc}{\contentsline {section}{\numberline {4.2}Tree Predictor}{19}\protected@file@percent }
\@writefile{toc}{\contentsline {chapter}{\numberline {5}Lecture 5 - 07-04-2020}{22}\protected@file@percent }
\@writefile{lof}{\addvspace {10\p@ }}
\@writefile{lot}{\addvspace {10\p@ }}
\@writefile{toc}{\contentsline {section}{\numberline {5.1}Tree Classifier}{21}\protected@file@percent }
\@writefile{toc}{\contentsline {section}{\numberline {5.2}Jensens inequality}{22}\protected@file@percent }
\@writefile{toc}{\contentsline {section}{\numberline {5.3}Tree Predictor}{24}\protected@file@percent }
\@writefile{toc}{\contentsline {section}{\numberline {5.4}Statistical model for Machine Learning}{25}\protected@file@percent }
\@writefile{toc}{\contentsline {chapter}{\numberline {6}Lecture 6 - 07-04-2020}{27}\protected@file@percent }
\@writefile{toc}{\contentsline {section}{\numberline {5.1}Tree Classifier}{22}\protected@file@percent }
\@writefile{toc}{\contentsline {section}{\numberline {5.2}Jensens inequality}{23}\protected@file@percent }
\@writefile{toc}{\contentsline {section}{\numberline {5.3}Tree Predictor}{25}\protected@file@percent }
\@writefile{toc}{\contentsline {section}{\numberline {5.4}Statistical model for Machine Learning}{26}\protected@file@percent }
\@writefile{toc}{\contentsline {chapter}{\numberline {6}Lecture 6 - 07-04-2020}{28}\protected@file@percent }
\@writefile{lof}{\addvspace {10\p@ }}
\@writefile{lot}{\addvspace {10\p@ }}
\@writefile{toc}{\contentsline {chapter}{\numberline {7}Lecture 7 - 07-04-2020}{28}\protected@file@percent }
\@writefile{toc}{\contentsline {section}{\numberline {6.1}Bayes Optimal Predictor}{28}\protected@file@percent }
\@writefile{toc}{\contentsline {subsection}{\numberline {6.1.1}Square Loss}{29}\protected@file@percent }
\@writefile{toc}{\contentsline {subsection}{\numberline {6.1.2}Zero-one loss for binary classification}{30}\protected@file@percent }
\@writefile{toc}{\contentsline {section}{\numberline {6.2}Bayes Risk}{32}\protected@file@percent }
\@writefile{toc}{\contentsline {chapter}{\numberline {7}Lecture 7 - 07-04-2020}{33}\protected@file@percent }
\@writefile{lof}{\addvspace {10\p@ }}
\@writefile{lot}{\addvspace {10\p@ }}
\@writefile{toc}{\contentsline {chapter}{\numberline {8}Lecture 8 - 07-04-2020}{29}\protected@file@percent }
\@writefile{toc}{\contentsline {chapter}{\numberline {8}Lecture 8 - 07-04-2020}{34}\protected@file@percent }
\@writefile{lof}{\addvspace {10\p@ }}
\@writefile{lot}{\addvspace {10\p@ }}
\@writefile{toc}{\contentsline {chapter}{\numberline {9}Lecture 9 - 07-04-2020}{30}\protected@file@percent }
\@writefile{toc}{\contentsline {chapter}{\numberline {9}Lecture 9 - 07-04-2020}{35}\protected@file@percent }
\@writefile{lof}{\addvspace {10\p@ }}
\@writefile{lot}{\addvspace {10\p@ }}
\@writefile{toc}{\contentsline {chapter}{\numberline {10}Lecture 10 - 07-04-2020}{31}\protected@file@percent }
\@writefile{toc}{\contentsline {chapter}{\numberline {10}Lecture 10 - 07-04-2020}{36}\protected@file@percent }
\@writefile{lof}{\addvspace {10\p@ }}
\@writefile{lot}{\addvspace {10\p@ }}
\@writefile{toc}{\contentsline {section}{\numberline {10.1}TO BE DEFINE}{31}\protected@file@percent }
\@writefile{toc}{\contentsline {section}{\numberline {10.1}TO BE DEFINE}{36}\protected@file@percent }
\bibstyle{abbrv}
\bibdata{main}

View File

@ -1,4 +1,4 @@
This is pdfTeX, Version 3.14159265-2.6-1.40.21 (MiKTeX 2.9.7300 64-bit) (preloaded format=pdflatex 2020.4.13) 13 APR 2020 12:51
This is pdfTeX, Version 3.14159265-2.6-1.40.21 (MiKTeX 2.9.7300 64-bit) (preloaded format=pdflatex 2020.4.13) 13 APR 2020 15:16
entering extended mode
**./main.tex
(main.tex
@ -325,7 +325,7 @@ File: umsb.fd 2013/01/14 v3.01 AMS symbols B
[1
]
] [2]
(lectures/lecture1.tex
Chapter 1.
@ -338,7 +338,7 @@ Underfull \hbox (badness 10000) in paragraph at lines 8--95
[]
[2
[3
]
Underfull \hbox (badness 10000) in paragraph at lines 98--138
@ -355,7 +355,7 @@ Underfull \hbox (badness 10000) in paragraph at lines 98--138
[]
[3]) [4] (lectures/lecture2.tex
[4]) [5] (lectures/lecture2.tex
Chapter 2.
Underfull \hbox (badness 10000) in paragraph at lines 7--16
@ -377,7 +377,7 @@ Underfull \hbox (badness 10000) in paragraph at lines 32--35
[]
[5
[6
]
Underfull \hbox (badness 10000) in paragraph at lines 49--52
@ -394,7 +394,7 @@ Underfull \hbox (badness 10000) in paragraph at lines 63--81
[]
[6]
[7]
Underfull \hbox (badness 10000) in paragraph at lines 81--86
[]
@ -419,37 +419,37 @@ Underfull \hbox (badness 10000) in paragraph at lines 110--115
[]
[7]
Underfull \hbox (badness 10000) in paragraph at lines 118--156
[]
Underfull \hbox (badness 10000) in paragraph at lines 118--156
[]
Underfull \hbox (badness 10000) in paragraph at lines 118--156
[]
Underfull \hbox (badness 10000) in paragraph at lines 118--156
[]
Underfull \hbox (badness 10000) in paragraph at lines 118--156
[]
Underfull \hbox (badness 10000) in paragraph at lines 118--156
[]
[8]
Underfull \hbox (badness 10000) in paragraph at lines 118--156
[]
Underfull \hbox (badness 10000) in paragraph at lines 118--156
[]
Underfull \hbox (badness 10000) in paragraph at lines 118--156
[]
Underfull \hbox (badness 10000) in paragraph at lines 118--156
[]
Underfull \hbox (badness 10000) in paragraph at lines 118--156
[]
Underfull \hbox (badness 10000) in paragraph at lines 118--156
[]
[9]
Underfull \hbox (badness 10000) in paragraph at lines 164--171
[]
@ -459,12 +459,12 @@ Underfull \hbox (badness 10000) in paragraph at lines 172--182
[]
[9]
[10]
Underfull \hbox (badness 10000) in paragraph at lines 189--199
[]
) [10] (lectures/lecture3.tex
) [11] (lectures/lecture3.tex
Chapter 3.
Underfull \hbox (badness 10000) in paragraph at lines 5--7
@ -521,7 +521,7 @@ Underfull \hbox (badness 10000) in paragraph at lines 28--35
[]
[11
[12
]
Underfull \hbox (badness 10000) in paragraph at lines 48--54
@ -548,7 +548,7 @@ Underfull \hbox (badness 10000) in paragraph at lines 78--86
[]
[12]
[13]
Underfull \hbox (badness 10000) in paragraph at lines 89--96
[]
@ -593,7 +593,7 @@ Underfull \hbox (badness 10000) in paragraph at lines 110--138
[]
[13]
[14]
Underfull \hbox (badness 10000) in paragraph at lines 144--160
[]
@ -609,7 +609,7 @@ Overfull \hbox (36.23656pt too wide) detected at line 170
[]
[]
[14]
[15]
Underfull \hbox (badness 10000) in paragraph at lines 177--179
[]
@ -644,12 +644,12 @@ Underfull \hbox (badness 10000) in paragraph at lines 187--223
[]
[15]
[16]
Underfull \hbox (badness 10000) in paragraph at lines 225--226
[]
) [16] (lectures/lecture4.tex
) [17] (lectures/lecture4.tex
Chapter 4.
Underfull \hbox (badness 10000) in paragraph at lines 10--14
@ -686,7 +686,7 @@ Underfull \hbox (badness 10000) in paragraph at lines 42--66
[]
[17
[18
]
Underfull \hbox (badness 10000) in paragraph at lines 68--70
@ -698,7 +698,7 @@ Underfull \hbox (badness 10000) in paragraph at lines 73--88
[]
[18]
[19]
Underfull \hbox (badness 10000) in paragraph at lines 93--99
[]
@ -753,7 +753,7 @@ Underfull \hbox (badness 10000) in paragraph at lines 142--93
[]
[19] [20] (lectures/lecture5.tex
[20] [21] (lectures/lecture5.tex
Chapter 5.
Underfull \hbox (badness 10000) in paragraph at lines 8--15
@ -790,7 +790,7 @@ Underfull \hbox (badness 10000) in paragraph at lines 55--67
[]
[21
[22
]
Underfull \hbox (badness 10000) in paragraph at lines 69--71
@ -862,7 +862,7 @@ Underfull \hbox (badness 10000) in paragraph at lines 104--131
[]
[22]
[23]
Underfull \hbox (badness 10000) in paragraph at lines 134--136
[]
@ -892,43 +892,43 @@ Underfull \hbox (badness 10000) in paragraph at lines 157--162
[]
[23]
Underfull \hbox (badness 10000) in paragraph at lines 172--178
[]
Underfull \hbox (badness 10000) in paragraph at lines 172--178
[]
LaTeX Warning: Command \textquoteright invalid in math mode on input line 190.
LaTeX Warning: Command \textquoteright invalid in math mode on input line 190.
Underfull \hbox (badness 10000) in paragraph at lines 183--194
[]
Underfull \hbox (badness 10000) in paragraph at lines 183--194
[]
Underfull \hbox (badness 10000) in paragraph at lines 204--214
[]
Underfull \hbox (badness 10000) in paragraph at lines 204--214
[]
[24]
Underfull \hbox (badness 10000) in paragraph at lines 172--178
[]
Underfull \hbox (badness 10000) in paragraph at lines 172--178
[]
LaTeX Warning: Command \textquoteright invalid in math mode on input line 190.
LaTeX Warning: Command \textquoteright invalid in math mode on input line 190.
Underfull \hbox (badness 10000) in paragraph at lines 183--194
[]
Underfull \hbox (badness 10000) in paragraph at lines 183--194
[]
Underfull \hbox (badness 10000) in paragraph at lines 204--214
[]
Underfull \hbox (badness 10000) in paragraph at lines 204--214
[]
[25]
Underfull \hbox (badness 10000) in paragraph at lines 216--222
[]
@ -938,21 +938,149 @@ Underfull \hbox (badness 10000) in paragraph at lines 216--222
[]
) [25] [26] (lectures/lecture6.tex
) [26] [27] (lectures/lecture6.tex
Chapter 6.
) [27
] (lectures/lecture7.tex
Underfull \hbox (badness 10000) in paragraph at lines 5--17
[]
Underfull \hbox (badness 10000) in paragraph at lines 21--27
[]
Underfull \hbox (badness 10000) in paragraph at lines 40--46
[]
Underfull \hbox (badness 10000) in paragraph at lines 40--46
[]
Underfull \hbox (badness 10000) in paragraph at lines 48--52
[]
Underfull \hbox (badness 10000) in paragraph at lines 48--52
[]
[28
]
Underfull \hbox (badness 10000) in paragraph at lines 53--57
[]
Overfull \hbox (22.00105pt too wide) detected at line 62
[] \U/msb/m/n/12 E [] \OT1/cmr/m/n/12 = \U/msb/m/n/12 E\OT1/cmr/m/n/12 [\OML/cm
m/m/it/12 X\OT1/cmr/m/n/12 ] + \U/msb/m/n/12 E\OT1/cmr/m/n/12 [\OML/cmm/m/it/12
Y\OT1/cmr/m/n/12 ] = \OML/cmm/m/it/12 argmin \U/msb/m/n/12 E [] \OT1/cmr/m/n/1
2 =
[]
Underfull \hbox (badness 10000) in paragraph at lines 62--66
[]
Underfull \hbox (badness 10000) in paragraph at lines 81--85
[]
Underfull \hbox (badness 10000) in paragraph at lines 81--85
[]
Underfull \hbox (badness 10000) in paragraph at lines 87--93
[]
[29]
Underfull \hbox (badness 10000) in paragraph at lines 98--100
[]
Underfull \hbox (badness 10000) in paragraph at lines 111--115
[]
Overfull \hbox (13.10564pt too wide) detected at line 126
\OML/cmm/m/it/12 X \U/msa/m/n/12 v \OML/cmm/m/it/12 D[] [][]\OMS/cmsy/m/n/12 !
[][]
[]
Underfull \hbox (badness 10000) in paragraph at lines 132--150
[]
Underfull \hbox (badness 10000) in paragraph at lines 132--150
[]
[30]
Underfull \hbox (badness 10000) in paragraph at lines 165--169
[]
Overfull \hbox (55.32307pt too wide) detected at line 171
\OT1/cmr/m/n/12 = \OML/cmm/m/it/12 argmin \OT1/cmr/m/n/12 ( \OML/cmm/m/it/12 I\
OMS/cmsy/m/n/12 f[] \OT1/cmr/m/n/12 = 1\OMS/cmsy/m/n/12 g  \U/msb/m/n/12 E []
\OT1/cmr/m/n/12 + \OML/cmm/m/it/12 I\OMS/cmsy/m/n/12 f[] \OT1/cmr/m/n/12 = \OMS
/cmsy/m/n/12 \OT1/cmr/m/n/12 1\OMS/cmsy/m/n/12 g  \U/msb/m/n/12 E [] \OT1/cmr
/m/n/12 ) = [][]
[]
Underfull \hbox (badness 10000) in paragraph at lines 176--177
[]
Underfull \hbox (badness 10000) in paragraph at lines 179--185
[]
[31]
Underfull \hbox (badness 10000) in paragraph at lines 206--210
[]
<./img/bayesrisk.jpg, id=130, 496.10344pt x 237.13594pt>
File: ./img/bayesrisk.jpg Graphic file (type jpg)
<use ./img/bayesrisk.jpg>
Package pdftex.def Info: ./img/bayesrisk.jpg used on input line 220.
(pdftex.def) Requested size: 496.10222pt x 237.13535pt.
)
Overfull \hbox (106.10222pt too wide) in paragraph at lines 219--101
[]
[]
[32 <./img/bayesrisk.jpg>] (lectures/lecture7.tex
Chapter 7.
) [28
) [33
] (lectures/lecture8.tex
Chapter 8.
) [29
) [34
] (lectures/lecture9.tex
Chapter 9.
) [30
) [35
] (lectures/lecture10.tex
Chapter 10.
@ -984,7 +1112,7 @@ Underfull \hbox (badness 10000) in paragraph at lines 28--34
[]
[31
[36
]
Underfull \hbox (badness 10000) in paragraph at lines 37--50
@ -1031,46 +1159,48 @@ Underfull \hbox (badness 10000) in paragraph at lines 63--96
[]
[32]) [33] (main.bbl
[37]) [38] (main.bbl
LaTeX Warning: Empty `thebibliography' environment on input line 3.
) [34
) [39
] (main.aux) )
Here is how much of TeX's memory you used:
5150 strings out of 480934
71185 string characters out of 2909670
5163 strings out of 480934
71375 string characters out of 2909670
339202 words of memory out of 3000000
20865 multiletter control sequences out of 15000+200000
552486 words of font info for 70 fonts, out of 3000000 for 9000
20874 multiletter control sequences out of 15000+200000
553472 words of font info for 74 fonts, out of 3000000 for 9000
1141 hyphenation exceptions out of 8191
34i,11n,42p,309b,280s stack positions out of 5000i,500n,10000p,200000b,50000s
<C:\Users\AndreDany\AppData\Local\MiKTeX\2.9\fonts/pk/ljfour/jkn
appen/ec/dpi600\tcti1200.pk> <C:\Users\AndreDany\AppData\Local\MiKTeX\2.9\fonts
/pk/ljfour/jknappen/ec/dpi600\ecti1200.pk> <C:\Users\AndreDany\AppData\Local\Mi
KTeX\2.9\fonts/pk/ljfour/jknappen/ec/dpi600\ecbx1440.pk> <C:\Users\AndreDany\Ap
pData\Local\MiKTeX\2.9\fonts/pk/ljfour/jknappen/ec/dpi600\tcrm1200.pk> <C:\User
s\AndreDany\AppData\Local\MiKTeX\2.9\fonts/pk/ljfour/jknappen/ec/dpi600\ecbx172
8.pk> <C:\Users\AndreDany\AppData\Local\MiKTeX\2.9\fonts/pk/ljfour/jknappen/ec/
dpi600\ecrm1200.pk> <C:\Users\AndreDany\AppData\Local\MiKTeX\2.9\fonts/pk/ljfou
r/jknappen/ec/dpi600\ecbx1200.pk> <C:\Users\AndreDany\AppData\Local\MiKTeX\2.9\
fonts/pk/ljfour/jknappen/ec/dpi600\ecbx2074.pk> <C:\Users\AndreDany\AppData\Loc
al\MiKTeX\2.9\fonts/pk/ljfour/jknappen/ec/dpi600\ecrm2074.pk> <C:\Users\AndreDa
ny\AppData\Local\MiKTeX\2.9\fonts/pk/ljfour/jknappen/ec/dpi600\ecbx2488.pk><C:/
Program Files/MiKTeX 2.9/fonts/type1/public/amsfonts/cm/cmex10.pfb><C:/Program
Files/MiKTeX 2.9/fonts/type1/public/amsfonts/cm/cmmi12.pfb><C:/Program Files/Mi
KTeX 2.9/fonts/type1/public/amsfonts/cm/cmmi6.pfb><C:/Program Files/MiKTeX 2.9/
fonts/type1/public/amsfonts/cm/cmmi8.pfb><C:/Program Files/MiKTeX 2.9/fonts/typ
e1/public/amsfonts/cm/cmr12.pfb><C:/Program Files/MiKTeX 2.9/fonts/type1/public
/amsfonts/cm/cmr6.pfb><C:/Program Files/MiKTeX 2.9/fonts/type1/public/amsfonts/
cm/cmr8.pfb><C:/Program Files/MiKTeX 2.9/fonts/type1/public/amsfonts/cm/cmsy10.
pfb><C:/Program Files/MiKTeX 2.9/fonts/type1/public/amsfonts/cm/cmsy6.pfb><C:/P
rogram Files/MiKTeX 2.9/fonts/type1/public/amsfonts/cm/cmsy8.pfb><C:/Program Fi
les/MiKTeX 2.9/fonts/type1/public/amsfonts/symbols/msbm10.pfb>
Output written on main.pdf (35 pages, 1250492 bytes).
appen/ec/dpi600\tcbx1200.pk> <C:\Users\AndreDany\AppData\Local\MiKTeX\2.9\fonts
/pk/ljfour/jknappen/ec/dpi600\tcti1200.pk> <C:\Users\AndreDany\AppData\Local\Mi
KTeX\2.9\fonts/pk/ljfour/jknappen/ec/dpi600\ecti1200.pk> <C:\Users\AndreDany\Ap
pData\Local\MiKTeX\2.9\fonts/pk/ljfour/jknappen/ec/dpi600\ecbx1440.pk> <C:\User
s\AndreDany\AppData\Local\MiKTeX\2.9\fonts/pk/ljfour/jknappen/ec/dpi600\tcrm120
0.pk> <C:\Users\AndreDany\AppData\Local\MiKTeX\2.9\fonts/pk/ljfour/jknappen/ec/
dpi600\ecbx1728.pk> <C:\Users\AndreDany\AppData\Local\MiKTeX\2.9\fonts/pk/ljfou
r/jknappen/ec/dpi600\ecrm1200.pk> <C:\Users\AndreDany\AppData\Local\MiKTeX\2.9\
fonts/pk/ljfour/jknappen/ec/dpi600\ecbx1200.pk> <C:\Users\AndreDany\AppData\Loc
al\MiKTeX\2.9\fonts/pk/ljfour/jknappen/ec/dpi600\ecbx2074.pk> <C:\Users\AndreDa
ny\AppData\Local\MiKTeX\2.9\fonts/pk/ljfour/jknappen/ec/dpi600\ecrm2074.pk> <C:
\Users\AndreDany\AppData\Local\MiKTeX\2.9\fonts/pk/ljfour/jknappen/ec/dpi600\ec
bx2488.pk><C:/Program Files/MiKTeX 2.9/fonts/type1/public/amsfonts/cm/cmex10.pf
b><C:/Program Files/MiKTeX 2.9/fonts/type1/public/amsfonts/cm/cmmi12.pfb><C:/Pr
ogram Files/MiKTeX 2.9/fonts/type1/public/amsfonts/cm/cmmi6.pfb><C:/Program Fil
es/MiKTeX 2.9/fonts/type1/public/amsfonts/cm/cmmi8.pfb><C:/Program Files/MiKTeX
2.9/fonts/type1/public/amsfonts/cm/cmr12.pfb><C:/Program Files/MiKTeX 2.9/font
s/type1/public/amsfonts/cm/cmr6.pfb><C:/Program Files/MiKTeX 2.9/fonts/type1/pu
blic/amsfonts/cm/cmr8.pfb><C:/Program Files/MiKTeX 2.9/fonts/type1/public/amsfo
nts/cm/cmsy10.pfb><C:/Program Files/MiKTeX 2.9/fonts/type1/public/amsfonts/cm/c
msy6.pfb><C:/Program Files/MiKTeX 2.9/fonts/type1/public/amsfonts/cm/cmsy8.pfb>
<C:/Program Files/MiKTeX 2.9/fonts/type1/public/amsfonts/symbols/msam10.pfb><C:
/Program Files/MiKTeX 2.9/fonts/type1/public/amsfonts/symbols/msbm10.pfb>
Output written on main.pdf (40 pages, 1301652 bytes).
PDF statistics:
531 PDF objects out of 1000 (max. 8388607)
570 PDF objects out of 1000 (max. 8388607)
0 named destinations out of 1000 (max. 500000)
11 words of extra memory for PDF output out of 10000 (max. 10000000)
16 words of extra memory for PDF output out of 10000 (max. 10000000)

View File

@ -36,7 +36,7 @@
{\normalfont\bfseries}{}{0pt}{\Huge\color{Blue}}
\titlespacing*{\chapter}{0pt}{-80pt}{40pt}
\chapterfont{\color{Blue}}
\sectionfont{\color{GoodGreen}}
\sectionfont{\color{DarkGreen}}
\subsectionfont{\color{BrickRed}}

View File

@ -1,30 +1,34 @@
\babel@toc {english}{}
\contentsline {chapter}{\numberline {1}Lecture 1 - 09-03-2020}{2}%
\contentsline {section}{\numberline {1.1}Introduction}{2}%
\contentsline {chapter}{\numberline {2}Lecture 2 - 07-04-2020}{5}%
\contentsline {section}{\numberline {2.1}Argomento}{5}%
\contentsline {section}{\numberline {2.2}Loss}{5}%
\contentsline {subsection}{\numberline {2.2.1}Absolute Loss}{5}%
\contentsline {subsection}{\numberline {2.2.2}Square Loss}{6}%
\contentsline {subsection}{\numberline {2.2.3}Example of information of square loss}{6}%
\contentsline {subsection}{\numberline {2.2.4}labels and losses}{7}%
\contentsline {subsection}{\numberline {2.2.5}Example TF(idf) documents encoding}{9}%
\contentsline {chapter}{\numberline {3}Lecture 3 - 07-04-2020}{11}%
\contentsline {section}{\numberline {3.1}Overfitting}{13}%
\contentsline {subsection}{\numberline {3.1.1}Noise in the data}{13}%
\contentsline {section}{\numberline {3.2}Underfitting}{14}%
\contentsline {section}{\numberline {3.3}Nearest neighbour}{15}%
\contentsline {chapter}{\numberline {4}Lecture 4 - 07-04-2020}{17}%
\contentsline {section}{\numberline {4.1}Computing $h_{NN}$}{17}%
\contentsline {section}{\numberline {4.2}Tree Predictor}{18}%
\contentsline {chapter}{\numberline {5}Lecture 5 - 07-04-2020}{21}%
\contentsline {section}{\numberline {5.1}Tree Classifier}{21}%
\contentsline {section}{\numberline {5.2}Jensens inequality}{22}%
\contentsline {section}{\numberline {5.3}Tree Predictor}{24}%
\contentsline {section}{\numberline {5.4}Statistical model for Machine Learning}{25}%
\contentsline {chapter}{\numberline {6}Lecture 6 - 07-04-2020}{27}%
\contentsline {chapter}{\numberline {7}Lecture 7 - 07-04-2020}{28}%
\contentsline {chapter}{\numberline {8}Lecture 8 - 07-04-2020}{29}%
\contentsline {chapter}{\numberline {9}Lecture 9 - 07-04-2020}{30}%
\contentsline {chapter}{\numberline {10}Lecture 10 - 07-04-2020}{31}%
\contentsline {section}{\numberline {10.1}TO BE DEFINE}{31}%
\contentsline {chapter}{\numberline {1}Lecture 1 - 09-03-2020}{3}%
\contentsline {section}{\numberline {1.1}Introduction}{3}%
\contentsline {chapter}{\numberline {2}Lecture 2 - 07-04-2020}{6}%
\contentsline {section}{\numberline {2.1}Argomento}{6}%
\contentsline {section}{\numberline {2.2}Loss}{6}%
\contentsline {subsection}{\numberline {2.2.1}Absolute Loss}{6}%
\contentsline {subsection}{\numberline {2.2.2}Square Loss}{7}%
\contentsline {subsection}{\numberline {2.2.3}Example of information of square loss}{7}%
\contentsline {subsection}{\numberline {2.2.4}labels and losses}{8}%
\contentsline {subsection}{\numberline {2.2.5}Example TF(idf) documents encoding}{10}%
\contentsline {chapter}{\numberline {3}Lecture 3 - 07-04-2020}{12}%
\contentsline {section}{\numberline {3.1}Overfitting}{14}%
\contentsline {subsection}{\numberline {3.1.1}Noise in the data}{14}%
\contentsline {section}{\numberline {3.2}Underfitting}{15}%
\contentsline {section}{\numberline {3.3}Nearest neighbour}{16}%
\contentsline {chapter}{\numberline {4}Lecture 4 - 07-04-2020}{18}%
\contentsline {section}{\numberline {4.1}Computing $h_{NN}$}{18}%
\contentsline {section}{\numberline {4.2}Tree Predictor}{19}%
\contentsline {chapter}{\numberline {5}Lecture 5 - 07-04-2020}{22}%
\contentsline {section}{\numberline {5.1}Tree Classifier}{22}%
\contentsline {section}{\numberline {5.2}Jensens inequality}{23}%
\contentsline {section}{\numberline {5.3}Tree Predictor}{25}%
\contentsline {section}{\numberline {5.4}Statistical model for Machine Learning}{26}%
\contentsline {chapter}{\numberline {6}Lecture 6 - 07-04-2020}{28}%
\contentsline {section}{\numberline {6.1}Bayes Optimal Predictor}{28}%
\contentsline {subsection}{\numberline {6.1.1}Square Loss}{29}%
\contentsline {subsection}{\numberline {6.1.2}Zero-one loss for binary classification}{30}%
\contentsline {section}{\numberline {6.2}Bayes Risk}{32}%
\contentsline {chapter}{\numberline {7}Lecture 7 - 07-04-2020}{33}%
\contentsline {chapter}{\numberline {8}Lecture 8 - 07-04-2020}{34}%
\contentsline {chapter}{\numberline {9}Lecture 9 - 07-04-2020}{35}%
\contentsline {chapter}{\numberline {10}Lecture 10 - 07-04-2020}{36}%
\contentsline {section}{\numberline {10.1}TO BE DEFINE}{36}%