diff --git a/mini_proj/report/references.bib b/mini_proj/report/references.bib index e69de29..69700f7 100644 --- a/mini_proj/report/references.bib +++ b/mini_proj/report/references.bib @@ -0,0 +1,35 @@ +@techreport{knn, + title={Discriminatory analysis-nonparametric discrimination: consistency properties}, + author={Fix, Evelyn and Hodges Jr, Joseph L}, + year={1951}, + institution={California Univ Berkeley} +} +@article{svm, + title={Support-vector networks}, + author={Cortes, Corinna and Vapnik, Vladimir}, + journal={Machine learning}, + volume={20}, + number={3}, + pages={273--297}, + year={1995}, + publisher={Springer} +} +@article{naivebayes, + title={Idiot's Bayes—not so stupid after all?}, + author={Hand, David J and Yu, Keming}, + journal={International statistical review}, + volume={69}, + number={3}, + pages={385--398}, + year={2001}, + publisher={Wiley Online Library} +} +@article{randomforest, + title={Classification and regression by randomForest}, + author={Liaw, Andy and Wiener, Matthew and others}, + journal={R news}, + volume={2}, + number={3}, + pages={18--22}, + year={2002} +} diff --git a/mini_proj/report/waldo.tex b/mini_proj/report/waldo.tex index 08e2e1d..0cc2217 100644 --- a/mini_proj/report/waldo.tex +++ b/mini_proj/report/waldo.tex @@ -19,6 +19,9 @@ \usepackage{bookmark} \usepackage{natbib} + \usepackage{xcolor} + \newcommand{\todo}[1]{\marginpar{{\textsf{TODO}}}{\textbf{\color{red}[#1]}}} + \begin{document} \title{What is Waldo?} \author{Kelvin Davis \and Jip J. Dekker\and Anthony Silvestere} @@ -46,8 +49,10 @@ \begin{figure}[ht] \includegraphics[scale=0.35]{waldo} \centering - \caption{A headshot of the character ``Waldo'', or ``Wally''. Pictures of - Waldo copyrighted by Martin Handford used under the fair-use policy.} + \caption{ + A headshot of the character ``Waldo'', or ``Wally''. Pictures of Waldo + are copyrighted by Martin Handford and used under the fair-use policy.
+ } \label{fig:waldo} \end{figure} @@ -74,13 +79,49 @@ \section{Background} \label{sec:background} + The classification methods used can be separated into two groups: + classical machine learning methods and neural network architectures. Many of + the classical machine learning algorithms have variations and improvements + for various purposes; however, for this report we will be using only + their basic versions. In contrast, we will use different neural network + architectures, as this method is currently the most used for image + classification. + + \subsection{Classical Machine Learning Methods} + + \paragraph{Naive Bayes Classifier} + + \cite{naivebayes} + + \paragraph{$k$-Nearest Neighbors} + + ($k$-NN) \cite{knn} + + \paragraph{Support Vector Machine} + + \cite{svm} + + \paragraph{Random Forest} + + \cite{randomforest} + + \subsection{Neural Network Architectures} + \todo{Did we only do the three in the end? (Alexnet?)} + + \paragraph{Convolutional Neural Networks} + + \paragraph{LeNet} + + \paragraph{Fully Convolutional Neural Networks} + + \section{Methods} \label{sec:methods} \section{Results and Discussion} \label{sec:results} \section{Conclusion} \label{sec:conclusion} - \bibliographystyle{humannat} + \bibliographystyle{alpha} \bibliography{references} \end{document}