diff --git a/mini_proj/Load_Images.py b/mini_proj/Load_Images.py
index eec373a..d0b986e 100644
--- a/mini_proj/Load_Images.py
+++ b/mini_proj/Load_Images.py
@@ -56,11 +56,9 @@ def gen_data(w_path, n_w_path):
         print("ERROR: Data may not be completely saved")
 
 
-def __main__():
+if __name__ == "__main__":
     # Paths to the Waldo images
     waldo_path = 'waldo_data/64/waldo'
     n_waldo_path = 'waldo_data/64/notwaldo'
 
     gen_data(waldo_path, n_waldo_path)
-
-__main__()
\ No newline at end of file
diff --git a/mini_proj/_image_classifier.py b/mini_proj/_image_classifier.py
new file mode 100644
index 0000000..5e42ecc
--- /dev/null
+++ b/mini_proj/_image_classifier.py
@@ -0,0 +1,50 @@
+class ImageClassifier:
+    """Wrap a regular scikit-learn style classifier so it accepts image arrays,
+    flattening the input before delegating to the five common classifier methods.
+    """
+
+    def __init__(self, clf, *args, **kwargs):
+        self.clf = clf(*args, **kwargs)
+
+    def fit(self, X, *args, **kwargs):
+        X = X.reshape((len(X), -1))
+        return self.clf.fit(X, *args, **kwargs)
+
+    def predict(self, X, *args, **kwargs):
+        X = X.reshape((len(X), -1))
+        return self.clf.predict(X, *args, **kwargs)
+
+    def score(self, X, *args, **kwargs):
+        X = X.reshape((len(X), -1))
+        return self.clf.score(X, *args, **kwargs)
+
+    def get_params(self, *args, **kwargs):
+        return self.clf.get_params(*args, **kwargs)
+
+    def set_params(self, **params):
+        return self.clf.set_params(**params)
+
+if __name__ == '__main__':
+
+    # Import datasets, classifiers and performance metrics
+    from sklearn import datasets, svm, metrics
+
+    # The digits dataset
+    digits = datasets.load_digits()
+
+    n_samples = len(digits.images)
+    data = digits.images
+
+    # Create a classifier: a support vector classifier
+    classifier = ImageClassifier(svm.SVC, gamma=0.001)
+
+    # Train on the first half of the digits
+    classifier.fit(data[:n_samples // 2], digits.target[:n_samples // 2])
+
+    # Now predict the value of the digit on the second half:
+    expected = digits.target[n_samples // 2:]
+    predicted = classifier.predict(data[n_samples // 2:])
+
+    print("Classification report for classifier %s:\n%s\n"
+          % (classifier, metrics.classification_report(expected, predicted)))
+    print("Confusion matrix:\n%s" % metrics.confusion_matrix(expected, predicted))
diff --git a/mini_proj/waldo_model.py b/mini_proj/waldo_model.py
index 0365e58..0d9aa43 100644
--- a/mini_proj/waldo_model.py
+++ b/mini_proj/waldo_model.py
@@ -8,11 +8,12 @@ os.environ['TF_CPP_MIN_LOG_LEVEL'] = '2'
 from keras.layers import Dense, Dropout, Activation, Flatten, Input
 from keras.layers import Conv2D, MaxPooling2D, ZeroPadding2D
 from keras.models import Model
-from keras.layers.normalization import BatchNormalization
-from keras.utils import np_utils
+
+from sklearn import svm, tree, naive_bayes, ensemble
+from _image_classifier import ImageClassifier
+
 from keras.optimizers import Adadelta
 from keras.callbacks import ModelCheckpoint
-
 from keras import backend as K
 K.set_image_dim_ordering('th')
 np.random.seed(7)
@@ -58,6 +59,10 @@ lbl_test = np.load('Waldo_test_lbl.npy')
 
 ## Define model
 model = FCN()
+svm_iclf = ImageClassifier(svm.SVC)
+tree_iclf = ImageClassifier(tree.DecisionTreeClassifier)
+naive_bayes_iclf = ImageClassifier(naive_bayes.GaussianNB)
+ensemble_iclf = ImageClassifier(ensemble.RandomForestClassifier)
 
 ## Define training parameters
 epochs = 20 # an epoch is one forward pass and back propogation of all training data
diff --git a/wk11/week11.tex b/wk11/week11.tex
index 72911a5..8616188 100644
--- a/wk11/week11.tex
+++ b/wk11/week11.tex
@@ -47,10 +47,9 @@ f(1,0,0) & = & 1\\
 \text{otherwise }f(\_,\_,\_) & = & 0
 \end{eqnarray*}
 
-\begin{figure}[H]
-  \includegraphics[scale=0.55]{plots}
+\begin{figure}[ht]
+  \includegraphics[scale=0.6]{plots}
   \centering
-  \captionsetup{width=0.80\textwidth}
   \caption{Plots of the execution of the cellular automata with the
   different updating methods. From top-left to top-right: Synchronous,
   Random Independent, Random Order. From bottom-left to bottom-right: Clocked,
@@ -131,10 +130,9 @@ simulate the system at varying densities between 0\% and 20\% and use the
 graphs showing the energy released from the system over time to gauge how
 where the runaway reaction occurs.
 
-\begin{figure}[H]
+\begin{figure}[ht]
   \includegraphics[scale=0.70]{plots2}
   \centering
-  \captionsetup{width=0.80\textwidth}
   \caption{Plots of energy released over time. Each plot corresponds a
   different density: 0\%, 5\%, 8\%, 10\%, 11\%, 12\%, 13\%, 15\%, 17\% and 20\%}
   \label{fig:plot2}
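
Note (not part of the patch): a minimal sketch of how the new ImageClassifier wrapper is expected to be exercised against the Waldo arrays loaded in waldo_model.py. Only 'Waldo_test_lbl.npy' appears in the diff; the other file names, and the assumption that the label arrays hold 1-D class labels rather than one-hot vectors, are hypothetical.

import numpy as np
from sklearn import svm
from _image_classifier import ImageClassifier

# Assumed file names, mirroring the 'Waldo_test_lbl.npy' load shown in the diff.
im_train = np.load('Waldo_train_data.npy')
lbl_train = np.load('Waldo_train_lbl.npy')
im_test = np.load('Waldo_test_data.npy')
lbl_test = np.load('Waldo_test_lbl.npy')

# The wrapper flattens each image to a 1-D feature vector before delegating
# to the wrapped scikit-learn estimator, so the raw image arrays can be
# passed in directly.
svm_iclf = ImageClassifier(svm.SVC, gamma=0.001)
svm_iclf.fit(im_train, lbl_train)  # assumes lbl_train is a 1-D array of class labels
print('SVM test accuracy: %.3f' % svm_iclf.score(im_test, lbl_test))

The same pattern applies to the tree, naive_bayes and ensemble wrappers defined in waldo_model.py.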