merged some changes
commit b6892721e1
@@ -56,11 +56,9 @@ def gen_data(w_path, n_w_path):
         print("ERROR: Data may not be completely saved")


-def __main__():
+if __name__ == "__main__":
     # Paths to the Waldo images
     waldo_path = 'waldo_data/64/waldo'
     n_waldo_path = 'waldo_data/64/notwaldo'

     gen_data(waldo_path, n_waldo_path)
-
-__main__()
mini_proj/_image_classifier.py (new file, 50 lines)
@@ -0,0 +1,50 @@
+class ImageClassifier:
+    """Class to create an ImageClassifier from a regular classifier with 5
+    methods that are common amongst classifiers.
+    """
+
+    def __init__(self, clf, *args, **kwargs):
+        self.clf = clf(*args, **kwargs)
+
+    def fit(self, X, *args, **kwargs):
+        X = X.reshape((len(X), -1))
+        return self.clf.fit(X, *args, **kwargs)
+
+    def predict(self, X, *args, **kwargs):
+        X = X.reshape((len(X), -1))
+        return self.clf.predict(X, *args, **kwargs)
+
+    def score(self, X, *args, **kwargs):
+        X = X.reshape((len(X), -1))
+        return self.clf.score(X, *args, **kwargs)
+
+    def get_params(self, *args, **kwargs):
+        return self.clf.get_params(*args, **kwargs)
+
+    def set_params(self, **params):
+        return self.clf.set_params(**params)
+
+if __name__ == '__main__':
+
+    # Import datasets, classifiers and performance metrics
+    from sklearn import datasets, svm, metrics
+
+    # The digits dataset
+    digits = datasets.load_digits()
+
+    n_samples = len(digits.images)
+    data = digits.images
+
+    # Create a classifier: a support vector classifier
+    classifier = ImageClassifier(svm.SVC, gamma=0.001)
+
+    # We learn the digits on the first half of the digits
+    classifier.fit(data[:n_samples // 2], digits.target[:n_samples // 2])
+
+    # Now predict the value of the digit on the second half:
+    expected = digits.target[n_samples // 2:]
+    predicted = classifier.predict(data[n_samples // 2:])
+
+    print("Classification report for classifier %s:\n%s\n"
+          % (classifier, metrics.classification_report(expected, predicted)))
+    print("Confusion matrix:\n%s" % metrics.confusion_matrix(expected, predicted))
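
For context on the new wrapper: ImageClassifier simply flattens each image to a 1-D feature vector before delegating to the wrapped scikit-learn estimator. A minimal usage sketch on dummy data (the array shapes and the RandomForestClassifier choice below are illustrative, not part of the commit):

import numpy as np
from sklearn.ensemble import RandomForestClassifier

# Hypothetical batch of ten 64x64 grayscale images with binary labels.
X = np.random.rand(10, 64, 64)
y = np.random.randint(0, 2, size=10)

clf = ImageClassifier(RandomForestClassifier, n_estimators=10)
clf.fit(X, y)           # internally reshaped to (10, 64*64) before fitting
print(clf.score(X, y))  # the same flattening is applied before scoring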
@@ -8,11 +8,12 @@ os.environ['TF_CPP_MIN_LOG_LEVEL'] = '2'
 from keras.layers import Dense, Dropout, Activation, Flatten, Input
 from keras.layers import Conv2D, MaxPooling2D, ZeroPadding2D
 from keras.models import Model
-from keras.layers.normalization import BatchNormalization
-from keras.utils import np_utils
+from sklearn import svm, tree, naive_bayes, ensemble
+from _image_classifier import ImageClassifier
+
 from keras.optimizers import Adadelta
 from keras.callbacks import ModelCheckpoint

 from keras import backend as K
 K.set_image_dim_ordering('th')
 np.random.seed(7)
@@ -58,6 +59,10 @@ lbl_test = np.load('Waldo_test_lbl.npy')

 ## Define model
 model = FCN()
+svm_iclf = ImageClassifier(svm.SVC)
+tree_iclf = ImageClassifier(tree.DecisionTreeClassifier)
+naive_bayes_iclf = ImageClassifier(naive_bayes.GaussianNB)
+ensemble_iclf = ImageClassifier(ensemble.RandomForestClassifier)

 ## Define training parameters
 epochs = 20  # an epoch is one forward pass and back propagation of all training data
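
The four wrappers above are only instantiated in this hunk. A sketch of how they could be fit and scored alongside the FCN (the im_train, lbl_train and im_test names are assumptions, and this presumes the label arrays hold 1-D class labels rather than one-hot vectors):

# Hypothetical evaluation loop for the scikit-learn baselines.
for name, iclf in [('svm', svm_iclf), ('tree', tree_iclf),
                   ('naive_bayes', naive_bayes_iclf), ('ensemble', ensemble_iclf)]:
    iclf.fit(im_train, lbl_train)
    print(name, 'test accuracy:', iclf.score(im_test, lbl_test))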
@@ -47,10 +47,9 @@ f(1,0,0) & = & 1\\
 \text{otherwise }f(\_,\_,\_) & = & 0
 \end{eqnarray*}

-\begin{figure}[H]
-\includegraphics[scale=0.55]{plots}
+\begin{figure}[ht]
+\includegraphics[scale=0.6]{plots}
 \centering
-\captionsetup{width=0.80\textwidth}
 \caption{Plots of the execution of the cellular automata with the different
 updating methods. From top-left to top-right: Synchronous, Random
 Independent, Random Order. From bottom-left to bottom-right: Clocked,
@@ -131,10 +130,9 @@ simulate the system at varying densities between 0\% and 20\% and use
 the graphs showing the energy released from the system over time to
 gauge where the runaway reaction occurs.

-\begin{figure}[H]
+\begin{figure}[ht]
 \includegraphics[scale=0.70]{plots2}
 \centering
-\captionsetup{width=0.80\textwidth}
 \caption{Plots of energy released over time. Each plot corresponds to a
 different density: 0\%, 5\%, 8\%, 10\%, 11\%, 12\%, 13\%, 15\%, 17\% and 20\%}
 \label{fig:plot2}