Added traditional classifiers:
SVM, decision tree, Gaussian naive Bayes, random forest.
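A minimal sketch of the intended usage of the new wrapper, assuming the package layout of this commit; the digits data and half/half split below are illustrative only, not part of the diff:

# Hedged usage sketch: wrap each traditional classifier and score it on
# raw image arrays (sklearn digits used only as a stand-in dataset).
from sklearn import datasets, svm, tree, naive_bayes, ensemble
from _image_classifier import ImageClassifier

digits = datasets.load_digits()
n = len(digits.images) // 2
for clf in (svm.SVC, tree.DecisionTreeClassifier,
            naive_bayes.GaussianNB, ensemble.RandomForestClassifier):
    iclf = ImageClassifier(clf)
    iclf.fit(digits.images[:n], digits.target[:n])
    print(clf.__name__, iclf.score(digits.images[n:], digits.target[n:]))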
This commit is contained in:
parent 630ee78a30
commit 76e3023750

50 lines · mini_proj/_image_classifier.py · Normal file
@@ -0,0 +1,50 @@
class ImageClassifier:
    """Wrap a regular classifier so that it can be applied directly to
    image arrays, exposing the five methods common amongst classifiers:
    fit, predict, score, get_params and set_params.
    """

    def __init__(self, clf, *args, **kwargs):
        # clf is a classifier class (e.g. svm.SVC), not an instance;
        # any extra arguments are forwarded to its constructor.
        self.clf = clf(*args, **kwargs)

    def fit(self, X, *args, **kwargs):
        # Flatten each image to a 1-D feature vector, giving the 2-D
        # (n_samples, n_features) matrix scikit-learn estimators expect.
        X = X.reshape((len(X), -1))
        return self.clf.fit(X, *args, **kwargs)

    def predict(self, X, *args, **kwargs):
        X = X.reshape((len(X), -1))
        return self.clf.predict(X, *args, **kwargs)

    def score(self, X, *args, **kwargs):
        X = X.reshape((len(X), -1))
        return self.clf.score(X, *args, **kwargs)

    def get_params(self, *args, **kwargs):
        return self.clf.get_params(*args, **kwargs)

    def set_params(self, **params):
        # Delegate to the wrapped classifier.
        return self.clf.set_params(**params)

if __name__ == '__main__':

    # Import datasets, classifiers and performance metrics
    from sklearn import datasets, svm, metrics

    # The digits dataset
    digits = datasets.load_digits()

    n_samples = len(digits.images)
    data = digits.images

    # Create a classifier: a support vector classifier
    classifier = ImageClassifier(svm.SVC, gamma=0.001)

    # Learn the digits on the first half of the data
    classifier.fit(data[:n_samples // 2], digits.target[:n_samples // 2])

    # Now predict the value of the digit on the second half:
    expected = digits.target[n_samples // 2:]
    predicted = classifier.predict(data[n_samples // 2:])

    print("Classification report for classifier %s:\n%s\n"
          % (classifier, metrics.classification_report(expected, predicted)))
    print("Confusion matrix:\n%s" % metrics.confusion_matrix(expected, predicted))
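The single reshape shared by fit, predict and score is what makes the wrapper image-friendly: a batch of images of shape (n, h, w) or (n, h, w, c) is flattened into the (n_samples, n_features) matrix that scikit-learn estimators expect, so pixel arrays can be passed in without manual preprocessing.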
@@ -1,6 +1,8 @@
import numpy as np
import sys
import time as t
from sklearn import svm, tree, naive_bayes, ensemble
from _image_classifier import ImageClassifier
'''
from keras.models import Sequential
from keras.layers import Dense, Dropout, Activation, Flatten, Reshape, Merge, Permute
@@ -43,7 +45,7 @@ def FCN():
    # up4x = UpSampling2D(size=(4, 4))(conv4x)
    # model = Model(input=inputs, output=up4x)
    # # Optimizer uses recommended Adadelta values
    # model.compile(optimizer=Adadelta(lr=0.01), loss='categorical_crossentropy', metrics=['accuracy'])
    return model

@@ -55,9 +57,13 @@ lbl_test = np.load('Waldo_test_lbl.npy')

## Define model
model = FCN()
svm_iclf = ImageClassifier(svm.SVC)
tree_iclf = ImageClassifier(tree.DecisionTreeClassifier)
naive_bayes_iclf = ImageClassifier(naive_bayes.GaussianNB)
ensemble_iclf = ImageClassifier(ensemble.RandomForestClassifier)

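For context, a hedged sketch of how these four wrappers might be fit and compared; `im_train` and `lbl_train` are assumed names for training arrays loaded alongside `lbl_test` above, and per-image integer labels are assumed (neither appears in this hunk):

# Hypothetical comparison loop -- im_train/lbl_train and per-image integer
# labels are assumptions, not part of this diff.
for name, iclf in (('svm', svm_iclf),
                   ('decision_tree', tree_iclf),
                   ('gaussian_nb', naive_bayes_iclf),
                   ('random_forest', ensemble_iclf)):
    iclf.fit(im_train, lbl_train)
    print('{}: {:.3f}'.format(name, iclf.score(im_test, lbl_test)))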
## Define training parameters
epochs = 40  # an epoch is one forward pass and back propagation of all training data
batch_size = 5
#lrate = 0.01
#decay = lrate/epochs
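# e.g. with batch_size = 5, one epoch makes ceil(n_samples / 5) weight
# updates, and epochs = 40 repeats that 40 times (illustrative arithmetic,
# not part of this diff)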
@@ -125,4 +131,3 @@ pred_lbl = model.predict(im_test, verbose=1, batch_size=batch_size)
end = t.time()
print("Images generated in {} seconds".format(end - start))
np.save('Test/predicted_results.npy', pred_lbl)