diff --git a/mini_proj/Load_Images.py b/mini_proj/Load_Images.py
index 802b394..0b83761 100644
--- a/mini_proj/Load_Images.py
+++ b/mini_proj/Load_Images.py
@@ -19,8 +19,6 @@ def preprocess(img):
     hsv[:, :, 2] = exposure.equalize_hist(hsv[:, :, 2])
     img = color.hsv2rgb(hsv)
 
-    #img = img/255 # Scaling images down to values of 0-255
-
     return img
 
 '''
@@ -66,8 +64,7 @@ def gen_data(w_path, n_w_path):
             pic = augment(pic)
             pic_roll = np.rollaxis(pic, -1) # rolls colour axis to 0
             imgs_raw.append(pic_roll)
-            imgs_lbl.append(1)
-
+            imgs_lbl.append(1) # Value of 1 as Waldo is still present in the transformed image
         print('Completed: {0}/{1} Waldo images'.format(w+1, total_w))
         w += 1
 
@@ -91,7 +88,7 @@ def gen_data(w_path, n_w_path):
     ## Randomise and split data into training and test sets
     # Code was modified from code written by: Kyle O'Brien (medium.com/@kylepob61392)
     n_images = len(imgs_raw)
-    TRAIN_TEST_SPLIT = 0.75
+    TRAIN_TEST_SPLIT = 0.75 # Amount of training data as a percentage of the total
 
     # Split at the given index
     split_index = int(TRAIN_TEST_SPLIT * n_images)
@@ -113,17 +110,6 @@ def gen_data(w_path, n_w_path):
         test_data.append(imgs_raw[index])
         test_lbl.append(imgs_lbl[index])
 
-    # # Calculate what 30% of each set is
-    # third_of_w = math.floor(0.3*total_w)
-    # third_of_nw = math.floor(0.3*total_nw)
-
-    # # Split data into training and test data (60%/30%)
-    # train_data = np.append(imgs_raw[(third_of_w+1):total_w], imgs_raw[(total_w + third_of_nw + 1):len(imgs_raw)-1], axis=0)
-    # train_lbl = np.append(imgs_lbl[(third_of_w+1):total_w], imgs_lbl[(total_w + third_of_nw + 1):len(imgs_lbl)-1], axis=0)
-    # # If axis not given, both arrays are flattened before being appended
-    # test_data = np.append(imgs_raw[0:third_of_w], imgs_raw[total_w:(total_w + third_of_nw)], axis=0)
-    # test_lbl = np.append(imgs_lbl[0:third_of_w], imgs_lbl[total_w:(total_w + third_of_nw)], axis=0)
-
     try:
         # Save the data as numpy files
         np.save('Waldo_train_data.npy', train_data)
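A note on the first hunk: only two context lines of `preprocess` appear in the diff, but the removed `img = img/255` step (whose comment also mis-described division by 255 as scaling to 0-255 rather than 0-1) was plausibly redundant because skimage's colour conversions already return floats in [0, 1]. A minimal sketch of the helper, assuming the body outside the hunk matches the shown context lines:

```python
from skimage import color, exposure

def preprocess(img):
    # Convert RGB -> HSV so brightness can be equalised independently of colour
    hsv = color.rgb2hsv(img)
    # Equalise only the value (brightness) channel; hue and saturation are
    # untouched, so the colours themselves are preserved
    hsv[:, :, 2] = exposure.equalize_hist(hsv[:, :, 2])
    # hsv2rgb returns floats in [0, 1], which is why the old img/255
    # rescaling step was redundant and could be deleted
    img = color.hsv2rgb(hsv)
    return img
```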
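The `np.rollaxis(pic, -1)` call kept in the second hunk converts skimage's channels-last layout to channels-first, presumably (an assumption about this project's setup) because the downstream model expects `(channels, height, width)` input. A standalone illustration:

```python
import numpy as np

pic = np.zeros((64, 64, 3))      # channels-last, as skimage loads images
pic_roll = np.rollaxis(pic, -1)  # roll the colour axis to position 0
print(pic_roll.shape)            # (3, 64, 64) -- channels-first
```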
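The last two hunks document the shuffled 75%/25% split and delete the superseded, commented-out 60%/30% positional split. The old indexing shows Waldo images occupy the first `total_w` slots, so splitting by position without shuffling would have produced class-ordered splits; the shuffled-index approach avoids that. A sketch of the split logic as the diff's context lines suggest it works; the wrapper name `shuffle_split` and the `seed` parameter are hypothetical additions for illustration:

```python
import numpy as np

def shuffle_split(imgs_raw, imgs_lbl, train_test_split=0.75, seed=0):
    """Shuffle the dataset, then split train/test at the given ratio."""
    n_images = len(imgs_raw)
    split_index = int(train_test_split * n_images)  # 0.75 -> 75% train

    rng = np.random.RandomState(seed)     # fixed seed for reproducibility
    shuffled = rng.permutation(n_images)  # random ordering of all indices
    train_idx, test_idx = shuffled[:split_index], shuffled[split_index:]

    train_data = [imgs_raw[i] for i in train_idx]
    train_lbl = [imgs_lbl[i] for i in train_idx]
    test_data = [imgs_raw[i] for i in test_idx]
    test_lbl = [imgs_lbl[i] for i in test_idx]
    return train_data, train_lbl, test_data, test_lbl
```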