Training a Fruit Classifer

Experimenting with Callbacks

  • Let's create our data generators
In [2]:
from __future__ import print_function
import keras
from keras.preprocessing.image import ImageDataGenerator
from keras.models import Sequential
from keras.layers import Dense, Dropout, Activation, Flatten
from keras.layers import Conv2D, MaxPooling2D
import os

num_classes = 81
img_rows, img_cols = 32, 32
batch_size = 16

from keras.preprocessing.image import ImageDataGenerator

train_data_dir = './fruits-360/train'
validation_data_dir = './fruits-360/validation'

# Let's use some data augmentaiton 
train_datagen = ImageDataGenerator(
      rescale=1./255,
      rotation_range=30,
      width_shift_range=0.3,
      height_shift_range=0.3,
      horizontal_flip=True,
      fill_mode='nearest')
 
validation_datagen = ImageDataGenerator(rescale=1./255)
 
train_generator = train_datagen.flow_from_directory(
        train_data_dir,
        target_size=(img_rows, img_cols),
        batch_size=batch_size,
        class_mode='categorical',
        shuffle=True)
 
validation_generator = validation_datagen.flow_from_directory(
        validation_data_dir,
        target_size=(img_rows, img_cols),
        batch_size=batch_size,
        class_mode='categorical',
        shuffle=False)
Found 41322 images belonging to 81 classes.
Found 13877 images belonging to 81 classes.

Let's define our model

In [ ]:
model = Sequential()

# Padding = 'same'  results in padding the input such that
# the output has the same length as the original input
model.add(Conv2D(32, (3, 3), padding='same',
                 input_shape= (img_rows, img_cols, 3)))
model.add(Activation('relu'))
model.add(Conv2D(32, (3, 3)))
model.add(Activation('relu'))
model.add(MaxPooling2D(pool_size=(2, 2)))
model.add(Dropout(0.25))

model.add(Conv2D(64, (3, 3), padding='same'))
model.add(Activation('relu'))
model.add(Conv2D(64, (3, 3)))
model.add(Activation('relu'))
model.add(MaxPooling2D(pool_size=(2, 2)))
model.add(Dropout(0.25))

model.add(Flatten())
model.add(Dense(512))
model.add(Activation('relu'))
model.add(Dropout(0.5))
model.add(Dense(num_classes))
model.add(Activation('softmax'))

# initiate RMSprop optimizer and configure some parameters
#opt = keras.optimizers.rmsprop(lr=0.0001, decay=1e-6)
print(model.summary())
In [3]:
from keras.optimizers import RMSprop, SGD
from keras.callbacks import ModelCheckpoint, EarlyStopping, ReduceLROnPlateau

                     
checkpoint = ModelCheckpoint("/home/deeplearningcv/DeepLearningCV/Trained Models/fruits_fresh_cnn_1.h5",
                             monitor="val_loss",
                             mode="min",
                             save_best_only = True,
                             verbose=1)

earlystop = EarlyStopping(monitor = 'val_loss', 
                          min_delta = 0, 
                          patience = 3,
                          verbose = 1,
                          restore_best_weights = True)

reduce_lr = ReduceLROnPlateau(monitor = 'val_loss',
                              factor = 0.2,
                              patience = 3,
                              verbose = 1,
                              min_delta = 0.0001)

# we put our call backs into a callback list
callbacks = [earlystop, checkpoint, reduce_lr]

# We use a very small learning rate 
model.compile(loss = 'categorical_crossentropy',
              optimizer = RMSprop(lr = 0.001),
              metrics = ['accuracy'])

nb_train_samples = 41322
nb_validation_samples = 13877
epochs = 10

history = model.fit_generator(
    train_generator,
    steps_per_epoch = nb_train_samples // batch_size,
    epochs = epochs,
    callbacks = callbacks,
    validation_data = validation_generator,
    validation_steps = nb_validation_samples // batch_size)
Epoch 1/5
2582/2582 [==============================] - 269s 104ms/step - loss: 1.7532 - acc: 0.4740 - val_loss: 0.5560 - val_acc: 0.8069

Epoch 00001: val_loss improved from inf to 0.55598, saving model to /home/deeplearningcv/DeepLearningCV/Trained Models/fruits_fresh_cnn.h5
Epoch 2/5
2582/2582 [==============================] - 262s 101ms/step - loss: 0.6594 - acc: 0.7921 - val_loss: 0.4869 - val_acc: 0.8577

Epoch 00002: val_loss improved from 0.55598 to 0.48694, saving model to /home/deeplearningcv/DeepLearningCV/Trained Models/fruits_fresh_cnn.h5
Epoch 3/5
2582/2582 [==============================] - 206s 80ms/step - loss: 0.5250 - acc: 0.8455 - val_loss: 0.2939 - val_acc: 0.9148

Epoch 00003: val_loss improved from 0.48694 to 0.29386, saving model to /home/deeplearningcv/DeepLearningCV/Trained Models/fruits_fresh_cnn.h5
Epoch 4/5
2582/2582 [==============================] - 263s 102ms/step - loss: 0.5116 - acc: 0.8599 - val_loss: 0.3469 - val_acc: 0.9279

Epoch 00004: val_loss did not improve from 0.29386
Epoch 5/5
2582/2582 [==============================] - 275s 106ms/step - loss: 0.5639 - acc: 0.8580 - val_loss: 0.6419 - val_acc: 0.8394

Epoch 00005: val_loss did not improve from 0.29386

Displaying our Confusion Matrix

In [10]:
from sklearn.metrics import classification_report, confusion_matrix

#Confution Matrix and Classification Report
Y_pred = model.predict_generator(validation_generator, nb_validation_samples // batch_size+1)
y_pred = np.argmax(Y_pred, axis=1)
print('Confusion Matrix')
print(confusion_matrix(validation_generator.classes, y_pred))
print('Classification Report')
target_names = list(class_labels.values())
print(classification_report(validation_generator.classes, y_pred, target_names=target_names))
Confusion Matrix
[[136   0   0 ...   0   0  21]
 [ 21 137   0 ...   0   0   0]
 [  0  21  68 ...   0   0   0]
 ...
 [  0   0   0 ... 143   0   0]
 [  0   0   0 ...  21 106   0]
 [  0   0   0 ...   0  21 228]]
Classification Report
                     precision    recall  f1-score   support

     Apple Braeburn       0.49      0.83      0.61       164
     Apple Golden 1       0.82      0.84      0.83       164
     Apple Golden 2       0.69      0.41      0.52       164
     Apple Golden 3       0.56      0.97      0.71       161
 Apple Granny Smith       0.58      0.87      0.70       164
        Apple Red 1       0.71      0.84      0.77       164
        Apple Red 2       0.94      0.60      0.73       164
        Apple Red 3       0.82      0.85      0.84       144
Apple Red Delicious       0.87      0.87      0.87       166
   Apple Red Yellow       0.80      0.78      0.79       164
            Apricot       0.56      0.87      0.68       164
            Avocado       0.88      0.48      0.62       143
       Avocado ripe       0.87      0.87      0.87       166
             Banana       0.72      0.63      0.67       166
         Banana Red       0.44      0.80      0.57       166
       Cactus fruit       0.52      0.56      0.54       166
       Cantaloupe 1       0.87      0.87      0.87       164
       Cantaloupe 2       0.89      0.87      0.88       164
          Carambula       0.63      0.68      0.65       166
           Cherry 1       0.52      0.87      0.65       164
           Cherry 2       1.00      0.28      0.44       246
     Cherry Rainier       0.90      0.80      0.85       246
   Cherry Wax Black       0.87      0.87      0.87       164
     Cherry Wax Red       0.86      0.81      0.84       164
  Cherry Wax Yellow       0.87      0.87      0.87       164
         Clementine       0.95      0.83      0.88       166
              Cocos       0.87      0.87      0.87       166
              Dates       0.87      0.87      0.87       166
         Granadilla       0.55      0.83      0.66       166
         Grape Pink       0.87      0.87      0.87       164
        Grape White       0.87      0.87      0.87       166
      Grape White 2       0.86      0.77      0.82       166
    Grapefruit Pink       0.85      0.74      0.79       166
   Grapefruit White       0.96      0.26      0.41       164
              Guava       0.96      0.66      0.78       166
        Huckleberry       0.87      0.87      0.87       166
               Kaki       0.73      0.87      0.79       166
               Kiwi       0.76      0.33      0.46       156
           Kumquats       1.00      0.54      0.70       166
              Lemon       0.51      0.76      0.61       164
        Lemon Meyer       0.87      0.87      0.87       166
              Limes       0.86      0.80      0.83       166
             Lychee       0.87      0.87      0.87       166
          Mandarine       1.00      0.22      0.36       166
              Mango       0.87      0.87      0.87       166
           Maracuja       0.86      0.75      0.80       166
 Melon Piel de Sapo       0.85      0.91      0.88       246
           Mulberry       0.87      0.87      0.87       164
          Nectarine       0.80      0.70      0.75       164
             Orange       0.73      0.32      0.44       160
             Papaya       0.63      0.65      0.64       164
      Passion Fruit       0.87      0.87      0.87       166
              Peach       0.52      0.77      0.62       164
         Peach Flat       0.43      0.87      0.57       164
               Pear       0.72      0.87      0.79       164
         Pear Abate       0.34      0.82      0.48       166
       Pear Monster       0.86      0.77      0.81       166
      Pear Williams       0.83      0.41      0.55       166
             Pepino       0.76      0.63      0.69       166
           Physalis       0.87      0.87      0.87       164
 Physalis with Husk       0.87      0.29      0.44       164
          Pineapple       0.89      0.70      0.79       166
     Pineapple Mini       0.97      0.21      0.34       163
       Pitahaya Red       0.87      0.86      0.86       166
               Plum       0.82      0.85      0.84       151
        Pomegranate       0.35      0.37      0.36       164
             Quince       0.87      0.87      0.87       166
           Rambutan       0.87      0.87      0.87       164
          Raspberry       0.87      0.87      0.87       166
              Salak       0.46      0.31      0.37       162
         Strawberry       0.64      0.87      0.74       164
   Strawberry Wedge       0.93      0.85      0.89       246
          Tamarillo       0.82      0.87      0.85       166
            Tangelo       0.80      0.87      0.84       166
           Tomato 1       0.85      0.91      0.88       246
           Tomato 2       0.89      0.73      0.80       225
           Tomato 3       0.57      0.91      0.70       246
           Tomato 4       1.00      0.31      0.47       160
  Tomato Cherry Red       0.87      0.87      0.87       164
      Tomato Maroon       0.83      0.83      0.83       127
             Walnut       0.92      0.92      0.92       249

          micro avg       0.74      0.74      0.74     13877
          macro avg       0.78      0.74      0.73     13877
       weighted avg       0.79      0.74      0.74     13877

In [3]:
import matplotlib.pyplot as plt
import sklearn
from sklearn.metrics import classification_report, confusion_matrix
import numpy as np
from keras.models import load_model

img_row, img_height, img_depth = 32,32,3
model = load_model('/home/deeplearningcv/DeepLearningCV/Trained Models/fruits_fresh_cnn.h5')

class_labels = validation_generator.class_indices
class_labels = {v: k for k, v in class_labels.items()}
classes = list(class_labels.values())

nb_train_samples = 41322
nb_validation_samples = 13877

#Confution Matrix and Classification Report
Y_pred = model.predict_generator(validation_generator, nb_validation_samples // batch_size+1)
y_pred = np.argmax(Y_pred, axis=1)

target_names = list(class_labels.values())

plt.figure(figsize=(20,20))
cnf_matrix = confusion_matrix(validation_generator.classes, y_pred)

plt.imshow(cnf_matrix, interpolation='nearest')
plt.colorbar()
tick_marks = np.arange(len(classes))
_ = plt.xticks(tick_marks, classes, rotation=90)
_ = plt.yticks(tick_marks, classes)

Testing our fruit classifier

In [6]:
from keras.models import load_model
from keras.preprocessing import image
import numpy as np
import os
import cv2
import numpy as np
from os import listdir
from os.path import isfile, join
import re

def draw_test(name, pred, im, true_label):
    BLACK = [0,0,0]
    expanded_image = cv2.copyMakeBorder(im, 160, 0, 0, 500 ,cv2.BORDER_CONSTANT,value=BLACK)
    cv2.putText(expanded_image, "predited - "+ pred, (20, 60) , cv2.FONT_HERSHEY_SIMPLEX,1, (0,0,255), 2)
    cv2.putText(expanded_image, "true - "+ true_label, (20, 120) , cv2.FONT_HERSHEY_SIMPLEX,1, (0,255,0), 2)
    cv2.imshow(name, expanded_image)


def getRandomImage(path, img_width, img_height):
    """function loads a random images from a random folder in our test path """
    folders = list(filter(lambda x: os.path.isdir(os.path.join(path, x)), os.listdir(path)))
    random_directory = np.random.randint(0,len(folders))
    path_class = folders[random_directory]
    file_path = path + path_class
    file_names = [f for f in listdir(file_path) if isfile(join(file_path, f))]
    random_file_index = np.random.randint(0,len(file_names))
    image_name = file_names[random_file_index]
    final_path = file_path + "/" + image_name
    return image.load_img(final_path, target_size = (img_width, img_height)), final_path, path_class

# dimensions of our images
img_width, img_height = 32, 32


files = []
predictions = []
true_labels = []
# predicting images
for i in range(0, 10):
    path = './fruits-360/validation/' 
    img, final_path, true_label = getRandomImage(path, img_width, img_height)
    files.append(final_path)
    true_labels.append(true_label)
    x = image.img_to_array(img)
    x = x * 1./255
    x = np.expand_dims(x, axis=0)
    images = np.vstack([x])
    classes = model.predict_classes(images, batch_size = 10)
    predictions.append(classes)
    
for i in range(0, len(files)):
    image = cv2.imread((files[i]))
    draw_test("Prediction", class_labels[predictions[i][0]], image, true_labels[i])
    cv2.waitKey(0)

cv2.destroyAllWindows()