# install.packages("keras")
# library("keras")
# install_keras(tensorflow = "gpu")
# install.packages("pROC")

Load Libraries

library(keras)
library(grid)

Load Cifar_10

# Open the help page for the CIFAR-10 loader
?dataset_cifar10
# Download (or load cached) CIFAR-10; returns a list of train/test splits
cifar <- dataset_cifar10()
names(cifar)
## [1] "train" "test"
# Each split is a named list with images ($x) and labels ($y);
# pull the components out explicitly
train_images <- cifar$train$x
train_labels <- cifar$train$y
test_images  <- cifar$test$x
test_labels  <- cifar$test$y

# 50,000 training images of 32x32 pixels with 3 color channels
dim(train_images)
## [1] 50000    32    32     3
# One label per training image
length(train_labels)
## [1] 50000
# Classes are perfectly balanced: 5,000 images per class (labels 0-9)
table(train_labels)
## train_labels
##    0    1    2    3    4    5    6    7    8    9 
## 5000 5000 5000 5000 5000 5000 5000 5000 5000 5000
# 10,000 test images with the same 32x32x3 shape
dim(test_images)
## [1] 10000    32    32     3

10 classes: airplane, automobile, bird, cat, deer, dog, frog, horse, ship, truck.

# Draw a 10x10 grid of sample images: one column per class (labels 0-9),
# 10 randomly chosen training images per class.
n <- nrow(train_images)
for (i in 0:9) {
  # which() already returns the positions of class-i images,
  # so no extra (1:n)[...] indexing is needed
  ind <- sample(which(train_labels == i), 10)
  for (j in 1:10) {
    # Rescale pixel values to [0, 1] for grid.raster()
    img <- train_images[ind[j], , , ] / 255
    # Position column i+1 of 11 horizontally, row j of 11 vertically
    grid.raster(img, x = (i + 1) / 11, y = j / 11, height = 0.09)
  }
}

Prepare Data

Each label is one-hot encoded as a binary vector of length 10: for an image with label j (0–9), the (j+1)-th entry is 1 and all other entries are 0.

# One-hot encode the integer labels (0-9) into length-10 binary vectors
train_labels <- to_categorical(train_labels, num_classes = 10)
test_labels <- to_categorical(test_labels, num_classes = 10)

# Scale pixel intensities from [0, 255] to [0, 1]
train_images <- train_images / 255
test_images <- test_images / 255

Specify Network Architecture

# CNN: two conv blocks (conv-relu-conv-relu-pool-dropout) followed by a
# dense classifier head with softmax over the 10 classes.
model <- keras_model_sequential()
model %>%
  # Start with hidden 2D convolutional layer being fed 32x32 pixel images.
  # Spell out `filters` in full rather than relying on partial matching.
  layer_conv_2d(
    filters = 32, kernel_size = c(3, 3), padding = "same",
    input_shape = c(32, 32, 3)
    ) %>%
  layer_activation("relu") %>%

  # Second hidden layer (no padding, so spatial size shrinks to 30x30)
  layer_conv_2d(filters = 32, kernel_size = c(3, 3)) %>%
  layer_activation("relu") %>%

  # Max pooling and dropout
  layer_max_pooling_2d(pool_size = c(2, 2)) %>%
  layer_dropout(rate = 0.25) %>%

  # Two additional hidden layers
  layer_conv_2d(filters = 32, kernel_size = c(3, 3), padding = "same") %>%
  layer_activation("relu") %>%

  layer_conv_2d(filters = 32, kernel_size = c(3, 3)) %>%
  layer_activation("relu") %>%

  # Max pooling and dropout
  layer_max_pooling_2d(pool_size = c(2, 2)) %>%
  layer_dropout(rate = 0.25) %>%

  # Flatten the 6x6x32 feature maps and feed into a dense layer
  layer_flatten() %>%
  layer_dense(units = 512) %>%
  layer_activation("relu") %>%
  layer_dropout(rate = 0.5) %>%

  # Output layer: one unit per class, softmax for class probabilities
  layer_dense(units = 10) %>%
  layer_activation("softmax")

Pick a Loss function and an Optimizer

# Adam optimizer with a small learning rate and mild decay.
# NOTE(review): `lr` and `decay` are deprecated in newer keras releases in
# favor of `learning_rate` / schedule objects — confirm against the
# installed keras version.
opt <- optimizer_adam(lr = 1e-4, decay = 1e-6)

# Compile in place: multi-class cross-entropy loss, tracking accuracy
compile(
  model,
  loss = "categorical_crossentropy",
  optimizer = opt,
  metrics = "accuracy"
)

summary(model)
## ___________________________________________________________________________
## Layer (type)                     Output Shape                  Param #     
## ===========================================================================
## conv2d_1 (Conv2D)                (None, 32, 32, 32)            896         
## ___________________________________________________________________________
## activation_1 (Activation)        (None, 32, 32, 32)            0           
## ___________________________________________________________________________
## conv2d_2 (Conv2D)                (None, 30, 30, 32)            9248        
## ___________________________________________________________________________
## activation_2 (Activation)        (None, 30, 30, 32)            0           
## ___________________________________________________________________________
## max_pooling2d_1 (MaxPooling2D)   (None, 15, 15, 32)            0           
## ___________________________________________________________________________
## dropout_1 (Dropout)              (None, 15, 15, 32)            0           
## ___________________________________________________________________________
## conv2d_3 (Conv2D)                (None, 15, 15, 32)            9248        
## ___________________________________________________________________________
## activation_3 (Activation)        (None, 15, 15, 32)            0           
## ___________________________________________________________________________
## conv2d_4 (Conv2D)                (None, 13, 13, 32)            9248        
## ___________________________________________________________________________
## activation_4 (Activation)        (None, 13, 13, 32)            0           
## ___________________________________________________________________________
## max_pooling2d_2 (MaxPooling2D)   (None, 6, 6, 32)              0           
## ___________________________________________________________________________
## dropout_2 (Dropout)              (None, 6, 6, 32)              0           
## ___________________________________________________________________________
## flatten_1 (Flatten)              (None, 1152)                  0           
## ___________________________________________________________________________
## dense_1 (Dense)                  (None, 512)                   590336      
## ___________________________________________________________________________
## activation_5 (Activation)        (None, 512)                   0           
## ___________________________________________________________________________
## dropout_3 (Dropout)              (None, 512)                   0           
## ___________________________________________________________________________
## dense_2 (Dense)                  (None, 10)                    5130        
## ___________________________________________________________________________
## activation_6 (Activation)        (None, 10)                    0           
## ===========================================================================
## Total params: 624,106
## Trainable params: 624,106
## Non-trainable params: 0
## ___________________________________________________________________________

Training

model %>% fit(train_images, train_labels, batch_size=64, epochs=80)

Evaluation

# Loss and accuracy on the held-out test set
score <- model %>% evaluate(test_images, test_labels)
score
## $loss
## [1] 0.6055766
## 
## $acc
## [1] 0.7951
# predict_classes() was removed from keras (TF >= 2.6); derive the
# predicted class from the softmax probabilities instead.
probs <- model %>% predict(test_images)
# max.col with ties.method = "first" reproduces argmax semantics;
# subtract 1 to return to 0-based class labels
class_pred <- max.col(probs, ties.method = "first") - 1L
class_pred <- to_categorical(class_pred, num_classes = 10)
# Confusion matrix: rows = true class, columns = predicted class
t(test_labels) %*% class_pred
##       [,1] [,2] [,3] [,4] [,5] [,6] [,7] [,8] [,9] [,10]
##  [1,]  830   16   30   18    9    3   10    7   45    32
##  [2,]   11  896    2    3    2    1    7    3   17    58
##  [3,]   59    1  673   38   74   47   60   24   13    11
##  [4,]   13    6   60  596   69  142   52   31   13    18
##  [5,]   11    2   51   36  779   21   45   45    9     1
##  [6,]    9    3   30  140   44  698   20   45    4     7
##  [7,]    6    1   23   36   26   13  882    3    7     3
##  [8,]   14    1   25   28   43   46    5  831    2     5
##  [9,]   47   21    8   10    6    4    2    2  878    22
## [10,]   16   47    4    4    3    4    4   11   19   888