-
Notifications
You must be signed in to change notification settings - Fork 1
/
11-r-mnist-lenet.R
47 lines (37 loc) · 1.39 KB
/
11-r-mnist-lenet.R
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
# LeNet-style CNN on a 6,000-image subsample of MNIST (keras for R).
# Side effects: writes training-history plot to "11-r-mnist-lenet.pdf" and
# prints test-set loss/accuracy.
pdf(file = "11-r-mnist-lenet.pdf", width = 5.83, height = 4.13)
library(keras)

# Load MNIST; %<-% (zeallot, re-exported by keras) destructures the
# train/test lists into four variables.
c(c(x_train, y_train), c(x_test, y_test)) %<-% dataset_mnist()

# Subsample the training set to speed things up.
# NOTE: no set.seed() here, so the subsample differs on every run —
# add one if reproducibility is required.
my_index <- sample(nrow(x_train), 6000)  # robust to dataset size (was 1:60000)
x_train <- x_train[my_index, , ]
y_train <- y_train[my_index]

# Scale pixel intensities from [0, 255] to [0, 1].
x_train <- x_train / 255
x_test <- x_test / 255

# Add a trailing channel axis: (n, 28, 28) -> (n, 28, 28, 1);
# -1 lets array_reshape infer the sample count.
x_train2d <- x_train %>% array_reshape(c(-1, 28, 28, 1))
x_test2d <- x_test %>% array_reshape(c(-1, 28, 28, 1))

# LeNet-style architecture: two conv/pool stages, then a dense classifier
# with dropout regularization and a 10-way softmax output.
my_model <- keras_model_sequential() %>%
  layer_conv_2d(filters = 20, kernel_size = 5, activation = "relu",
                input_shape = c(28, 28, 1)) %>%
  layer_max_pooling_2d(pool_size = 2, strides = 2) %>%
  layer_conv_2d(filters = 50, kernel_size = 5, activation = "relu") %>%
  layer_max_pooling_2d(pool_size = 2, strides = 2) %>%
  layer_dropout(rate = 0.25) %>%
  layer_flatten() %>%
  layer_dense(units = 500, activation = "relu") %>%
  layer_dropout(rate = 0.5) %>%
  layer_dense(units = 10, activation = "softmax")

# Sparse categorical cross-entropy: labels are integer class ids (0-9),
# not one-hot vectors.
my_model %>% compile(
  loss = "sparse_categorical_crossentropy",
  optimizer = "rmsprop",
  metrics = c("accuracy"))

# Stop when validation loss fails to improve for 5 epochs and roll back
# to the best weights seen so far.
my_cb <- callback_early_stopping(patience = 5,
                                 restore_best_weights = TRUE)

my_history <- my_model %>%
  fit(x = x_train2d,
      y = y_train,
      validation_split = 0.2,
      batch_size = 128,
      epochs = 20,
      callbacks = list(my_cb),
      verbose = 0)

plot(my_history)

my_model %>% evaluate(x = x_test2d, y = y_test)

# BUG FIX: the pdf() device was never closed, leaving the output file
# open and possibly incomplete. Close it so the plot is flushed to disk.
dev.off()