This report, in turn, is a continuation of the last two reports on the digit dataset.
The model used will be one that has already been trained, as instructed. The model-building demonstration is omitted so that the report does not become too long.
library(keras)

# Load the previously trained model and the raw test pixels
model_cnn <- load_model_hdf5("modelo_1_digit")
x_test <- read.csv("/opt/datasets/digits/test.csv")
# 28 x 28 DCT basis matrix
dct28 <- mrbsizeR::dctMatrix(28)

aplica_dct <- function(x) {
  # Reshape the flattened 784-pixel vector into a 28 x 28 image and reorient it
  q <- x %>%
    matrix(28, 28, byrow = TRUE) %>%
    apply(2, rev) %>%
    t()
  # Apply the separable 2-D DCT, as in the earlier reports
  q <- t(dct28) %*% q %*% dct28
  as.numeric(q)
}
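As a quick sanity check (illustrative, not part of the original report): the basis returned by mrbsizeR::dctMatrix should be orthonormal, so the transform loses no information, and each flattened image still yields 784 coefficients.
# The DCT basis should be orthonormal: t(dct28) %*% dct28 is (numerically) the identity
max(abs(t(dct28) %*% dct28 - diag(28)))  # effectively zero, up to floating-point error
# Transforming one flattened test image returns 28 * 28 = 784 coefficients
length(aplica_dct(unlist(x_test[1, ])))
## [1] 784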
x_test_transf <- x_test %>%
  apply(1, aplica_dct) %>%
  t()

# Append the DCT coefficients to the raw pixels (784 + 784 = 1568 columns)
x_test <- cbind(x_test, x_test_transf)
Creating the array-type object.
dim(x_test)
## [1] 28000 1568
# Keep a random sample of 4208 test images and coerce to a numeric matrix
x_test <- x_test[sample(nrow(x_test), 4208), ]
x_test <- data.matrix(x_test)
dim(x_test)
## [1] 4208 1568
# Reshape into the 4-D tensor the CNN expects: (samples, 28, 56, 1)
x_test_cnn <- array_reshape(x_test, c(nrow(x_test), 28, 28 * 2, 1))
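A quick dimension check (illustrative) confirms the tensor matches the network's input layer:
dim(x_test_cnn)
## [1] 4208   28   56    1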
# Labels for the test sample, saved in the earlier reports (assumed one-hot encoded)
y_test <- readRDS("y_test.rds")
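If y_test.rds instead stored raw integer digits, a hypothetical fallback (not in the original report) would be to one-hot encode them, since categorical_crossentropy expects a 10-column label matrix:
# Hypothetical fallback: one-hot encode integer labels 0-9 if needed
if (is.null(dim(y_test))) {
  y_test <- to_categorical(y_test, num_classes = 10)
}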
Training the model
model_cnn <- keras_model_sequential() %>%
  # Two convolutional blocks over the 28 x 56 input (raw pixels + DCT)
  layer_conv_2d(filters = 32, kernel_size = c(3, 3), activation = 'relu',
                input_shape = c(28, 28 * 2, 1)) %>%
  layer_conv_2d(filters = 64, kernel_size = c(3, 3), activation = 'relu') %>%
  layer_max_pooling_2d(pool_size = c(2, 2)) %>%
  layer_dropout(rate = 0.25) %>%
  layer_flatten() %>%
  # Fully connected layers of decreasing width, each followed by dropout
  layer_dense(units = 784 * 4, activation = 'relu', name = 'features1') %>%
  layer_dropout(rate = 0.4) %>%
  layer_dense(units = 784 * 2, activation = 'relu', name = 'features2') %>%
  layer_dropout(rate = 0.2) %>%
  layer_dense(units = 784, activation = 'relu', name = 'features3') %>%
  layer_dropout(rate = 0.1) %>%
  layer_dense(units = 10, activation = 'softmax')
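The resulting architecture and parameter counts can be inspected with summary() (an illustrative step, not shown in the original report):
summary(model_cnn)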
# Compile the model
model_cnn %>% compile(
  loss = "categorical_crossentropy",
  optimizer = optimizer_adadelta(),
  metrics = c('accuracy')
)
history <- model_cnn %>% fit(
  # x_train_cnn and y_train carry over from the previous reports; the labels are
  # stacked three times, matching a training tensor that presumably holds three
  # copies (or variants) of each image
  x_train_cnn, rbind(y_train, y_train, y_train),
  epochs = 2, batch_size = 128,
  validation_split = 0.2
)
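The training curves can be plotted directly from the returned history object (illustrative):
# Loss and accuracy per epoch, for both the training and validation splits
plot(history)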
Testing the model's performance
model_cnn %>% evaluate(x_test_cnn, y_test)
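Beyond the aggregate metrics, per-image predictions can be extracted; a minimal sketch (not in the original report), where max.col() picks the highest-probability class in each row:
# Class probabilities for each sampled test image (rows sum to 1)
probs <- model_cnn %>% predict(x_test_cnn)
# Predicted digit: index of the largest probability, shifted to the 0-9 range
pred_digit <- max.col(probs) - 1
table(pred_digit)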