This activity builds a neural network to identify the species in the iris dataset (which has 50 rows for each of its 3 species) using only 4 features. The project uses the tidyverse and keras packages, and three strategies were tested:

1. Increasing the number of neurons in the input layer.
2. Using bootstrap resampling.
3. Working with the extended (augmented) training set.
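As a quick sanity check on the dataset description above, a minimal sketch (assuming the tidyverse is attached) that counts rows per species:

library(tidyverse)

# Confirm that iris has 50 rows for each of the 3 species
iris %>% count(Species)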

# Model 1

The data were split into training and test sets, and the model below was built.

library(tidyverse)
library(keras)

set.seed(123)

# Assign each row to the training (80%) or test (20%) set, then shuffle
base <- iris %>%
  mutate(Set = sample(x = c("train", "test"),
                      size = n(),
                      replace = TRUE,
                      prob = c(.8, .2))) %>%
  sample_frac(1, replace = FALSE)
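The resulting split sizes can be checked directly (a quick sketch; the exact counts depend on the seed):

# How many rows landed in each set
base %>% count(Set)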

# Training features, rescaled to [0, 1] by min-max normalization
Train_Features <- base %>%
  filter(Set == "train") %>%
  select(-Species, -Set) %>%
  mutate_all(function(x) (x - min(x)) / (max(x) - min(x))) %>%
  as.matrix()

# Training labels as a one-hot matrix (species coded 0, 1, 2)
Train_Labels <- base %>%
  filter(Set == "train") %>%
  select(Species) %>%
  mutate(Species = as.numeric(Species) - 1) %>%
  as.matrix() %>%
  to_categorical()
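For reference, to_categorical() turns the 0-based integer codes into indicator columns; a minimal sketch of its behavior:

# Each code maps to one indicator column:
# 0 -> (1, 0, 0), 1 -> (0, 1, 0), 2 -> (0, 0, 1)
to_categorical(c(0, 1, 2))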


# Test features, rescaled with the same min-max formula
Test_Features <- base %>%
  filter(Set == "test") %>%
  select(-Species, -Set) %>%
  mutate_all(function(x) (x - min(x)) / (max(x) - min(x))) %>%
  as.matrix()

# Test labels as a one-hot matrix
Test_Labels <- base %>%
  filter(Set == "test") %>%
  select(Species) %>%
  mutate(Species = as.numeric(Species) - 1) %>%
  as.matrix() %>%
  to_categorical()

# Augmented copy of the training set: resample rows with replacement and
# add Gaussian noise to each numeric column (sd equal to the column's sd)
new_data_1 <- base %>% filter(Set == "train") %>%
  sample_frac(1, replace = TRUE) %>%
  mutate_if(is.numeric, function(x) x + rnorm(length(x), mean = 0, sd = sd(x)))

new_data <- new_data_1

# Training features with the noisy replicas appended, rescaled to [0, 1]
Train_Features_Augmented <- base %>%
  filter(Set == "train") %>%
  bind_rows(new_data) %>%
  select(-Species, -Set) %>%
  mutate_all(function(x) (x - min(x)) / (max(x) - min(x))) %>%
  as.matrix()

# Corresponding one-hot labels for the augmented training set
Train_Labels_Augmented <- base %>%
  filter(Set == "train") %>%
  bind_rows(new_data) %>%
  select(Species) %>%
  mutate(Species = as.numeric(Species) - 1) %>%
  as.matrix() %>%
  to_categorical()

Increasing the number of neurons in the first layer while keeping only 3 layers (the first with 64 neurons, the second with 32, and the third with only 3 neurons, one per class) gave a much better test accuracy, nearly doubling it (from about 0.38 to 0.62).

# Baseline configuration: 32 neurons in the first layer
model <- keras_model_sequential()

model %>%
  layer_dense(units = 32, activation = "relu", input_shape = ncol(Train_Features_Augmented)) %>%
  layer_dense(units = 32, activation = "relu") %>%
  layer_dense(units = 3, activation = "softmax")


summary(model)
## Model: "sequential"
## ___________________________________________________________________________
## Layer (type)                     Output Shape                  Param #     
## ===========================================================================
## dense (Dense)                    (None, 32)                    160         
## ___________________________________________________________________________
## dense_1 (Dense)                  (None, 32)                    1056        
## ___________________________________________________________________________
## dense_2 (Dense)                  (None, 3)                     99          
## ===========================================================================
## Total params: 1,315
## Trainable params: 1,315
## Non-trainable params: 0
## ___________________________________________________________________________
model %>% compile(loss = "categorical_crossentropy",
                  optimizer = optimizer_adagrad(),
                  metrics = c('accuracy')
)
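A training step would normally sit between compile and evaluate. A minimal sketch of such a call, using the augmented matrices built above and the hyperparameters from the commented-out fit calls later in this document (both are assumptions, not part of the original run):

# Hypothetical training call; epochs and batch_size are illustrative
history <- model %>% fit(Train_Features_Augmented, Train_Labels_Augmented,
                         validation_split = 0.20,
                         epochs = 300, batch_size = 15,
                         shuffle = TRUE)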

model %>% evaluate(Test_Features,Test_Labels)
## $loss
## [1] 1.033165
## 
## $acc
## [1] 0.3793103
The same architecture, with the first layer widened to 64 neurons:

# Second configuration: 64 neurons in the first layer
model <- keras_model_sequential()

model %>%
  layer_dense(units = 64, activation = "relu", input_shape = ncol(Train_Features_Augmented)) %>%
  layer_dense(units = 32, activation = "relu") %>%
  layer_dense(units = 3, activation = "softmax")


summary(model)
## Model: "sequential_1"
## ___________________________________________________________________________
## Layer (type)                     Output Shape                  Param #     
## ===========================================================================
## dense_3 (Dense)                  (None, 64)                    320         
## ___________________________________________________________________________
## dense_4 (Dense)                  (None, 32)                    2080        
## ___________________________________________________________________________
## dense_5 (Dense)                  (None, 3)                     99          
## ===========================================================================
## Total params: 2,499
## Trainable params: 2,499
## Non-trainable params: 0
## ___________________________________________________________________________
model %>% compile(loss = "categorical_crossentropy",
                  optimizer = optimizer_adagrad(),
                  metrics = c('accuracy')
)

model %>% evaluate(Test_Features,Test_Labels)
## $loss
## [1] 1.043359
## 
## $acc
## [1] 0.6206896

# Model 2

In this second model, the bootstrap strategy was used: it generates resampled replicas of the training data, with the aim of improving the model's accuracy. The same training/test split as before was kept.

###### Model 2 ######

# Bootstrap augmentation: within each species, resample rows to 10x the
# original size, then resample each numeric column with replacement
new_data_2 <- base %>% filter(Set == "train") %>%
  group_split(Species) %>%
  map_dfr(.f = function(x) {
    x %>%
      sample_frac(10, replace = TRUE) %>%
      mutate_if(is.numeric, function(y) sample(y, length(y), replace = TRUE))
  })

new_data <- new_data_2
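To make the column-wise resampling concrete, a toy sketch with hypothetical values, showing one numeric column being redrawn with replacement:

x <- c(5.1, 4.9, 4.7, 4.6)             # hypothetical Sepal.Length values
sample(x, length(x), replace = TRUE)   # some values repeat, others drop out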

Train_Features_Augmented <- base %>%
  filter(Set == "train") %>%
  bind_rows(new_data) %>%
  select(-Species, -Set) %>%
  mutate_all(function(x) (x - min(x)) / (max(x) - min(x))) %>%
  as.matrix()

Train_Labels_Augmented <- base %>%
  filter(Set == "train") %>%
  bind_rows(new_data) %>%
  select(Species) %>%
  mutate(Species = as.numeric(Species) - 1) %>%
  as.matrix() %>%
  to_categorical()

model <- keras_model_sequential()

model %>%
  layer_dense(units = 32, activation = "relu", input_shape = ncol(Train_Features_Augmented)) %>%
  layer_dense(units = 32, activation = "relu") %>%
  layer_dense(units = 3, activation = "softmax")


summary(model)
## Model: "sequential_2"
## ___________________________________________________________________________
## Layer (type)                     Output Shape                  Param #     
## ===========================================================================
## dense_6 (Dense)                  (None, 32)                    160         
## ___________________________________________________________________________
## dense_7 (Dense)                  (None, 32)                    1056        
## ___________________________________________________________________________
## dense_8 (Dense)                  (None, 3)                     99          
## ===========================================================================
## Total params: 1,315
## Trainable params: 1,315
## Non-trainable params: 0
## ___________________________________________________________________________
model %>% compile(loss = "categorical_crossentropy",
                  optimizer = optimizer_adagrad(),
                  metrics = c('accuracy')
)

# history <- model %>% fit(Train_Features, Train_Labels,
# validation_split = 0.20,
# epochs=300, batch_size = 15,
# shuffle = TRUE)


model %>% evaluate(Test_Features,Test_Labels)
## $loss
## [1] 1.163562
## 
## $acc
## [1] 0

As shown above, the accuracy came out to 0, a very poor result.
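To diagnose a result like this, the predicted and true classes can be compared directly; a minimal sketch, assuming the matrices built above are in scope:

# Predicted class = index of the largest softmax output (0-based)
probs <- model %>% predict(Test_Features)
pred_class <- max.col(probs) - 1
true_class <- max.col(Test_Labels) - 1

# Confusion table of true vs. predicted species codes
table(true = true_class, predicted = pred_class)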

# Model 3

This third model was built around the idea of adding more layers, as can be seen below:

###### Model 3 ######

# Same bootstrap augmentation as in Model 2
new_data_2 <- base %>% filter(Set == "train") %>%
  group_split(Species) %>%
  map_dfr(.f = function(x) {
    x %>%
      sample_frac(10, replace = TRUE) %>%
      mutate_if(is.numeric, function(y) sample(y, length(y), replace = TRUE))
  })

new_data <- new_data_2

Train_Features_Augmented <- base %>%
  filter(Set == "train") %>%
  bind_rows(new_data) %>%
  select(-Species, -Set) %>%
  mutate_all(function(x) (x - min(x)) / (max(x) - min(x))) %>%
  as.matrix()

Train_Labels_Augmented <- base %>%
  filter(Set == "train") %>%
  bind_rows(new_data) %>%
  select(Species) %>%
  mutate(Species = as.numeric(Species) - 1) %>%
  as.matrix() %>%
  to_categorical()

model <- keras_model_sequential()

model %>%
  layer_dense(units = 32, activation = "relu", input_shape = ncol(Train_Features_Augmented)) %>%
  layer_dense(units = 32, activation = "relu") %>%
  layer_dense(units = 3, activation = "softmax")


summary(model)
## Model: "sequential_3"
## ___________________________________________________________________________
## Layer (type)                     Output Shape                  Param #     
## ===========================================================================
## dense_9 (Dense)                  (None, 32)                    160         
## ___________________________________________________________________________
## dense_10 (Dense)                 (None, 32)                    1056        
## ___________________________________________________________________________
## dense_11 (Dense)                 (None, 3)                     99          
## ===========================================================================
## Total params: 1,315
## Trainable params: 1,315
## Non-trainable params: 0
## ___________________________________________________________________________
model %>% compile(loss = "categorical_crossentropy",
                  optimizer = optimizer_adagrad(),
                  metrics = c('accuracy')
)

# history <- model %>% fit(Train_Features, Train_Labels,
# validation_split = 0.20,
# epochs=300, batch_size = 15,
# shuffle = TRUE)


model %>% evaluate(Test_Features,Test_Labels)
## $loss
## [1] 0.9784837
## 
## $acc
## [1] 0.3793103

As shown above, this model reached an accuracy of about 0.38, matching the baseline configuration of model 1; the best overall result remained the 0.62 obtained by model 1 with 64 neurons in the first layer.
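A recap of the test-set accuracies reported above:

| Model | Strategy | Test accuracy |
|-------|----------|---------------|
| 1 (32 neurons) | Noise augmentation, layers 32-32-3 | 0.38 |
| 1 (64 neurons) | Noise augmentation, layers 64-32-3 | 0.62 |
| 2 | Bootstrap augmentation, layers 32-32-3 | 0.00 |
| 3 | Bootstrap augmentation, layers 32-32-3 | 0.38 |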