Helper functions

# Libraries
library(nnet)
library(plotly)
## Loading required package: ggplot2
## 
## Attaching package: 'plotly'
## The following object is masked from 'package:ggplot2':
## 
##     last_plot
## The following object is masked from 'package:stats':
## 
##     filter
## The following object is masked from 'package:graphics':
## 
##     layout
library(Metrics)
library(monmlp)
## Loading required package: optimx
# Function to iterate over the data and apply the function to be approximated
get_y_or_z <- function(x1_list, x2_list, func) {

  index <- 1
  z_list <- c()

  for (x1 in x1_list){
    x2 <- x2_list[[index]]
    z <- func(x1, x2)
    z_list <- c(z_list, z)
    index <- index + 1
  }
  return(z_list)
}
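
The loop grows z_list one element at a time, which works but scales poorly for long inputs; an equivalent vectorized version is sketched below (get_y_or_z_vec is a hypothetical name, not used in the rest of the document).

# Vectorized alternative: apply func element-wise over the two input vectors
get_y_or_z_vec <- function(x1_list, x2_list, func) {
  mapply(func, x1_list, x2_list)
}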

# Split the data into training and test sets
get_train_test <- function(df){
  smp_size <- floor(0.75 * nrow(df))

  ## set the seed to make the partition reproducible
  set.seed(123)
  train_ind <- sample(seq_len(nrow(df)), size = smp_size)

  train <- df[train_ind, ]
  test <- df[-train_ind, ]

  # c() flattens the two data frames into one list of columns:
  # elements 1-2 are the training columns, elements 3-4 the test columns
  return(c(train, test))
}
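
A short usage sketch to make the returned structure explicit; the callers below reassemble the pieces by position in exactly this way (the toy data frame and the *_example names are only for illustration).

# Illustration of the return value of get_train_test
# note: get_train_test calls set.seed(123), so running this also resets the RNG state
parts <- get_train_test(data.frame(x1 = runif(8), x2 = runif(8)))
train_example <- data.frame(parts[1], parts[2])   # x1, x2 of the training rows
test_example  <- data.frame(parts[3], parts[4])   # x1, x2 of the test rows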

5.5

Approximate the following functions using an MLP:

a) \(f(x_{1}, x_{2}) = \max\left(e^{-x_{1}^{2}},\; e^{-2x_{2}^{2}},\; 2e^{-0.5(x_{1}^{2} + x_{2}^{2})}\right)\)

# Function to be approximated

fx <- function(x1, x2){
 return(max(c(exp(-(x1^2)), exp(-2 * (x2^2)), (2 * exp(-0.5 * ((x1^2) + (x2^2)))) ) ))
}

# Training dataset

x1 <- (runif(10000,-1,1))
x2 <- (runif(10000,-1,1))


grid <- data.frame(x1,x2)

train_test <- get_train_test(grid)
train <- data.frame(train_test[1],train_test[2]) 

y <- get_y_or_z(train$x1,train$x2,fx)


# Test dataset

test <- data.frame(train_test[3],train_test[4]) 

y_test <-  get_y_or_z(test$x1,test$x2,fx)


# Neural network
r <- monmlp.fit(as.matrix(train), as.matrix(y), hidden1=3, n.ensemble=15, monotone=0, bag=TRUE)
## ** Ensemble 1 
## ** Bagging on
## 0.00094201 
## ** 0.00094201 
## 
## ** Ensemble 2 
## ** Bagging on
## 0.0009050873 
## ** 0.0009050873 
## 
## ** Ensemble 3 
## ** Bagging on
## 0.0009104344 
## ** 0.0009104344 
## 
## ** Ensemble 4 
## ** Bagging on
## 0.000946098 
## ** 0.000946098 
## 
## ** Ensemble 5 
## ** Bagging on
## 0.000925214 
## ** 0.000925214 
## 
## ** Ensemble 6 
## ** Bagging on
## 0.0009400728 
## ** 0.0009400728 
## 
## ** Ensemble 7 
## ** Bagging on
## 0.0009355122 
## ** 0.0009355122 
## 
## ** Ensemble 8 
## ** Bagging on
## 0.0009206485 
## ** 0.0009206485 
## 
## ** Ensemble 9 
## ** Bagging on
## 0.0009484372 
## ** 0.0009484372 
## 
## ** Ensemble 10 
## ** Bagging on
## 0.0009421707 
## ** 0.0009421707 
## 
## ** Ensemble 11 
## ** Bagging on
## 0.0009163181 
## ** 0.0009163181 
## 
## ** Ensemble 12 
## ** Bagging on
## 0.0009485744 
## ** 0.0009485744 
## 
## ** Ensemble 13 
## ** Bagging on
## 0.0009434932 
## ** 0.0009434932 
## 
## ** Ensemble 14 
## ** Bagging on
## 0.0008986663 
## ** 0.0008986663 
## 
## ** Ensemble 15 
## ** Bagging on
## 0.0009367858 
## ** 0.0009367858
y_pred <- monmlp.predict(x = as.matrix(test), weights = r)


# Test dataset + predictions
df <- data.frame(test$x1,test$x2, y_test, y_pred)

# Metrics
mse_metric <- mse(y_test, y_pred)
rmse_metric <- rmse(y_test, y_pred)
mae_metric <- mae(y_test, y_pred)
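
For reference, the three metrics reported below follow the usual definitions (the Metrics functions mse, rmse, and mae take the observed values first and the predictions second):

\[
\mathrm{MSE} = \frac{1}{n}\sum_{i=1}^{n}\left(y_i - \hat{y}_i\right)^2, \qquad
\mathrm{RMSE} = \sqrt{\mathrm{MSE}}, \qquad
\mathrm{MAE} = \frac{1}{n}\sum_{i=1}^{n}\left|\,y_i - \hat{y}_i\right|
\]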

MSE

print(mse_metric)
## [1] 1.352159e-05

RMSE

print(rmse_metric)
## [1] 0.003677171

MAE

print(mae_metric)
## [1] 0.002965715

Plot of the function vs. the approximated function
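
A minimal plotly sketch of this comparison, assuming the df built in the chunk above (columns test.x1, test.x2, y_test, y_pred); the marker size and trace names are arbitrary choices, not part of the original chunks.

# Sketch: overlay true values and MLP predictions on the test points
plot_ly(df) %>%
  add_trace(x = ~test.x1, y = ~test.x2, z = ~y_test,
            type = "scatter3d", mode = "markers",
            marker = list(size = 2), name = "f(x1, x2)") %>%
  add_trace(x = ~test.x1, y = ~test.x2, z = ~y_pred,
            type = "scatter3d", mode = "markers",
            marker = list(size = 2), name = "MLP prediction")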

b) \(f(x, y) = 0.5 + 0.1x^{2}\cos(y + 3) + 0.4xye^{1-y^{2}}\)

# Function to be approximated

fx <-  function(x, y) {
  return(0.5 + 0.1 * ((x^2) * cos(y + 3)) + (0.4 * x * y* exp(1-(y^2))))
} 



# Training dataset

x1 <- (runif(10000,-1,1))
x2 <- (runif(10000,-1,1))


grid <- data.frame(x1,x2)

train_test <- get_train_test(grid)
train <- data.frame(train_test[1],train_test[2]) 

y <- get_y_or_z(train$x1,train$x2,fx)


# Test dataset

test <- data.frame(train_test[3],train_test[4]) 

y_test <-  get_y_or_z(test$x1,test$x2,fx)


# Neural network
r <- monmlp.fit(as.matrix(train), as.matrix(y), hidden1=10, n.ensemble=15, monotone=0, bag=TRUE)
## ** Ensemble 1 
## ** Bagging on
## 1.888923e-06 
## ** 1.888923e-06 
## 
## ** Ensemble 2 
## ** Bagging on
## 2.911593e-06 
## ** 2.911593e-06 
## 
## ** Ensemble 3 
## ** Bagging on
## 2.033747e-06 
## ** 2.033747e-06 
## 
## ** Ensemble 4 
## ** Bagging on
## 3.039769e-07 
## ** 3.039769e-07 
## 
## ** Ensemble 5 
## ** Bagging on
## 3.166534e-06 
## ** 3.166534e-06 
## 
## ** Ensemble 6 
## ** Bagging on
## 2.173655e-06 
## ** 2.173655e-06 
## 
## ** Ensemble 7 
## ** Bagging on
## 2.080309e-06 
## ** 2.080309e-06 
## 
## ** Ensemble 8 
## ** Bagging on
## 1.716308e-06 
## ** 1.716308e-06 
## 
## ** Ensemble 9 
## ** Bagging on
## 3.377854e-06 
## ** 3.377854e-06 
## 
## ** Ensemble 10 
## ** Bagging on
## 1.025009e-06 
## ** 1.025009e-06 
## 
## ** Ensemble 11 
## ** Bagging on
## 2.376187e-06 
## ** 2.376187e-06 
## 
## ** Ensemble 12 
## ** Bagging on
## 6.328533e-07 
## ** 6.328533e-07 
## 
## ** Ensemble 13 
## ** Bagging on
## 4.683839e-07 
## ** 4.683839e-07 
## 
## ** Ensemble 14 
## ** Bagging on
## 4.572277e-06 
## ** 4.572277e-06 
## 
## ** Ensemble 15 
## ** Bagging on
## 1.601989e-06 
## ** 1.601989e-06
y_pred <- monmlp.predict(x = as.matrix(test), weights = r)


# Test dataset + predictions
df <- data.frame(test$x1,test$x2, y_test, y_pred)


# Metrics
mse_metric <- mse(y_test, y_pred)
rmse_metric <- rmse(y_test, y_pred)
mae_metric <- mae(y_test, y_pred)

MSE

print(mse_metric)
## [1] 3.221427e-08

RMSE

print(rmse_metric)
## [1] 0.0001794833

MAE

print(mae_metric)
## [1] 0.0001414735

Plot of the function vs. the approximated function
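
Another way to visualize the fit, sketched under the assumption that the fitted ensemble r from the chunk above is still in scope: evaluate it on a regular grid with monmlp.predict and draw the predicted surface (xg, grid_new, and z_hat are illustrative names).

# Sketch: predicted surface on a regular grid over [-1, 1] x [-1, 1]
xg <- seq(-1, 1, length.out = 50)
grid_new <- as.matrix(expand.grid(x1 = xg, x2 = xg))   # same column order as the training matrix
z_hat <- matrix(monmlp.predict(x = grid_new, weights = r), nrow = length(xg))
# plotly's surface traces index z rows along the y axis, hence the transpose
plot_ly(x = xg, y = xg, z = t(z_hat), type = "surface")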

c) \(f(x_{1}, x_{2}) = \frac{\sin(\sqrt{x_{1}^{2} + x_{2}^{2}})}{\sqrt{x_{1}^{2} + x_{2}^{2}}}\)

# Function to be approximated

fx <-  function(x1, x2) {
  return((sin(sqrt((x1^2 ) + (x2^2)))) / (sqrt((x1^2 ) + (x2^2))))
} 


# Training dataset

x1 <- runif(10000,-5,5)
x2 <- runif(10000,-5,5)

grid <- data.frame(x1,x2)

train_test <- get_train_test(grid)
train <- data.frame(train_test[1],train_test[2]) 

y <- get_y_or_z(train$x1,train$x2,fx)


# Test dataset

test <- data.frame(train_test[3],train_test[4]) 

y_test <-  get_y_or_z(test$x1,test$x2,fx)


# Neural network
r <- monmlp.fit(as.matrix(train), as.matrix(y), hidden1=5, n.ensemble=15, monotone=0, bag=TRUE)
## ** Ensemble 1 
## ** Bagging on
## 0.07951542 
## ** 0.07951542 
## 
## ** Ensemble 2 
## ** Bagging on
## 0.07044224 
## ** 0.07044224 
## 
## ** Ensemble 3 
## ** Bagging on
## 0.1421439 
## ** 0.1421439 
## 
## ** Ensemble 4 
## ** Bagging on
## 0.07725459 
## ** 0.07725459 
## 
## ** Ensemble 5 
## ** Bagging on
## 0.07980558 
## ** 0.07980558 
## 
## ** Ensemble 6 
## ** Bagging on
## 0.09835959 
## ** 0.09835959 
## 
## ** Ensemble 7 
## ** Bagging on
## 0.06850995 
## ** 0.06850995 
## 
## ** Ensemble 8 
## ** Bagging on
## 0.07197521 
## ** 0.07197521 
## 
## ** Ensemble 9 
## ** Bagging on
## 0.08922837 
## ** 0.08922837 
## 
## ** Ensemble 10 
## ** Bagging on
## 0.09030784 
## ** 0.09030784 
## 
## ** Ensemble 11 
## ** Bagging on
## 0.08007851 
## ** 0.08007851 
## 
## ** Ensemble 12 
## ** Bagging on
## 0.1447431 
## ** 0.1447431 
## 
## ** Ensemble 13 
## ** Bagging on
## 0.136188 
## ** 0.136188 
## 
## ** Ensemble 14 
## ** Bagging on
## 0.06876578 
## ** 0.06876578 
## 
## ** Ensemble 15 
## ** Bagging on
## 0.1426282 
## ** 0.1426282
y_pred <- monmlp.predict(x = as.matrix(test), weights = r)


# Test dataset + predictions
df <- data.frame(test$x1,test$x2, y_test, y_pred)

# Metrics
mse_metric <- mse(y_test, y_pred)
rmse_metric <- rmse(y_test, y_pred)
mae_metric <- mae(y_test, y_pred)

MSE

print(mse_metric)
## [1] 0.004707768

RMSE

print(rmse_metric)
## [1] 0.06861318

MAE

print(mae_metric)
## [1] 0.05651384

Plot of the function vs. the approximated function
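
The target here is the classic two-dimensional sinc; a sketch of the true surface over the sampled range follows (the predicted surface can be drawn exactly as in the sketch for item b).

# Sketch: true sinc surface on a regular grid over [-5, 5] x [-5, 5]
xg <- seq(-5, 5, length.out = 100)
zg <- outer(xg, xg, function(a, b) {
  rad <- sqrt(a^2 + b^2)
  ifelse(rad == 0, 1, sin(rad) / rad)   # the limit at the origin is 1
})
plot_ly(x = xg, y = xg, z = zg, type = "surface")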

d) \(f(x) = \sqrt{2}\sin(x) + \sqrt{2}\cos(x) - \sqrt{2}\sin(3x) + \sqrt{2}\cos(3x)\)

# Function to be approximated

fx <- function(x_list) {

  z_list <- c()

  for (x in x_list){
    z <- sqrt(2) * sin(x) + sqrt(2) * cos(x) - sqrt(2) * sin(3*x) + sqrt(2) * cos(3*x)
    z_list <- c(z_list, z)
  }
  return(z_list)
}
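
Since sin, cos, and the arithmetic operators are vectorized in R, the loop above is not strictly necessary; an equivalent one-liner is sketched below (fx_vec is a hypothetical name, not used in the rest of the document).

# Vectorized equivalent of fx
fx_vec <- function(x) sqrt(2) * (sin(x) + cos(x) - sin(3 * x) + cos(3 * x))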


# Training dataset

x1 <- (runif(10000,-1,1))
df <- data.frame(x1)

smp_size <- floor(0.75 * nrow(df))

## set the seed to make your partition reproducible
set.seed(123)
train_ind <- sample(seq_len(nrow(df)), size = smp_size)

train <- df[train_ind, ]
test <- df[-train_ind, ]

x1 <- train
y <- fx(x1)

train <- data.frame(x1)


# Test dataset

x11 <- test

test <-  data.frame(x11)
y_test <- fx(x11)


# Neural network

r <- monmlp.fit(as.matrix(train), as.matrix(y), hidden1=10, n.ensemble=15, monotone=0, bag=TRUE)
## ** Ensemble 1 
## ** Bagging on
## 1.532327e-07 
## ** 1.532327e-07 
## 
## ** Ensemble 2 
## ** Bagging on
## 7.452294e-08 
## ** 7.452294e-08 
## 
## ** Ensemble 3 
## ** Bagging on
## 1.208284e-08 
## ** 1.208284e-08 
## 
## ** Ensemble 4 
## ** Bagging on
## 2.903615e-07 
## ** 2.903615e-07 
## 
## ** Ensemble 5 
## ** Bagging on
## 3.589163e-08 
## ** 3.589163e-08 
## 
## ** Ensemble 6 
## ** Bagging on
## 6.342113e-08 
## ** 6.342113e-08 
## 
## ** Ensemble 7 
## ** Bagging on
## 6.264442e-09 
## ** 6.264442e-09 
## 
## ** Ensemble 8 
## ** Bagging on
## 1.333688e-07 
## ** 1.333688e-07 
## 
## ** Ensemble 9 
## ** Bagging on
## 6.124036e-08 
## ** 6.124036e-08 
## 
## ** Ensemble 10 
## ** Bagging on
## 7.993191e-08 
## ** 7.993191e-08 
## 
## ** Ensemble 11 
## ** Bagging on
## 1.118193e-06 
## ** 1.118193e-06 
## 
## ** Ensemble 12 
## ** Bagging on
## 3.385408e-08 
## ** 3.385408e-08 
## 
## ** Ensemble 13 
## ** Bagging on
## 6.91946e-08 
## ** 6.91946e-08 
## 
## ** Ensemble 14 
## ** Bagging on
## 1.297363e-07 
## ** 1.297363e-07 
## 
## ** Ensemble 15 
## ** Bagging on
## 2.146825e-08 
## ** 2.146825e-08
y_pred <- monmlp.predict(x = as.matrix(test), weights = r)

# Test dataset + predictions
df <- data.frame(x11, y_test, y_pred)


# Metrics
mse_metric <- mse(y_test, y_pred)
rmse_metric <- rmse(y_test, y_pred)
mae_metric <- mae(y_test, y_pred)

MSE

print(mse_metric)
## [1] 1.062703e-07

RMSE

print(rmse_metric)
## [1] 0.0003259913

MAE

print(mae_metric)
## [1] 0.0001834614

Plot of the function vs. the approximated function
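
Since this item has a single input, an ordinary two-dimensional plot is enough; a minimal plotly sketch, assuming the df built above (columns x11, y_test, y_pred). The marker size and trace names are arbitrary choices.

# Sketch: sort by x so the line for the true function is drawn in order
ord <- order(df$x11)
plot_ly(df[ord, ], x = ~x11) %>%
  add_lines(y = ~y_test, name = "f(x)") %>%
  add_markers(y = ~y_pred, marker = list(size = 3), name = "MLP prediction")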