Helper functions

# Libraries
library(nnet)
library(plotly)
## Loading required package: ggplot2
## 
## Attaching package: 'plotly'
## The following object is masked from 'package:ggplot2':
## 
##     last_plot
## The following object is masked from 'package:stats':
## 
##     filter
## The following object is masked from 'package:graphics':
## 
##     layout
library(Metrics)
library(monmlp)
## Loading required package: optimx
# Iterate over the data points and apply the function to be approximated
get_y_or_z <- function(x1_list, x2_list, func) {

  index <- 1
  z_list <- c()

  for (x1 in x1_list) {
    x2 <- x2_list[[index]]
    z <- func(x1, x2)
    z_list <- c(z_list, z)
    index <- index + 1
  }
  return(z_list)
}

# Split the data into training and test sets
get_train_test <- function(df){
  smp_size <- floor(0.75 * nrow(df))
  
  ## set the seed to make the partition reproducible
  set.seed(123)
  train_ind <- sample(seq_len(nrow(df)), size = smp_size)
  
  train <- df[train_ind, ]
  test <- df[-train_ind, ]
  
  # c() flattens the two data frames into a single list of columns:
  # elements 1-2 are the training columns, elements 3-4 the test columns
  return(c(train, test))
}
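A minimal usage sketch of the two helpers (illustrative data only; the names demo, parts, demo_train, demo_test and demo_y are not part of the exercises), showing how the flattened list returned by get_train_test is reassembled into train and test data frames, as done in each item below:

# Illustrative only: rebuild the train/test data frames from the flat list
demo <- data.frame(x1 = runif(100, -1, 1), x2 = runif(100, -1, 1))
parts <- get_train_test(demo)

demo_train <- data.frame(parts[1], parts[2])  # columns x1, x2 (75% of the rows)
demo_test  <- data.frame(parts[3], parts[4])  # columns x1, x2 (remaining 25%)

# Evaluate an arbitrary two-argument function over the training pairs
demo_y <- get_y_or_z(demo_train$x1, demo_train$x2, function(a, b) a + b)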

5.5

Approximate the following functions using an MLP:

a) \(f(x_{1}, x_{2}) = \max\left\{e^{-x_{1}^{2}},\; e^{-2x_{2}^{2}},\; 2e^{-0.5(x_{1}^{2} + x_{2}^{2})}\right\}\)

# Function to be approximated

fx <- function(x1, x2){
 return(max(c(exp(-(x1^2)), exp(-2 * (x2^2)), (2 * exp(-0.5 * ((x1^2) + (x2^2)))) ) ))
}

# Training dataset

x1 <- (runif(10000,-30,30))
x2 <- (runif(10000,-30,30))


grid <- data.frame(x1,x2)

train_test <- get_train_test(grid)
train <- data.frame(train_test[1],train_test[2]) 

y <- get_y_or_z(train$x1,train$x2,fx)


# Test dataset

test <- data.frame(train_test[3],train_test[4]) 

y_test <-  get_y_or_z(test$x1,test$x2,fx)


# Neural network
r <- monmlp.fit(as.matrix(train), as.matrix(y), hidden1=3, n.ensemble=15, monotone=0, bag=TRUE)
## ** Ensemble 1 
## ** Bagging on
## 0.4140654 
## ** 0.4140654 
## 
## ** Ensemble 2 
## ** Bagging on
## 0.4193926 
## ** 0.4193926 
## 
## ** Ensemble 3 
## ** Bagging on
## 0.4939935 
## ** 0.4939935 
## 
## ** Ensemble 4 
## ** Bagging on
## 0.4653441 
## ** 0.4653441 
## 
## ** Ensemble 5 
## ** Bagging on
## 0.4190155 
## ** 0.4190155 
## 
## ** Ensemble 6 
## ** Bagging on
## 0.5208676 
## ** 0.5208676 
## 
## ** Ensemble 7 
## ** Bagging on
## 0.8731387 
## ** 0.8731387 
## 
## ** Ensemble 8 
## ** Bagging on
## 0.5109987 
## ** 0.5109987 
## 
## ** Ensemble 9 
## ** Bagging on
## 0.4723719 
## ** 0.4723719 
## 
## ** Ensemble 10 
## ** Bagging on
## 0.5315665 
## ** 0.5315665 
## 
## ** Ensemble 11 
## ** Bagging on
## 0.5211998 
## ** 0.5211998 
## 
## ** Ensemble 12 
## ** Bagging on
## 0.4011905 
## ** 0.4011905 
## 
## ** Ensemble 13 
## ** Bagging on
## 0.3982484 
## ** 0.3982484 
## 
## ** Ensemble 14 
## ** Bagging on
## 0.4322943 
## ** 0.4322943 
## 
## ** Ensemble 15 
## ** Bagging on
## 0.5074687 
## ** 0.5074687
y_pred <- monmlp.predict(x = as.matrix(test), weights = r)


# Test dataset + predictions
df <- data.frame(test$x1,test$x2, y_test, y_pred)

# Metrics
mse_metric <- mse(y_test, y_pred)
rmse_metric <- rmse(y_test, y_pred)
mae_metric <- mae(y_test, y_pred)
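For reference, the Metrics functions used here compute the usual definitions, where \(y_i\) are the true test values, \(\hat{y}_i\) the predictions and \(n\) the number of test points:

\[\mathrm{MSE} = \frac{1}{n}\sum_{i=1}^{n}(y_i - \hat{y}_i)^2, \qquad \mathrm{RMSE} = \sqrt{\mathrm{MSE}}, \qquad \mathrm{MAE} = \frac{1}{n}\sum_{i=1}^{n}\lvert y_i - \hat{y}_i\rvert\]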

MSE

print(mse_metric)
## [1] 0.007617217

RMSE

print(rmse_metric)
## [1] 0.08727667

MAE

print(mae_metric)
## [1] 0.04117252

Plot: original function vs. approximated function
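The interactive plotly figure is not reproduced here; below is a minimal sketch (assuming the df built above, whose columns are named test.x1, test.x2, y_test and y_pred; marker sizes are arbitrary) of how the comparison could be drawn:

# Sketch only: true values vs. MLP predictions over the test points
fig <- plot_ly(df) %>%
  add_trace(x = ~test.x1, y = ~test.x2, z = ~y_test, type = "scatter3d",
            mode = "markers", name = "f(x1, x2)", marker = list(size = 2)) %>%
  add_trace(x = ~test.x1, y = ~test.x2, z = ~y_pred, type = "scatter3d",
            mode = "markers", name = "MLP approximation", marker = list(size = 2))
fig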

b) \(f(x,y) = 0.5 + 0.1x^{2}\cos(y + 3) + 0.4xy\,e^{1-y^{2}}\)

# Function to be approximated

fx <-  function(x, y) {
  return(0.5 + 0.1 * ((x^2) * cos(y + 3)) + (0.4 * x * y* exp(1-(y^2))))
} 



# Training dataset

x1 <- (runif(10000,-30,30))
x2 <- (runif(10000,-30,30))


grid <- data.frame(x1,x2)

train_test <- get_train_test(grid)
train <- data.frame(train_test[1],train_test[2]) 

y <- get_y_or_z(train$x1,train$x2,fx)


# Test dataset

test <- data.frame(train_test[3],train_test[4]) 

y_test <-  get_y_or_z(test$x1,test$x2,fx)


# Neural network
r <- monmlp.fit(as.matrix(train), as.matrix(y), hidden1=25, n.ensemble=35, monotone=0, bag=TRUE)
## ** Ensemble 1 
## ** Bagging on
## 0.2401277 
## ** 0.2401277 
## 
## ** Ensemble 2 
## ** Bagging on
## 0.4647629 
## ** 0.4647629 
## 
## ** Ensemble 3 
## ** Bagging on
## 0.4765848 
## ** 0.4765848 
## 
## ** Ensemble 4 
## ** Bagging on
## 0.3021106 
## ** 0.3021106 
## 
## ** Ensemble 5 
## ** Bagging on
## 0.2416921 
## ** 0.2416921 
## 
## ** Ensemble 6 
## ** Bagging on
## 0.3225883 
## ** 0.3225883 
## 
## ** Ensemble 7 
## ** Bagging on
## 0.3887273 
## ** 0.3887273 
## 
## ** Ensemble 8 
## ** Bagging on
## 0.3926387 
## ** 0.3926387 
## 
## ** Ensemble 9 
## ** Bagging on
## 0.4086138 
## ** 0.4086138 
## 
## ** Ensemble 10 
## ** Bagging on
## 0.7519869 
## ** 0.7519869 
## 
## ** Ensemble 11 
## ** Bagging on
## 0.5007939 
## ** 0.5007939 
## 
## ** Ensemble 12 
## ** Bagging on
## 0.2708671 
## ** 0.2708671 
## 
## ** Ensemble 13 
## ** Bagging on
## 0.3637553 
## ** 0.3637553 
## 
## ** Ensemble 14 
## ** Bagging on
## 0.3162258 
## ** 0.3162258 
## 
## ** Ensemble 15 
## ** Bagging on
## 0.3683496 
## ** 0.3683496 
## 
## ** Ensemble 16 
## ** Bagging on
## 0.3724145 
## ** 0.3724145 
## 
## ** Ensemble 17 
## ** Bagging on
## 0.4223674 
## ** 0.4223674 
## 
## ** Ensemble 18 
## ** Bagging on
## 0.525903 
## ** 0.525903 
## 
## ** Ensemble 19 
## ** Bagging on
## 0.4317585 
## ** 0.4317585 
## 
## ** Ensemble 20 
## ** Bagging on
## 0.3090496 
## ** 0.3090496 
## 
## ** Ensemble 21 
## ** Bagging on
## 0.4350019 
## ** 0.4350019 
## 
## ** Ensemble 22 
## ** Bagging on
## 0.3139113 
## ** 0.3139113 
## 
## ** Ensemble 23 
## ** Bagging on
## 0.3010206 
## ** 0.3010206 
## 
## ** Ensemble 24 
## ** Bagging on
## 0.5003427 
## ** 0.5003427 
## 
## ** Ensemble 25 
## ** Bagging on
## 0.5441884 
## ** 0.5441884 
## 
## ** Ensemble 26 
## ** Bagging on
## 0.6819372 
## ** 0.6819372 
## 
## ** Ensemble 27 
## ** Bagging on
## 0.4669044 
## ** 0.4669044 
## 
## ** Ensemble 28 
## ** Bagging on
## 0.1960592 
## ** 0.1960592 
## 
## ** Ensemble 29 
## ** Bagging on
## 0.5965426 
## ** 0.5965426 
## 
## ** Ensemble 30 
## ** Bagging on
## 0.3940054 
## ** 0.3940054 
## 
## ** Ensemble 31 
## ** Bagging on
## 0.1643108 
## ** 0.1643108 
## 
## ** Ensemble 32 
## ** Bagging on
## 0.4612124 
## ** 0.4612124 
## 
## ** Ensemble 33 
## ** Bagging on
## 0.4260376 
## ** 0.4260376 
## 
## ** Ensemble 34 
## ** Bagging on
## 0.4587179 
## ** 0.4587179 
## 
## ** Ensemble 35 
## ** Bagging on
## 0.2016256 
## ** 0.2016256
y_pred <- monmlp.predict(x = as.matrix(test), weights = r)


# Test dataset + predictions
df <- data.frame(test$x1,test$x2, y_test, y_pred)


# Metrics
mse_metric <- mse(y_test, y_pred)
rmse_metric <- rmse(y_test, y_pred)
mae_metric <- mae(y_test, y_pred)

MSE

print(mse_metric)
## [1] 206.0578

RMSE

print(rmse_metric)
## [1] 14.35471

MAE

print(mae_metric)
## [1] 9.73723

Plot: original function vs. approximated function
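The errors here are much larger than in part (a), consistent with the wide range of \(f\) when the inputs are drawn from \([-30, 30]\) (the \(0.1x^{2}\cos(y+3)\) term alone reaches magnitudes near 90). One possible mitigation, shown only as a sketch (not run in this notebook; x_mu, x_sd, y_mu, y_sd, train_s, test_s, y_s, r_s and y_pred_s are illustrative names), is to standardize inputs and targets before fitting and map the predictions back afterwards:

# Sketch only: fit on standardized data, then undo the scaling on the predictions
x_mu <- colMeans(as.matrix(train));  x_sd <- apply(as.matrix(train), 2, sd)
y_mu <- mean(y);                     y_sd <- sd(y)

train_s <- scale(as.matrix(train), center = x_mu, scale = x_sd)
test_s  <- scale(as.matrix(test),  center = x_mu, scale = x_sd)
y_s     <- as.matrix((y - y_mu) / y_sd)

r_s <- monmlp.fit(train_s, y_s, hidden1 = 25, n.ensemble = 35, monotone = 0, bag = TRUE)
y_pred_s <- monmlp.predict(x = test_s, weights = r_s) * y_sd + y_mu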

c) \(f(x_{1}, x_{2}) = \frac{\sin\left(\sqrt{x_{1}^{2} + x_{2}^{2}}\right)}{\sqrt{x_{1}^{2} + x_{2}^{2}}}\)

# Function to be approximated

fx <-  function(x1, x2) {
  return((sin(sqrt((x1^2 ) + (x2^2)))) / (sqrt((x1^2 ) + (x2^2))))
} 
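Note that this expression is 0/0 at the origin, even though the limit of \(f\) there is 1; with continuous uniform samples the exact point (0, 0) essentially never occurs, but a guarded variant (a sketch only, fx_safe is not used below) avoids the NaN:

# Sketch only: return the limit value 1 at the origin instead of NaN
fx_safe <- function(x1, x2) {
  r <- sqrt(x1^2 + x2^2)
  ifelse(r == 0, 1, sin(r) / r)
}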


# Training dataset

x1 <- runif(10000,-5,5)
x2 <- runif(10000,-5,5)

grid <- data.frame(x1,x2)

train_test <- get_train_test(grid)
train <- data.frame(train_test[1],train_test[2]) 

y <- get_y_or_z(train$x1,train$x2,fx)


# Test dataset

test <- data.frame(train_test[3],train_test[4]) 

y_test <-  get_y_or_z(test$x1,test$x2,fx)


# Neural network
r <- monmlp.fit(as.matrix(train), as.matrix(y), hidden1=5, n.ensemble=20, monotone=0, bag=TRUE)
## ** Ensemble 1 
## ** Bagging on
## 0.07864659 
## ** 0.07864659 
## 
## ** Ensemble 2 
## ** Bagging on
## 0.09843363 
## ** 0.09843363 
## 
## ** Ensemble 3 
## ** Bagging on
## 0.1367151 
## ** 0.1367151 
## 
## ** Ensemble 4 
## ** Bagging on
## 0.09110765 
## ** 0.09110765 
## 
## ** Ensemble 5 
## ** Bagging on
## 0.07784006 
## ** 0.07784006 
## 
## ** Ensemble 6 
## ** Bagging on
## 0.09197364 
## ** 0.09197364 
## 
## ** Ensemble 7 
## ** Bagging on
## 0.06846986 
## ** 0.06846986 
## 
## ** Ensemble 8 
## ** Bagging on
## 0.09484885 
## ** 0.09484885 
## 
## ** Ensemble 9 
## ** Bagging on
## 0.1119614 
## ** 0.1119614 
## 
## ** Ensemble 10 
## ** Bagging on
## 0.08923574 
## ** 0.08923574 
## 
## ** Ensemble 11 
## ** Bagging on
## 0.1262062 
## ** 0.1262062 
## 
## ** Ensemble 12 
## ** Bagging on
## 0.1108534 
## ** 0.1108534 
## 
## ** Ensemble 13 
## ** Bagging on
## 0.08026694 
## ** 0.08026694 
## 
## ** Ensemble 14 
## ** Bagging on
## 0.1106713 
## ** 0.1106713 
## 
## ** Ensemble 15 
## ** Bagging on
## 0.07503359 
## ** 0.07503359 
## 
## ** Ensemble 16 
## ** Bagging on
## 0.09097546 
## ** 0.09097546 
## 
## ** Ensemble 17 
## ** Bagging on
## 0.07838048 
## ** 0.07838048 
## 
## ** Ensemble 18 
## ** Bagging on
## 0.08707131 
## ** 0.08707131 
## 
## ** Ensemble 19 
## ** Bagging on
## 0.07080235 
## ** 0.07080235 
## 
## ** Ensemble 20 
## ** Bagging on
## 0.06872177 
## ** 0.06872177
y_pred <- monmlp.predict(x = as.matrix(test), weights = r)


# Test dataset + predictions
df <- data.frame(test$x1,test$x2, y_test, y_pred)

# Metrics
mse_metric <- mse(y_test, y_pred)
rmse_metric <- rmse(y_test, y_pred)
mae_metric <- mae(y_test, y_pred)

MSE

print(mse_metric)
## [1] 0.004781062

RMSE

print(rmse_metric)
## [1] 0.06914522

MAE

print(mae_metric)
## [1] 0.05702388

Plot: original function vs. approximated function

d) \(f(x) = \sqrt{2}\sin(x) + \sqrt{2}\cos(x) - \sqrt{2}\sin(3x) + \sqrt{2}\cos(3x)\)

# Function to be approximated

fx <- function(x_list) {
  z_list <- c()

  for (x in x_list) {
    z <- sqrt(2) * sin(x) + sqrt(2) * cos(x) - sqrt(2) * sin(3*x) + sqrt(2) * cos(3*x)
    z_list <- c(z_list, z)
  }
  return(z_list)
}


# Training dataset (the split is done inline for this one-dimensional case)

x1 <- (runif(10000,-30,30))
df <- data.frame(x1)

smp_size <- floor(0.75 * nrow(df))

## set the seed to make your partition reproducible
set.seed(123)
train_ind <- sample(seq_len(nrow(df)), size = smp_size)

train <- df[train_ind, ]
test <- df[-train_ind, ]

x1 <- train          # training inputs
y <- fx(x1)          # training targets

train <- data.frame(x1)


# Test dataset

x11 <- test          # test inputs

test <- data.frame(x11)
y_test <- fx(x11)    # test targets


# Neural network

r <- monmlp.fit(as.matrix(train), as.matrix(y), hidden1=15, n.ensemble=25, monotone=0, bag=TRUE)
## ** Ensemble 1 
## ** Bagging on
## 0.5330431 
## ** 0.5330431 
## 
## ** Ensemble 2 
## ** Bagging on
## 0.5065573 
## ** 0.5065573 
## 
## ** Ensemble 3 
## ** Bagging on
## 0.9995131 
## ** 0.9995131 
## 
## ** Ensemble 4 
## ** Bagging on
## 0.4760191 
## ** 0.4760191 
## 
## ** Ensemble 5 
## ** Bagging on
## 0.9844741 
## ** 0.9844741 
## 
## ** Ensemble 6 
## ** Bagging on
## 0.5915894 
## ** 0.5915894 
## 
## ** Ensemble 7 
## ** Bagging on
## 0.7420523 
## ** 0.7420523 
## 
## ** Ensemble 8 
## ** Bagging on
## 0.7029739 
## ** 0.7029739 
## 
## ** Ensemble 9 
## ** Bagging on
## 0.9914983 
## ** 0.9914983 
## 
## ** Ensemble 10 
## ** Bagging on
## 0.5590313 
## ** 0.5590313 
## 
## ** Ensemble 11 
## ** Bagging on
## 0.5554233 
## ** 0.5554233 
## 
## ** Ensemble 12 
## ** Bagging on
## 1.00924 
## ** 1.00924 
## 
## ** Ensemble 13 
## ** Bagging on
## 1.015936 
## ** 1.015936 
## 
## ** Ensemble 14 
## ** Bagging on
## 0.9849611 
## ** 0.9849611 
## 
## ** Ensemble 15 
## ** Bagging on
## 0.9966364 
## ** 0.9966364 
## 
## ** Ensemble 16 
## ** Bagging on
## 0.703071 
## ** 0.703071 
## 
## ** Ensemble 17 
## ** Bagging on
## 0.5370529 
## ** 0.5370529 
## 
## ** Ensemble 18 
## ** Bagging on
## 0.6466607 
## ** 0.6466607 
## 
## ** Ensemble 19 
## ** Bagging on
## 0.6088507 
## ** 0.6088507 
## 
## ** Ensemble 20 
## ** Bagging on
## 0.7741149 
## ** 0.7741149 
## 
## ** Ensemble 21 
## ** Bagging on
## 0.9965466 
## ** 0.9965466 
## 
## ** Ensemble 22 
## ** Bagging on
## 0.5655937 
## ** 0.5655937 
## 
## ** Ensemble 23 
## ** Bagging on
## 0.9965974 
## ** 0.9965974 
## 
## ** Ensemble 24 
## ** Bagging on
## 0.574451 
## ** 0.574451 
## 
## ** Ensemble 25 
## ** Bagging on
## 0.5282978 
## ** 0.5282978
y_pred <- monmlp.predict(x = as.matrix(test), weights = r)

# Test dataset + predictions
df <- data.frame(x11, y_test, y_pred)


# Metrics
mse_metric <- mse(y_test, y_pred)
rmse_metric <- rmse(y_test, y_pred)
mae_metric <- mae(y_test, y_pred)

MSE

print(mse_metric)
## [1] 2.50044

RMSE

print(rmse_metric)
## [1] 1.581278

MAE

print(mae_metric)
## [1] 1.354107

Plot: original function vs. approximated function
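As in the previous items the interactive figure is not reproduced; for this one-dimensional case a comparison could be sketched directly from the df built above (columns x11, y_test and y_pred; marker size is arbitrary):

# Sketch only: true curve vs. MLP predictions over the test inputs
fig <- plot_ly(df) %>%
  add_trace(x = ~x11, y = ~y_test, type = "scatter", mode = "markers",
            name = "f(x)", marker = list(size = 3)) %>%
  add_trace(x = ~x11, y = ~y_pred, type = "scatter", mode = "markers",
            name = "MLP approximation", marker = list(size = 3))
fig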