knitr::opts_chunk$set(echo = TRUE)
options(repos = c(CRAN = "https://cloud.r-project.org/"))
library(neuralnet)
library(tidyverse)
## ── Attaching core tidyverse packages ──────────────────────── tidyverse 2.0.0 ──
## ✔ dplyr     1.1.4     ✔ readr     2.1.5
## ✔ forcats   1.0.0     ✔ stringr   1.5.1
## ✔ ggplot2   3.5.1     ✔ tibble    3.2.1
## ✔ lubridate 1.9.3     ✔ tidyr     1.3.1
## ✔ purrr     1.0.2     
## ── Conflicts ────────────────────────────────────────── tidyverse_conflicts() ──
## ✖ dplyr::compute() masks neuralnet::compute()
## ✖ dplyr::filter()  masks stats::filter()
## ✖ dplyr::lag()     masks stats::lag()
## ℹ Use the conflicted package (<http://conflicted.r-lib.org/>) to force all conflicts to become errors
library(GGally)
## Registered S3 method overwritten by 'GGally':
##   method from   
##   +.gg   ggplot2
# Generate training data
set.seed(123) # for reproducibility
traininginput <- as.data.frame(matrix(runif(100, min = 0, max = 100), ncol = 2))
trainingoutput <- sqrt(rowSums(traininginput))

# Combine input and output into a single dataframe
trainingdata <- cbind(traininginput, trainingoutput)
colnames(trainingdata) <- c("Input1", "Input2", "Output")

# Display the training data
print(trainingdata)
##       Input1      Input2    Output
## 1  28.757752  4.58311667  5.774155
## 2  78.830514 44.22000742 11.092814
## 3  40.897692 79.89248456 10.990458
## 4  88.301740 12.18992600 10.024553
## 5  94.046728 56.09479838 12.253225
## 6   4.555650 20.65313896  5.020835
## 7  52.810549 12.75316502  8.097142
## 8  89.241904 75.33078643 12.828589
## 9  55.143501 89.50453592 12.026971
## 10 45.661474 37.44627759  9.116345
## 11 95.683335 66.51151946 12.735574
## 12 45.333416  9.48406609  7.403883
## 13 67.757064 38.39696378 10.303108
## 14 57.263340 27.43836446  9.203353
## 15 10.292468 81.46400389  9.578960
## 16 89.982497 44.85163414 11.611810
## 17 24.608773 81.00643530 10.276926
## 18  4.205953 81.23895095  9.243641
## 19 32.792072 79.43423211 10.593692
## 20 95.450365 43.98316876 11.808198
## 21 88.953932 75.44751586 12.821913
## 22 69.280341 62.92211316 11.497933
## 23 64.050681 71.01824014 11.621916
## 24 99.426978  0.06247733  9.974440
## 25 65.570580 47.53165741 10.634954
## 26 70.853047 22.01188852  9.636645
## 27 54.406602 37.98165377  9.611881
## 28 59.414202 61.27710033 10.985959
## 29 28.915974 35.17979092  8.005983
## 30 14.711365 11.11354243  5.081821
## 31 96.302423 24.36194727 10.984734
## 32 90.229905 66.80555874 12.531379
## 33 69.070528 41.76467797 10.527830
## 34 79.546742 78.81958340 12.584368
## 35  2.461368 10.28646443  3.570411
## 36 47.779597 43.48927415  9.553474
## 37 75.845954 98.49569800 13.203850
## 38 21.640794 89.30511144 10.533086
## 39 31.818101 88.64690608 10.975655
## 40 23.162579 17.50526503  6.377134
## 41 14.280002 13.06956916  5.229682
## 42 41.454634 65.31019250 10.332707
## 43 41.372433 34.35164723  8.701958
## 44 36.884545 65.67581280 10.127209
## 45 15.244475 32.03732425  6.876176
## 46 13.880606 18.76911193  5.713993
## 47 23.303410 78.22943013 10.076351
## 48 46.596245  9.35949867  7.480357
## 49 26.597264 46.67790416  8.560092
## 50 85.782772 51.15054599 11.701851
# Define the hidden layers configuration
cat("Will train a NN with the following configuration of hidden layer(s)\n")
## Will train a NN with the following configuration of hidden layer(s)
netLayers <- c(3)
print(netLayers)
## [1] 3
# Train the neural network
net.sqrt <- neuralnet(Output ~ Input1 + Input2, 
                      data = trainingdata, 
                      hidden = netLayers, 
                      threshold = 0.01)

# Print the network results
cat("NN training results\n")
## NN training results
print(net.sqrt)
## $call
## neuralnet(formula = Output ~ Input1 + Input2, data = trainingdata, 
##     hidden = netLayers, threshold = 0.01)
## 
## $response
##       Output
## 1   5.774155
## 2  11.092814
## 3  10.990458
## 4  10.024553
## 5  12.253225
## 6   5.020835
## 7   8.097142
## 8  12.828589
## 9  12.026971
## 10  9.116345
## 11 12.735574
## 12  7.403883
## 13 10.303108
## 14  9.203353
## 15  9.578960
## 16 11.611810
## 17 10.276926
## 18  9.243641
## 19 10.593692
## 20 11.808198
## 21 12.821913
## 22 11.497933
## 23 11.621916
## 24  9.974440
## 25 10.634954
## 26  9.636645
## 27  9.611881
## 28 10.985959
## 29  8.005983
## 30  5.081821
## 31 10.984734
## 32 12.531379
## 33 10.527830
## 34 12.584368
## 35  3.570411
## 36  9.553474
## 37 13.203850
## 38 10.533086
## 39 10.975655
## 40  6.377134
## 41  5.229682
## 42 10.332707
## 43  8.701958
## 44 10.127209
## 45  6.876176
## 46  5.713993
## 47 10.076351
## 48  7.480357
## 49  8.560092
## 50 11.701851
## 
## $covariate
##          Input1      Input2
##  [1,] 28.757752  4.58311667
##  [2,] 78.830514 44.22000742
##  [3,] 40.897692 79.89248456
##  [4,] 88.301740 12.18992600
##  [5,] 94.046728 56.09479838
##  [6,]  4.555650 20.65313896
##  [7,] 52.810549 12.75316502
##  [8,] 89.241904 75.33078643
##  [9,] 55.143501 89.50453592
## [10,] 45.661474 37.44627759
## [11,] 95.683335 66.51151946
## [12,] 45.333416  9.48406609
## [13,] 67.757064 38.39696378
## [14,] 57.263340 27.43836446
## [15,] 10.292468 81.46400389
## [16,] 89.982497 44.85163414
## [17,] 24.608773 81.00643530
## [18,]  4.205953 81.23895095
## [19,] 32.792072 79.43423211
## [20,] 95.450365 43.98316876
## [21,] 88.953932 75.44751586
## [22,] 69.280341 62.92211316
## [23,] 64.050681 71.01824014
## [24,] 99.426978  0.06247733
## [25,] 65.570580 47.53165741
## [26,] 70.853047 22.01188852
## [27,] 54.406602 37.98165377
## [28,] 59.414202 61.27710033
## [29,] 28.915974 35.17979092
## [30,] 14.711365 11.11354243
## [31,] 96.302423 24.36194727
## [32,] 90.229905 66.80555874
## [33,] 69.070528 41.76467797
## [34,] 79.546742 78.81958340
## [35,]  2.461368 10.28646443
## [36,] 47.779597 43.48927415
## [37,] 75.845954 98.49569800
## [38,] 21.640794 89.30511144
## [39,] 31.818101 88.64690608
## [40,] 23.162579 17.50526503
## [41,] 14.280002 13.06956916
## [42,] 41.454634 65.31019250
## [43,] 41.372433 34.35164723
## [44,] 36.884545 65.67581280
## [45,] 15.244475 32.03732425
## [46,] 13.880606 18.76911193
## [47,] 23.303410 78.22943013
## [48,] 46.596245  9.35949867
## [49,] 26.597264 46.67790416
## [50,] 85.782772 51.15054599
## 
## $model.list
## $model.list$response
## [1] "Output"
## 
## $model.list$variables
## [1] "Input1" "Input2"
## 
## 
## $err.fct
## function (x, y) 
## {
##     1/2 * (y - x)^2
## }
## <bytecode: 0x113c8a2a0>
## <environment: 0x113c8caf0>
## attr(,"type")
## [1] "sse"
## 
## $act.fct
## function (x) 
## {
##     1/(1 + exp(-x))
## }
## <bytecode: 0x113c85ba0>
## <environment: 0x113c85270>
## attr(,"type")
## [1] "logistic"
## 
## $linear.output
## [1] TRUE
## 
## $data
##       Input1      Input2    Output
## 1  28.757752  4.58311667  5.774155
## 2  78.830514 44.22000742 11.092814
## 3  40.897692 79.89248456 10.990458
## 4  88.301740 12.18992600 10.024553
## 5  94.046728 56.09479838 12.253225
## 6   4.555650 20.65313896  5.020835
## 7  52.810549 12.75316502  8.097142
## 8  89.241904 75.33078643 12.828589
## 9  55.143501 89.50453592 12.026971
## 10 45.661474 37.44627759  9.116345
## 11 95.683335 66.51151946 12.735574
## 12 45.333416  9.48406609  7.403883
## 13 67.757064 38.39696378 10.303108
## 14 57.263340 27.43836446  9.203353
## 15 10.292468 81.46400389  9.578960
## 16 89.982497 44.85163414 11.611810
## 17 24.608773 81.00643530 10.276926
## 18  4.205953 81.23895095  9.243641
## 19 32.792072 79.43423211 10.593692
## 20 95.450365 43.98316876 11.808198
## 21 88.953932 75.44751586 12.821913
## 22 69.280341 62.92211316 11.497933
## 23 64.050681 71.01824014 11.621916
## 24 99.426978  0.06247733  9.974440
## 25 65.570580 47.53165741 10.634954
## 26 70.853047 22.01188852  9.636645
## 27 54.406602 37.98165377  9.611881
## 28 59.414202 61.27710033 10.985959
## 29 28.915974 35.17979092  8.005983
## 30 14.711365 11.11354243  5.081821
## 31 96.302423 24.36194727 10.984734
## 32 90.229905 66.80555874 12.531379
## 33 69.070528 41.76467797 10.527830
## 34 79.546742 78.81958340 12.584368
## 35  2.461368 10.28646443  3.570411
## 36 47.779597 43.48927415  9.553474
## 37 75.845954 98.49569800 13.203850
## 38 21.640794 89.30511144 10.533086
## 39 31.818101 88.64690608 10.975655
## 40 23.162579 17.50526503  6.377134
## 41 14.280002 13.06956916  5.229682
## 42 41.454634 65.31019250 10.332707
## 43 41.372433 34.35164723  8.701958
## 44 36.884545 65.67581280 10.127209
## 45 15.244475 32.03732425  6.876176
## 46 13.880606 18.76911193  5.713993
## 47 23.303410 78.22943013 10.076351
## 48 46.596245  9.35949867  7.480357
## 49 26.597264 46.67790416  8.560092
## 50 85.782772 51.15054599 11.701851
## 
## $exclude
## NULL
## 
## $net.result
## $net.result[[1]]
##            [,1]
##  [1,]  5.748322
##  [2,] 11.102411
##  [3,] 11.004885
##  [4,] 10.007254
##  [5,] 12.263155
##  [6,]  4.984282
##  [7,]  8.124886
##  [8,] 12.813639
##  [9,] 12.076767
## [10,]  9.096312
## [11,] 12.716401
## [12,]  7.447377
## [13,] 10.286677
## [14,]  9.184876
## [15,]  9.567167
## [16,] 11.626149
## [17,] 10.260850
## [18,]  9.248538
## [19,] 10.588492
## [20,] 11.815875
## [21,] 12.808168
## [22,] 11.533442
## [23,] 11.664829
## [24,]  9.962696
## [25,] 10.631140
## [26,]  9.616442
## [27,]  9.585174
## [28,] 11.001165
## [29,]  8.033233
## [30,]  5.027575
## [31,] 10.970698
## [32,] 12.538518
## [33,] 10.518657
## [34,] 12.604826
## [35,]  3.651414
## [36,]  9.526042
## [37,] 13.150569
## [38,] 10.522982
## [39,] 10.985190
## [40,]  6.397742
## [41,]  5.183073
## [42,] 10.318405
## [43,]  8.696345
## [44,] 10.106737
## [45,]  6.928958
## [46,]  5.700146
## [47,] 10.056632
## [48,]  7.523600
## [49,]  8.565485
## [50,] 11.725628
## 
## 
## $weights
## $weights[[1]]
## $weights[[1]][[1]]
##             [,1]        [,2]        [,3]
## [1,] -2.22478310 -1.97691781 -0.60221442
## [2,]  0.02636953  0.01384385  0.05208740
## [3,]  0.01698215  0.02287318  0.05283744
## 
## $weights[[1]][[2]]
##           [,1]
## [1,] -1.042224
## [2,]  4.704062
## [3,]  4.885698
## [4,]  6.524719
## 
## 
## 
## $generalized.weights
## $generalized.weights[[1]]
##                [,1]          [,2]
##  [1,] -0.0033904100 -0.0034099192
##  [2,] -0.0004115838 -0.0004197046
##  [3,] -0.0004354286 -0.0004323459
##  [4,] -0.0005489131 -0.0005455452
##  [5,] -0.0002725877 -0.0002888737
##  [6,] -0.0050728873 -0.0051910323
##  [7,] -0.0010335422 -0.0010172781
##  [8,] -0.0002187503 -0.0002304702
##  [9,] -0.0003111672 -0.0003088810
## [10,] -0.0007254386 -0.0007236057
## [11,] -0.0002252410 -0.0002406462
## [12,] -0.0014030672 -0.0013896329
## [13,] -0.0005215075 -0.0005223190
## [14,] -0.0007085484 -0.0007012051
## [15,] -0.0006136288 -0.0006246751
## [16,] -0.0003437473 -0.0003580167
## [17,] -0.0005237144 -0.0005237594
## [18,] -0.0006635982 -0.0006824349
## [19,] -0.0004849305 -0.0004827491
## [20,] -0.0003189746 -0.0003360227
## [21,] -0.0002194459 -0.0002310539
## [22,] -0.0003667276 -0.0003722922
## [23,] -0.0003540711 -0.0003572713
## [24,] -0.0005454804 -0.0005402298
## [25,] -0.0004767831 -0.0004794702
## [26,] -0.0006232040 -0.0006161410
## [27,] -0.0006319218 -0.0006291904
## [28,] -0.0004331138 -0.0004352242
## [29,] -0.0010524943 -0.0010631406
## [30,] -0.0049766179 -0.0050551450
## [31,] -0.0004136001 -0.0004257450
## [32,] -0.0002463382 -0.0002598826
## [33,] -0.0004897791 -0.0004923555
## [34,] -0.0002449045 -0.0002528033
## [35,] -0.0110335477 -0.0112674667
## [36,] -0.0006417363 -0.0006407833
## [37,] -0.0001902211 -0.0001934136
## [38,] -0.0004889522 -0.0004860236
## [39,] -0.0004368325 -0.0004314046
## [40,] -0.0023773446 -0.0024053526
## [41,] -0.0045684548 -0.0046437036
## [42,] -0.0005212977 -0.0005212967
## [43,] -0.0008234178 -0.0008226939
## [44,] -0.0005492323 -0.0005500683
## [45,] -0.0017800819 -0.0018184948
## [46,] -0.0034463944 -0.0035091372
## [47,] -0.0005505103 -0.0005526721
## [48,] -0.0013532352 -0.0013385658
## [49,] -0.0008512251 -0.0008627593
## [50,] -0.0003356613 -0.0003483321
## 
## 
## $startweights
## $startweights[[1]]
## $startweights[[1]][[1]]
##             [,1]      [,2]       [,3]
## [1,]  0.25331851  1.368602 -1.5487528
## [2,] -0.02854676 -0.225771  0.5846137
## [3,] -0.04287046  1.516471  0.1238542
## 
## $startweights[[1]][[2]]
##            [,1]
## [1,]  0.2159416
## [2,]  0.3796395
## [3,] -0.5023235
## [4,] -0.3332074
## 
## 
## 
## $result.matrix
##                                [,1]
## error                  1.941042e-02
## reached.threshold      8.618101e-03
## steps                  2.344300e+04
## Intercept.to.1layhid1 -2.224783e+00
## Input1.to.1layhid1     2.636953e-02
## Input2.to.1layhid1     1.698215e-02
## Intercept.to.1layhid2 -1.976918e+00
## Input1.to.1layhid2     1.384385e-02
## Input2.to.1layhid2     2.287318e-02
## Intercept.to.1layhid3 -6.022144e-01
## Input1.to.1layhid3     5.208740e-02
## Input2.to.1layhid3     5.283744e-02
## Intercept.to.Output   -1.042224e+00
## 1layhid1.to.Output     4.704062e+00
## 1layhid2.to.Output     4.885698e+00
## 1layhid3.to.Output     6.524719e+00
## 
## attr(,"class")
## [1] "nn"
# Plot the neural network
plot(net.sqrt)

The coding workflow covers setup, data preparation, training, and presentation of results:

- Setup: install and load the packages needed for neural networks (neuralnet), data management (tidyverse), and visualization (GGally).
- Generate training data: draw random inputs and compute the square root of the sum of each row's inputs as the target output.
- Prepare the data: combine inputs and output into a single data frame and assign descriptive column names.
- Configure the neural network: choose the hidden layer structure and the number of neurons per layer (here a single hidden layer with 3 neurons).
- Train the neural network: fit Output as a function of Input1 and Input2, stopping once the partial derivatives of the error function fall below the chosen threshold (0.01).
- Present the results: print the fitted network object and plot its structure to visualize the learned weights; a prediction sketch on unseen inputs follows below.
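As a follow-up to the last step, the trained network can be applied to inputs it has not seen. The sketch below is illustrative only: the test values are made up for this example and were not part of the analysis above. Note that dplyr masks neuralnet::compute() (see the startup conflict messages), so the call uses an explicit namespace.

# A minimal sketch of evaluating the trained network on unseen inputs.
# The test inputs below are arbitrary and chosen only for illustration.
testinput <- data.frame(Input1 = c(10, 30, 60, 90),
                        Input2 = c(15, 45, 20, 80))

# dplyr::compute() masks neuralnet::compute(), so namespace the call explicitly
pred <- neuralnet::compute(net.sqrt, testinput)

# Compare the network's predictions with the true square roots
comparison <- data.frame(
  Expected  = sqrt(rowSums(testinput)),
  Predicted = as.vector(pred$net.result)
)
print(comparison)

# Optional: visualize predicted vs. expected values with ggplot2
ggplot(comparison, aes(x = Expected, y = Predicted)) +
  geom_point() +
  geom_abline(slope = 1, intercept = 0, linetype = "dashed") +
  labs(title = "Predicted vs. expected square roots (test inputs)")

If the network has trained well, the points should lie close to the dashed identity line; larger deviations are expected near the edges of the range covered by the training data.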