Load the library and start H2O
library(h2o)
h2o.init()
H2O is not running yet, starting it now...
Note: In case of errors look at the following log files:
C:\Users\r631758\AppData\Local\Temp\1\Rtmp4y1xDl/h2o_r631758_started_from_r.out
C:\Users\r631758\AppData\Local\Temp\1\Rtmp4y1xDl/h2o_r631758_started_from_r.err
java version "1.8.0_144"
Java(TM) SE Runtime Environment (build 1.8.0_144-b01)
Java HotSpot(TM) 64-Bit Server VM (build 25.144-b01, mixed mode)
Starting H2O JVM and connecting: . Connection successful!
R is connected to the H2O cluster:
H2O cluster uptime: 1 seconds 897 milliseconds
H2O cluster version: 3.14.0.3
H2O cluster version age: 13 days
H2O cluster name: H2O_started_from_R_r631758_mjf733
H2O cluster total nodes: 1
H2O cluster total memory: 3.48 GB
H2O cluster total cores: 8
H2O cluster allowed cores: 8
H2O cluster healthy: TRUE
H2O Connection ip: localhost
H2O Connection port: 54321
H2O Connection proxy: NA
H2O Internal Security: FALSE
H2O API Extensions: Algos, AutoML, Core V3, Core V4
R Version: R version 3.4.2 (2017-09-28)
h2o.removeAll()
[1] 0
demo(h2o.deeplearning)
demo(h2o.deeplearning)
---- ~~~~~~~~~~~~~~~~
> # This is a demo of H2O's Deep Learning function
> # It imports a data set, parses it, and prints a summary
> # Then, it runs Deep Learning on the dataset
> # Note: This demo runs H2O on localhost:54321
> library(h2o)
> h2o.init()
Connection successful!
R is connected to the H2O cluster:
H2O cluster uptime: 1 minutes 58 seconds
H2O cluster version: 3.14.0.3
H2O cluster version age: 12 days
H2O cluster name: H2O_started_from_R_r631758_bqi699
H2O cluster total nodes: 1
H2O cluster total memory: 3.46 GB
H2O cluster total cores: 8
H2O cluster allowed cores: 8
H2O cluster healthy: TRUE
H2O Connection ip: localhost
H2O Connection port: 54321
H2O Connection proxy: NA
H2O Internal Security: FALSE
H2O API Extensions: Algos, AutoML, Core V3, Core V4
R Version: R version 3.4.2 (2017-09-28)
> prostate.hex = h2o.uploadFile(path = system.file("extdata", "prostate.csv", package="h2o"), destination_frame = "prostate.hex")
  |======================================================================================| 100%
> summary(prostate.hex)
Approximated quantiles computed! If you are interested in exact quantiles, please pass the `exact_quantiles=TRUE` parameter.
ID CAPSULE AGE RACE DPROS
Min. : 1.00 Min. :0.0000 Min. :43.00 Min. :0.000 Min. :1.000
1st Qu.: 95.75 1st Qu.:0.0000 1st Qu.:62.00 1st Qu.:1.000 1st Qu.:1.000
Median :190.50 Median :0.0000 Median :67.00 Median :1.000 Median :2.000
Mean :190.50 Mean :0.4026 Mean :66.04 Mean :1.087 Mean :2.271
3rd Qu.:285.25 3rd Qu.:1.0000 3rd Qu.:71.00 3rd Qu.:1.000 3rd Qu.:3.000
Max. :380.00 Max. :1.0000 Max. :79.00 Max. :2.000 Max. :4.000
DCAPS PSA VOL GLEASON
Min. :1.000 Min. : 0.300 Min. : 0.00 Min. :0.000
1st Qu.:1.000 1st Qu.: 4.900 1st Qu.: 0.00 1st Qu.:6.000
Median :1.000 Median : 8.664 Median :14.20 Median :6.000
Mean :1.108 Mean : 15.409 Mean :15.81 Mean :6.384
3rd Qu.:1.000 3rd Qu.: 17.063 3rd Qu.:26.40 3rd Qu.:7.000
Max. :2.000 Max. :139.700 Max. :97.60 Max. :9.000
> # Set the CAPSULE column to be a factor column then build model.
> prostate.hex$CAPSULE = as.factor(prostate.hex$CAPSULE)
> model = h2o.deeplearning(x = setdiff(colnames(prostate.hex), c("ID","CAPSULE")), y = "CAPSULE", training_frame = prostate.hex, activation = "Tanh", hidden = c(10, 10, 10), epochs = 10000)
  |======================================================================================| 100%
> print(model@model$model_summary)
Status of Neuron Layers: predicting CAPSULE, 2-class classification, bernoulli distribution, CrossEntropy loss, 322 weights/biases, 8.5 KB, 3,800,000 training samples, mini-batch size 1
layer units type dropout l1 l2 mean_rate rate_rms momentum mean_weight
1 1 7 Input 0.00 %
2 2 10 Tanh 0.00 % 0.000000 0.000000 0.011487 0.023725 0.000000 0.094133
3 3 10 Tanh 0.00 % 0.000000 0.000000 0.015676 0.020728 0.000000 -0.055270
4 4 10 Tanh 0.00 % 0.000000 0.000000 0.051680 0.071301 0.000000 0.090554
5 5 2 Softmax 0.000000 0.000000 0.005545 0.000859 0.000000 0.036777
weight_rms mean_bias bias_rms
1
2 1.525877 -0.387521 0.686546
3 1.536213 0.432871 1.124933
4 1.830349 0.431010 1.177110
5 3.853290 -0.109997 0.315070
> # Make predictions with the trained model with training data.
> predictions = predict(object = model, newdata = prostate.hex)
  |======================================================================================| 100%
> # Export predictions from H2O Cluster as R dataframe.
> predictions.R = as.data.frame(predictions)
> head(predictions.R)
predict p0 p1
1 0 9.994378e-01 5.621654e-04
2 0 9.999996e-01 3.673545e-07
3 0 1.000000e+00 1.174420e-18
4 0 9.995415e-01 4.584792e-04
5 0 9.988428e-01 1.157197e-03
6 1 1.905897e-06 9.999981e-01
> tail(predictions.R)
predict p0 p1
375 0 9.996771e-01 3.228955e-04
376 0 1.000000e+00 2.526000e-14
377 0 1.000000e+00 2.472152e-19
378 1 2.678242e-09 1.000000e+00
379 0 1.000000e+00 5.539717e-19
380 0 9.999997e-01 3.255905e-07
> # Check performance of classification model.
> performance = h2o.performance(model = model)
> print(performance)
H2OBinomialMetrics: deeplearning
** Reported on training data. **
** Metrics reported on full training frame **
MSE: 0.0105719
RMSE: 0.1028198
LogLoss: 0.03906477
Mean Per-Class Error: 0.009875903
AUC: 0.9991362
Gini: 0.9982724
Confusion Matrix (vertical: actual; across: predicted) for F1-optimal threshold:
0 1 Error Rate
0 224 3 0.013216 =3/227
1 1 152 0.006536 =1/153
Totals 225 155 0.010526 =4/380
Maximum Metrics: Maximum metrics at their respective thresholds
metric threshold value idx
1 max f1 0.140100 0.987013 114
2 max f2 0.140100 0.990874 114
3 max f0point5 0.941564 0.993289 107
4 max accuracy 0.876176 0.989474 110
5 max precision 1.000000 1.000000 0
6 max recall 0.004372 1.000000 134
7 max specificity 1.000000 1.000000 0
8 max absolute_mcc 0.140100 0.978222 114
9 max min_per_class_accuracy 0.675828 0.986784 113
10 max mean_per_class_accuracy 0.140100 0.990124 114
Gains/Lift Table: Extract with `h2o.gainsLift(<model>, <data>)` or `h2o.gainsLift(<model>, valid=<T/F>, xval=<T/F>)`
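Individual metrics can also be pulled from the performance object with the standard h2o accessors; a brief sketch, using the values already printed above:

h2o.auc(performance)              # 0.9991362, matching the printout above
h2o.logloss(performance)          # 0.03906477
h2o.confusionMatrix(performance)  # confusion matrix at the F1-optimal threshold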
Load the sample data (the grid of points that will be scored for the contour plots)
grid <- h2o.importFile(path = "Z:\\HealthCare Informatics\\r631758\\R codes\\H2O\\exercise\\grid.csv")
  |======================================================================================| 100%
Define a helper to plot classification contours (a sketch is given below)
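The helper definition itself is missing from this transcript. Below is a sketch modeled on the helper of the same name in H2O's deep learning booklet: it assumes a spiral frame (two coordinate columns plus a class label in column 3, with "Red" as one of the labels) imported alongside grid, and that grid is a square lattice of points to score. The spiral.csv path is hypothetical, mirroring grid.csv:

spiral <- h2o.importFile(path = "Z:\\HealthCare Informatics\\r631758\\R codes\\H2O\\exercise\\spiral.csv") # hypothetical path

plotC <- function(name, model, data = spiral, g = grid) {
  data <- as.data.frame(data)                  # pull the labeled training points into R
  pred <- as.data.frame(h2o.predict(model, g)) # score the grid with the fitted model
  n <- 0.5 * (sqrt(nrow(g)) - 1)               # assumes a (2n+1) x (2n+1) lattice
  d <- 1.5
  h <- d * (-n:n) / n                          # axis coordinates of the lattice
  plot(data[, -3], pch = 19, col = data[, 3], cex = 0.5,
       xlim = c(-d, d), ylim = c(-d, d), main = name)  # scatter the labeled points
  contour(h, h, z = array(ifelse(pred[, 1] == "Red", 0, 1), dim = c(2*n + 1, 2*n + 1)),
          col = "blue", lwd = 2, add = TRUE)   # overlay the decision boundary
}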

dev.new(noRStudioGD=FALSE) #direct plotting output to a new window
par(mfrow=c(2,2)) #set up the canvas for 2x2 plots
plotC( "DL", h2o.deeplearning(1:2,3,spiral,epochs=1e3))
  |======================================================================================| 100%
plotC("GBM", h2o.gbm (1:2,3,spiral))
  |======================================================================================| 100%
plotC("DRF", h2o.randomForest(1:2,3,spiral))
  |======================================================================================| 100%
plotC("GLM", h2o.glm (1:2,3,spiral,family="binomial"))
  |======================================================================================| 100%

dev.new(noRStudioGD=FALSE) #direct plotting output to a new window
par(mfrow=c(2,2)) #set up the canvas for 2x2 plots
ep <- c(1,250,500,750) # cumulative epoch targets; each run below resumes from the previous model via checkpoint
plotC(paste0("DL ",ep[1]," epochs"),
h2o.deeplearning(1:2,3,spiral,epochs=ep[1],
model_id="dl_1"))
  |======================================================================================| 100%
plotC(paste0("DL ",ep[2]," epochs"),
h2o.deeplearning(1:2,3,spiral,epochs=ep[2],
checkpoint="dl_1",model_id="dl_2"))
  |======================================================================================| 100%
plotC(paste0("DL ",ep[3]," epochs"),
h2o.deeplearning(1:2,3,spiral,epochs=ep[3],
checkpoint="dl_2",model_id="dl_3"))
  |======================================================================================| 100%
plotC(paste0("DL ",ep[4]," epochs"),
h2o.deeplearning(1:2,3,spiral,epochs=ep[4],
checkpoint="dl_3",model_id="dl_4"))
  |======================================================================================| 100%

You can see that, given enough training time, the network learns the structure of the spirals. We explore different network architectures next:
dev.new(noRStudioGD=FALSE) #direct plotting output to a new window
par(mfrow=c(2,2)) #set up the canvas for 2x2 plots
for (hidden in list(c(11,13,17,19),c(42,42,42),c(200,200),c(1000))) {
plotC(paste0("DL hidden=",paste0(hidden, collapse="x")),
h2o.deeplearning(1:2,3,spiral,hidden=hidden,epochs=500))
}
[ training and prediction progress bars for the four architectures omitted; all runs reached 100% ]

It is clear that different configurations can achieve similar performance, and that tuning is required for optimal results. Next, we compare different activation functions, including one with 50% dropout regularization in the hidden layers:
dev.new(noRStudioGD=FALSE) #direct plotting output to a new window
par(mfrow=c(2,2)) #set up the canvas for 2x2 plots
for (act in c("Tanh","Maxout","Rectifier","RectifierWithDropout")) {
plotC(paste0("DL ",act," activation"),
h2o.deeplearning(1:2,3,spiral,
activation=act,hidden=c(100,100),epochs=1000))
}
[ training and prediction progress bars for the four activation functions omitted; all runs reached 100% ]

To predict the 80th percentile of petal length in the iris dataset in R:
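The call that produced dl1 below is not shown in the transcript. A minimal sketch consistent with the model summary (two numeric predictors, response petal_len, quantile loss) would be the following; the frame and predictor names are hypothetical:

dl1 <- h2o.deeplearning(x = c("sepal_len", "sepal_wid"),  # hypothetical predictor names
                        y = "petal_len",
                        training_frame = iris.hex,        # hypothetical frame name
                        distribution = "quantile",
                        quantile_alpha = 0.8)             # target the 80th percentile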
dl1
Model Details:
==============
H2ORegressionModel: deeplearning
Model ID: DeepLearning_model_R_1507322206419_2
Status of Neuron Layers: predicting petal_len, regression, quantile distribution, Quantile loss, 41,001 weights/biases, 488.5 KB, 1,100 training samples, mini-batch size 1
layer units type dropout l1 l2 mean_rate rate_rms momentum mean_weight
1 1 2 Input 0.00 %
2 2 200 Rectifier 0.00 % 0.000000 0.000000 0.026735 0.013758 0.000000 0.007604
3 3 200 Rectifier 0.00 % 0.000000 0.000000 0.160165 0.230002 0.000000 -0.005706
4 4 1 Linear 0.000000 0.000000 0.008027 0.061922 0.000000 0.000605
weight_rms mean_bias bias_rms
1
2 0.098877 0.470853 0.018668
3 0.069888 0.988443 0.007587
4 0.063543 0.000329 0.000000
H2ORegressionMetrics: deeplearning
** Reported on training data. **
** Metrics reported on full training frame **
MSE: 1.156228
RMSE: 1.075281
MAE: 0.9202413
RMSLE: 0.2506345
Mean Residual Deviance : 0.2099247
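To obtain the fitted 80th-percentile values, score a frame with the model; a hedged usage sketch, with iris.hex as assumed above:

pred80 <- h2o.predict(dl1, iris.hex)
head(as.data.frame(pred80))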
Handwritten digit recognition example: the training frame holds 784 pixel columns (C1-C784) plus a label column (C785)
summary(train)
Approximated quantiles computed! If you are interested in exact quantiles, please pass the `exact_quantiles=TRUE` parameter.
 C1       C2       C3       C4       C5       C6
 Min. : 0 Min. : 0 Min. : 0 Min. : 0 Min. : 0 Min. : 0
 [ ... columns C7 through C785 condensed: every pixel column reports Min. : 0 ... ]
[ reached getOption("max.print") -- omitted 5 rows ]
summary(test)
Approximated quantiles computed! If you are interested in exact quantiles, please pass the `exact_quantiles=TRUE` parameter.
 C1       C2       C3       C4       C5       C6
 Min. : 0 Min. : 0 Min. : 0 Min. : 0 Min. : 0 Min. : 0
 [ ... columns C7 through C785 condensed: every pixel column reports Min. : 0 ... ]
[ reached getOption("max.print") -- omitted 5 rows ]
Specify the response and predictors
y="C785"
x<-setdiff(names(train),y)
x
[1] "C1" "C2" "C3" "C4" "C5" "C6" "C7" "C8" "C9" "C10" "C11" "C12"
[13] "C13" "C14" "C15" "C16" "C17" "C18" "C19" "C20" "C21" "C22" "C23" "C24"
[25] "C25" "C26" "C27" "C28" "C29" "C30" "C31" "C32" "C33" "C34" "C35" "C36"
[37] "C37" "C38" "C39" "C40" "C41" "C42" "C43" "C44" "C45" "C46" "C47" "C48"
[49] "C49" "C50" "C51" "C52" "C53" "C54" "C55" "C56" "C57" "C58" "C59" "C60"
[61] "C61" "C62" "C63" "C64" "C65" "C66" "C67" "C68" "C69" "C70" "C71" "C72"
[73] "C73" "C74" "C75" "C76" "C77" "C78" "C79" "C80" "C81" "C82" "C83" "C84"
[85] "C85" "C86" "C87" "C88" "C89" "C90" "C91" "C92" "C93" "C94" "C95" "C96"
[97] "C97" "C98" "C99" "C100" "C101" "C102" "C103" "C104" "C105" "C106" "C107" "C108"
[109] "C109" "C110" "C111" "C112" "C113" "C114" "C115" "C116" "C117" "C118" "C119" "C120"
[121] "C121" "C122" "C123" "C124" "C125" "C126" "C127" "C128" "C129" "C130" "C131" "C132"
[133] "C133" "C134" "C135" "C136" "C137" "C138" "C139" "C140" "C141" "C142" "C143" "C144"
[145] "C145" "C146" "C147" "C148" "C149" "C150" "C151" "C152" "C153" "C154" "C155" "C156"
[157] "C157" "C158" "C159" "C160" "C161" "C162" "C163" "C164" "C165" "C166" "C167" "C168"
[169] "C169" "C170" "C171" "C172" "C173" "C174" "C175" "C176" "C177" "C178" "C179" "C180"
[181] "C181" "C182" "C183" "C184" "C185" "C186" "C187" "C188" "C189" "C190" "C191" "C192"
[193] "C193" "C194" "C195" "C196" "C197" "C198" "C199" "C200" "C201" "C202" "C203" "C204"
[205] "C205" "C206" "C207" "C208" "C209" "C210" "C211" "C212" "C213" "C214" "C215" "C216"
[217] "C217" "C218" "C219" "C220" "C221" "C222" "C223" "C224" "C225" "C226" "C227" "C228"
[229] "C229" "C230" "C231" "C232" "C233" "C234" "C235" "C236" "C237" "C238" "C239" "C240"
[241] "C241" "C242" "C243" "C244" "C245" "C246" "C247" "C248" "C249" "C250" "C251" "C252"
[253] "C253" "C254" "C255" "C256" "C257" "C258" "C259" "C260" "C261" "C262" "C263" "C264"
[265] "C265" "C266" "C267" "C268" "C269" "C270" "C271" "C272" "C273" "C274" "C275" "C276"
[277] "C277" "C278" "C279" "C280" "C281" "C282" "C283" "C284" "C285" "C286" "C287" "C288"
[289] "C289" "C290" "C291" "C292" "C293" "C294" "C295" "C296" "C297" "C298" "C299" "C300"
[301] "C301" "C302" "C303" "C304" "C305" "C306" "C307" "C308" "C309" "C310" "C311" "C312"
[313] "C313" "C314" "C315" "C316" "C317" "C318" "C319" "C320" "C321" "C322" "C323" "C324"
[325] "C325" "C326" "C327" "C328" "C329" "C330" "C331" "C332" "C333" "C334" "C335" "C336"
[337] "C337" "C338" "C339" "C340" "C341" "C342" "C343" "C344" "C345" "C346" "C347" "C348"
[349] "C349" "C350" "C351" "C352" "C353" "C354" "C355" "C356" "C357" "C358" "C359" "C360"
[361] "C361" "C362" "C363" "C364" "C365" "C366" "C367" "C368" "C369" "C370" "C371" "C372"
[373] "C373" "C374" "C375" "C376" "C377" "C378" "C379" "C380" "C381" "C382" "C383" "C384"
[385] "C385" "C386" "C387" "C388" "C389" "C390" "C391" "C392" "C393" "C394" "C395" "C396"
[397] "C397" "C398" "C399" "C400" "C401" "C402" "C403" "C404" "C405" "C406" "C407" "C408"
[409] "C409" "C410" "C411" "C412" "C413" "C414" "C415" "C416" "C417" "C418" "C419" "C420"
[421] "C421" "C422" "C423" "C424" "C425" "C426" "C427" "C428" "C429" "C430" "C431" "C432"
[433] "C433" "C434" "C435" "C436" "C437" "C438" "C439" "C440" "C441" "C442" "C443" "C444"
[445] "C445" "C446" "C447" "C448" "C449" "C450" "C451" "C452" "C453" "C454" "C455" "C456"
[457] "C457" "C458" "C459" "C460" "C461" "C462" "C463" "C464" "C465" "C466" "C467" "C468"
[469] "C469" "C470" "C471" "C472" "C473" "C474" "C475" "C476" "C477" "C478" "C479" "C480"
[481] "C481" "C482" "C483" "C484" "C485" "C486" "C487" "C488" "C489" "C490" "C491" "C492"
[493] "C493" "C494" "C495" "C496" "C497" "C498" "C499" "C500" "C501" "C502" "C503" "C504"
[505] "C505" "C506" "C507" "C508" "C509" "C510" "C511" "C512" "C513" "C514" "C515" "C516"
[517] "C517" "C518" "C519" "C520" "C521" "C522" "C523" "C524" "C525" "C526" "C527" "C528"
[529] "C529" "C530" "C531" "C532" "C533" "C534" "C535" "C536" "C537" "C538" "C539" "C540"
[541] "C541" "C542" "C543" "C544" "C545" "C546" "C547" "C548" "C549" "C550" "C551" "C552"
[553] "C553" "C554" "C555" "C556" "C557" "C558" "C559" "C560" "C561" "C562" "C563" "C564"
[565] "C565" "C566" "C567" "C568" "C569" "C570" "C571" "C572" "C573" "C574" "C575" "C576"
[577] "C577" "C578" "C579" "C580" "C581" "C582" "C583" "C584" "C585" "C586" "C587" "C588"
[589] "C589" "C590" "C591" "C592" "C593" "C594" "C595" "C596" "C597" "C598" "C599" "C600"
[601] "C601" "C602" "C603" "C604" "C605" "C606" "C607" "C608" "C609" "C610" "C611" "C612"
[613] "C613" "C614" "C615" "C616" "C617" "C618" "C619" "C620" "C621" "C622" "C623" "C624"
[625] "C625" "C626" "C627" "C628" "C629" "C630" "C631" "C632" "C633" "C634" "C635" "C636"
[637] "C637" "C638" "C639" "C640" "C641" "C642" "C643" "C644" "C645" "C646" "C647" "C648"
[649] "C649" "C650" "C651" "C652" "C653" "C654" "C655" "C656" "C657" "C658" "C659" "C660"
[661] "C661" "C662" "C663" "C664" "C665" "C666" "C667" "C668" "C669" "C670" "C671" "C672"
[673] "C673" "C674" "C675" "C676" "C677" "C678" "C679" "C680" "C681" "C682" "C683" "C684"
[685] "C685" "C686" "C687" "C688" "C689" "C690" "C691" "C692" "C693" "C694" "C695" "C696"
[697] "C697" "C698" "C699" "C700" "C701" "C702" "C703" "C704" "C705" "C706" "C707" "C708"
[709] "C709" "C710" "C711" "C712" "C713" "C714" "C715" "C716" "C717" "C718" "C719" "C720"
[721] "C721" "C722" "C723" "C724" "C725" "C726" "C727" "C728" "C729" "C730" "C731" "C732"
[733] "C733" "C734" "C735" "C736" "C737" "C738" "C739" "C740" "C741" "C742" "C743" "C744"
[745] "C745" "C746" "C747" "C748" "C749" "C750" "C751" "C752" "C753" "C754" "C755" "C756"
[757] "C757" "C758" "C759" "C760" "C761" "C762" "C763" "C764" "C765" "C766" "C767" "C768"
[769] "C769" "C770" "C771" "C772" "C773" "C774" "C775" "C776" "C777" "C778" "C779" "C780"
[781] "C781" "C782" "C783" "C784"
set y as factor
model_cv<-h2o.deeplearning(x=x,y=y, training_frame = train, distribution = "multinomial", activation="RectifierWithDropout", hidden=c(32,32,32), input_dropout_ratio=0.2, sparse=TRUE, l1=1e-5, epochs = 10, nfolds=5)
Dropping bad and constant columns: [C86, C85, C729, C728, C646, C645, C169, C760, C561, C53, C11, C55, C10, C54, C57, C12, C56, C58, C17, C19, C18, C731, C730, C20, C22, C21, C24, C23, C26, C25, C28, C27, C702, C701, C29, C700, C1, C2, C784, C3, C783, C4, C782, C5, C781, C6, C142, C7, C141, C8, C9, C31, C30, C32, C759, C758, C757, C756, C755, C477, C113, C674, C112, C673, C672, C84, C83].
|
|=====================================================================================| 100%
model_cv
Model Details:
==============
H2OMultinomialModel: deeplearning
Model ID: DeepLearning_model_R_1507322206419_4
Status of Neuron Layers: predicting C785, 10-class classification, multinomial distribution, CrossEntropy loss, 25,418 weights/biases, 374.8 KB, 686,260 training samples, mini-batch size 1
layer units type dropout l1 l2 mean_rate rate_rms momentum
1 1 717 Input 20.00 %
2 2 32 RectifierDropout 50.00 % 0.000010 0.000000 0.032148 0.182078 0.000000
3 3 32 RectifierDropout 50.00 % 0.000010 0.000000 0.000316 0.000184 0.000000
4 4 32 RectifierDropout 50.00 % 0.000010 0.000000 0.000575 0.000316 0.000000
5 5 10 Softmax 0.000010 0.000000 0.002882 0.002806 0.000000
mean_weight weight_rms mean_bias bias_rms
1
2 -0.010521 0.067818 0.534664 0.228609
3 -0.036404 0.206775 0.633899 0.321838
4 -0.041562 0.222377 0.561128 0.422619
5 -0.503300 1.103185 -2.239528 1.251503
H2OMultinomialMetrics: deeplearning
** Reported on training data. **
** Metrics reported on temporary training frame with 10017 samples **
Training Set Metrics:
=====================
MSE: (Extract with `h2o.mse`) 0.14834
RMSE: (Extract with `h2o.rmse`) 0.3851493
Logloss: (Extract with `h2o.logloss`) 0.4727512
Mean Per-Class Error: 0.1157331
Confusion Matrix: Extract with `h2o.confusionMatrix(<model>,train = TRUE)`)
=========================================================================
Confusion Matrix: Row labels: Actual class; Column labels: Predicted class
0 1 2 3 4 5 6 7 8 9 Error Rate
0 918 0 6 2 4 7 4 0 33 2 0.0594 = 58 / 976
1 0 1131 3 14 0 0 1 2 27 2 0.0415 = 49 / 1,180
2 4 6 855 88 10 4 13 9 24 1 0.1568 = 159 / 1,014
3 0 1 11 985 0 9 0 5 9 3 0.0371 = 38 / 1,023
4 0 2 3 5 901 3 7 1 19 85 0.1218 = 125 / 1,026
5 5 1 10 193 2 560 4 0 63 6 0.3365 = 284 / 844
6 8 3 8 1 7 24 881 0 29 1 0.0842 = 81 / 962
7 3 1 9 38 3 0 0 940 3 29 0.0838 = 86 / 1,026
8 0 9 6 46 0 6 0 0 925 2 0.0694 = 69 / 994
9 2 1 0 97 19 1 0 21 21 810 0.1667 = 162 / 972
Totals 940 1155 911 1469 946 614 910 978 1153 941 0.1109 = 1,111 / 10,017
Hit Ratio Table: Extract with `h2o.hit_ratio_table(<model>,train = TRUE)`
=======================================================================
Top-10 Hit Ratios:
k hit_ratio
1 1 0.889089
2 2 0.948488
3 3 0.968853
4 4 0.980134
5 5 0.988020
6 6 0.993910
7 7 0.996606
8 8 0.998602
9 9 0.999601
10 10 1.000000
H2OMultinomialMetrics: deeplearning
** Reported on cross-validation data. **
** 5-fold cross-validation on training data (Metrics computed for combined holdout predictions) **
Cross-Validation Set Metrics:
=====================
Extract cross-validation frame with `h2o.getFrame("RTMP_sid_b48f_157")`
MSE: (Extract with `h2o.mse`) 0.1273986
RMSE: (Extract with `h2o.rmse`) 0.3569295
Logloss: (Extract with `h2o.logloss`) 0.4228517
Mean Per-Class Error: 0.09928181
Hit Ratio Table: Extract with `h2o.hit_ratio_table(<model>,xval = TRUE)`
=======================================================================
Top-10 Hit Ratios:
k hit_ratio
1 1 0.902633
2 2 0.953600
3 3 0.972850
4 4 0.982050
5 5 0.989433
6 6 0.994167
7 7 0.996483
8 8 0.998033
9 9 0.999283
10 10 1.000000
Cross-Validation Metrics Summary:
mean sd cv_1_valid cv_2_valid cv_3_valid
accuracy 0.9026182 0.0040974915 0.90128064 0.9097332 0.8924319
err 0.097381756 0.0040974915 0.098719366 0.09026681 0.10756806
err_count 1168.4 47.763165 1164.0 1086.0 1292.0
logloss 0.4229333 0.020707238 0.4337107 0.39469606 0.47525612
max_per_class_error 0.2157862 0.015042614 0.21295474 0.22273567 0.21661238
mean_per_class_accuracy 0.9006914 0.0039779264 0.89941067 0.90786743 0.89104694
mean_per_class_error 0.09930864 0.0039779264 0.10058931 0.092132546 0.108953066
mse 0.12742995 0.008456517 0.13102913 0.11791809 0.14929952
r2 0.98473376 0.0010199259 0.9843297 0.98585975 0.98209006
rmse 0.3565962 0.011599256 0.36197945 0.34339204 0.38639295
cv_4_valid cv_5_valid
accuracy 0.9038221 0.90582335
err 0.09617787 0.09417667
err_count 1155.0 1145.0
logloss 0.40169317 0.40931046
max_per_class_error 0.24643198 0.18019626
mean_per_class_accuracy 0.9009753 0.90415645
mean_per_class_error 0.09902473 0.09584354
mse 0.117703974 0.121199004
r2 0.985955 0.9854344
rmse 0.34308013 0.34813648
# View specified parameters of the deep learning model
dl1@parameters
$model_id
[1] "DeepLearning_model_R_1507322206419_3"
$training_frame
[1] "RTMP_sid_b48f_157"
$validation_frame
[1] "RTMP_sid_b48f_158"
$activation
[1] "RectifierWithDropout"
$hidden
[1] 32 32 32
$seed
[1] -5.165887e+17
$input_dropout_ratio
[1] 0.2
$l1
[1] 1e-05
$distribution
[1] "multinomial"
$sparse
[1] TRUE
$x
[1] "C13" "C14" "C15" "C16" "C33" "C34" "C35" "C36" "C37" "C38" "C39" "C40"
[13] "C41" "C42" "C43" "C44" "C45" "C46" "C47" "C48" "C49" "C50" "C51" "C52"
[25] "C59" "C60" "C61" "C62" "C63" "C64" "C65" "C66" "C67" "C68" "C69" "C70"
[37] "C71" "C72" "C73" "C74" "C75" "C76" "C77" "C78" "C79" "C80" "C81" "C82"
[49] "C87" "C88" "C89" "C90" "C91" "C92" "C93" "C94" "C95" "C96" "C97" "C98"
[61] "C99" "C100" "C101" "C102" "C103" "C104" "C105" "C106" "C107" "C108" "C109" "C110"
[73] "C111" "C114" "C115" "C116" "C117" "C118" "C119" "C120" "C121" "C122" "C123" "C124"
[85] "C125" "C126" "C127" "C128" "C129" "C130" "C131" "C132" "C133" "C134" "C135" "C136"
[97] "C137" "C138" "C139" "C140" "C143" "C144" "C145" "C146" "C147" "C148" "C149" "C150"
[109] "C151" "C152" "C153" "C154" "C155" "C156" "C157" "C158" "C159" "C160" "C161" "C162"
[121] "C163" "C164" "C165" "C166" "C167" "C168" "C170" "C171" "C172" "C173" "C174" "C175"
[133] "C176" "C177" "C178" "C179" "C180" "C181" "C182" "C183" "C184" "C185" "C186" "C187"
[145] "C188" "C189" "C190" "C191" "C192" "C193" "C194" "C195" "C196" "C197" "C198" "C199"
[157] "C200" "C201" "C202" "C203" "C204" "C205" "C206" "C207" "C208" "C209" "C210" "C211"
[169] "C212" "C213" "C214" "C215" "C216" "C217" "C218" "C219" "C220" "C221" "C222" "C223"
[181] "C224" "C225" "C226" "C227" "C228" "C229" "C230" "C231" "C232" "C233" "C234" "C235"
[193] "C236" "C237" "C238" "C239" "C240" "C241" "C242" "C243" "C244" "C245" "C246" "C247"
[205] "C248" "C249" "C250" "C251" "C252" "C253" "C254" "C255" "C256" "C257" "C258" "C259"
[217] "C260" "C261" "C262" "C263" "C264" "C265" "C266" "C267" "C268" "C269" "C270" "C271"
[229] "C272" "C273" "C274" "C275" "C276" "C277" "C278" "C279" "C280" "C281" "C282" "C283"
[241] "C284" "C285" "C286" "C287" "C288" "C289" "C290" "C291" "C292" "C293" "C294" "C295"
[253] "C296" "C297" "C298" "C299" "C300" "C301" "C302" "C303" "C304" "C305" "C306" "C307"
[265] "C308" "C309" "C310" "C311" "C312" "C313" "C314" "C315" "C316" "C317" "C318" "C319"
[277] "C320" "C321" "C322" "C323" "C324" "C325" "C326" "C327" "C328" "C329" "C330" "C331"
[289] "C332" "C333" "C334" "C335" "C336" "C337" "C338" "C339" "C340" "C341" "C342" "C343"
[301] "C344" "C345" "C346" "C347" "C348" "C349" "C350" "C351" "C352" "C353" "C354" "C355"
[313] "C356" "C357" "C358" "C359" "C360" "C361" "C362" "C363" "C364" "C365" "C366" "C367"
[325] "C368" "C369" "C370" "C371" "C372" "C373" "C374" "C375" "C376" "C377" "C378" "C379"
[337] "C380" "C381" "C382" "C383" "C384" "C385" "C386" "C387" "C388" "C389" "C390" "C391"
[349] "C392" "C393" "C394" "C395" "C396" "C397" "C398" "C399" "C400" "C401" "C402" "C403"
[361] "C404" "C405" "C406" "C407" "C408" "C409" "C410" "C411" "C412" "C413" "C414" "C415"
[373] "C416" "C417" "C418" "C419" "C420" "C421" "C422" "C423" "C424" "C425" "C426" "C427"
[385] "C428" "C429" "C430" "C431" "C432" "C433" "C434" "C435" "C436" "C437" "C438" "C439"
[397] "C440" "C441" "C442" "C443" "C444" "C445" "C446" "C447" "C448" "C449" "C450" "C451"
[409] "C452" "C453" "C454" "C455" "C456" "C457" "C458" "C459" "C460" "C461" "C462" "C463"
[421] "C464" "C465" "C466" "C467" "C468" "C469" "C470" "C471" "C472" "C473" "C474" "C475"
[433] "C476" "C478" "C479" "C480" "C481" "C482" "C483" "C484" "C485" "C486" "C487" "C488"
[445] "C489" "C490" "C491" "C492" "C493" "C494" "C495" "C496" "C497" "C498" "C499" "C500"
[457] "C501" "C502" "C503" "C504" "C505" "C506" "C507" "C508" "C509" "C510" "C511" "C512"
[469] "C513" "C514" "C515" "C516" "C517" "C518" "C519" "C520" "C521" "C522" "C523" "C524"
[481] "C525" "C526" "C527" "C528" "C529" "C530" "C531" "C532" "C533" "C534" "C535" "C536"
[493] "C537" "C538" "C539" "C540" "C541" "C542" "C543" "C544" "C545" "C546" "C547" "C548"
[505] "C549" "C550" "C551" "C552" "C553" "C554" "C555" "C556" "C557" "C558" "C559" "C560"
[517] "C562" "C563" "C564" "C565" "C566" "C567" "C568" "C569" "C570" "C571" "C572" "C573"
[529] "C574" "C575" "C576" "C577" "C578" "C579" "C580" "C581" "C582" "C583" "C584" "C585"
[541] "C586" "C587" "C588" "C589" "C590" "C591" "C592" "C593" "C594" "C595" "C596" "C597"
[553] "C598" "C599" "C600" "C601" "C602" "C603" "C604" "C605" "C606" "C607" "C608" "C609"
[565] "C610" "C611" "C612" "C613" "C614" "C615" "C616" "C617" "C618" "C619" "C620" "C621"
[577] "C622" "C623" "C624" "C625" "C626" "C627" "C628" "C629" "C630" "C631" "C632" "C633"
[589] "C634" "C635" "C636" "C637" "C638" "C639" "C640" "C641" "C642" "C643" "C644" "C647"
[601] "C648" "C649" "C650" "C651" "C652" "C653" "C654" "C655" "C656" "C657" "C658" "C659"
[613] "C660" "C661" "C662" "C663" "C664" "C665" "C666" "C667" "C668" "C669" "C670" "C671"
[625] "C675" "C676" "C677" "C678" "C679" "C680" "C681" "C682" "C683" "C684" "C685" "C686"
[637] "C687" "C688" "C689" "C690" "C691" "C692" "C693" "C694" "C695" "C696" "C697" "C698"
[649] "C699" "C703" "C704" "C705" "C706" "C707" "C708" "C709" "C710" "C711" "C712" "C713"
[661] "C714" "C715" "C716" "C717" "C718" "C719" "C720" "C721" "C722" "C723" "C724" "C725"
[673] "C726" "C727" "C732" "C733" "C734" "C735" "C736" "C737" "C738" "C739" "C740" "C741"
[685] "C742" "C743" "C744" "C745" "C746" "C747" "C748" "C749" "C750" "C751" "C752" "C753"
[697] "C754" "C761" "C762" "C763" "C764" "C765" "C766" "C767" "C768" "C769" "C770" "C771"
[709] "C772" "C773" "C774" "C775" "C776" "C777" "C778" "C779" "C780"
$y
[1] "C785"
# Examine the performance of the trained model
dl1 # display all performance metrics
Model Details:
==============
H2OMultinomialModel: deeplearning
Model ID: DeepLearning_model_R_1507322206419_3
Status of Neuron Layers: predicting C785, 10-class classification, multinomial distribution, CrossEntropy loss, 25,418 weights/biases, 409.8 KB, 600,000 training samples, mini-batch size 1
layer units type dropout l1 l2 mean_rate rate_rms momentum
1 1 717 Input 20.00 %
2 2 32 RectifierDropout 50.00 % 0.000010 0.000000 0.033193 0.184886 0.000000
3 3 32 RectifierDropout 50.00 % 0.000010 0.000000 0.000381 0.000238 0.000000
4 4 32 RectifierDropout 50.00 % 0.000010 0.000000 0.000562 0.000300 0.000000
5 5 10 Softmax 0.000010 0.000000 0.002876 0.002770 0.000000
mean_weight weight_rms mean_bias bias_rms
1
2 -0.012797 0.068251 0.489545 0.154301
3 -0.016404 0.211795 0.786406 0.364354
4 -0.047360 0.210892 0.593834 0.442483
5 -0.451849 1.028717 -2.187110 1.001856
H2OMultinomialMetrics: deeplearning
** Reported on training data. **
** Metrics reported on temporary training frame with 9896 samples **
Training Set Metrics:
=====================
MSE: (Extract with `h2o.mse`) 0.1733473
RMSE: (Extract with `h2o.rmse`) 0.41635
Logloss: (Extract with `h2o.logloss`) 0.5235443
Mean Per-Class Error: 0.1105624
Confusion Matrix: Extract with `h2o.confusionMatrix(<model>,train = TRUE)`)
=========================================================================
Confusion Matrix: Row labels: Actual class; Column labels: Predicted class
0 1 2 3 4 5 6 7 8 9 Error Rate
0 906 0 31 0 1 6 9 1 6 0 0.0563 = 54 / 960
1 0 1042 14 6 0 1 3 0 27 0 0.0467 = 51 / 1,093
2 4 2 948 6 1 2 18 8 21 0 0.0614 = 62 / 1,010
3 0 2 33 883 0 25 2 3 23 3 0.0934 = 91 / 974
4 3 0 14 1 726 103 9 1 39 66 0.2453 = 236 / 962
5 4 1 20 68 1 742 6 1 39 3 0.1616 = 143 / 885
6 3 2 30 0 2 11 911 0 6 0 0.0560 = 54 / 965
7 3 4 19 37 4 5 0 937 14 17 0.0990 = 103 / 1,040
8 6 10 24 29 0 27 6 0 889 0 0.1029 = 102 / 991
9 3 2 3 34 7 65 2 32 38 830 0.1831 = 186 / 1,016
Totals 932 1065 1136 1064 742 987 966 983 1102 919 0.1093 = 1,082 / 9,896
Hit Ratio Table: Extract with `h2o.hit_ratio_table(<model>,train = TRUE)`
=======================================================================
Top-10 Hit Ratios:
k hit_ratio
1 1 0.890663
2 2 0.939875
3 3 0.960085
4 4 0.976960
5 5 0.984640
6 6 0.990097
7 7 0.993836
8 8 0.997373
9 9 0.999293
10 10 1.000000
H2OMultinomialMetrics: deeplearning
** Reported on validation data. **
** Metrics reported on full validation frame **
Validation Set Metrics:
=====================
Extract validation frame with `h2o.getFrame("RTMP_sid_b48f_158")`
MSE: (Extract with `h2o.mse`) 0.1692767
RMSE: (Extract with `h2o.rmse`) 0.4114325
Logloss: (Extract with `h2o.logloss`) 0.5244263
Mean Per-Class Error: 0.1055435
Confusion Matrix: Extract with `h2o.confusionMatrix(<model>,valid = TRUE)`)
=========================================================================
Confusion Matrix: Row labels: Actual class; Column labels: Predicted class
0 1 2 3 4 5 6 7 8 9 Error Rate
0 944 0 21 1 0 4 5 3 2 0 0.0367 = 36 / 980
1 0 1092 9 3 0 1 4 0 26 0 0.0379 = 43 / 1,135
2 3 0 966 6 3 3 10 10 30 1 0.0640 = 66 / 1,032
3 2 0 33 930 0 14 1 7 21 2 0.0792 = 80 / 1,010
4 1 0 10 2 731 106 18 1 39 74 0.2556 = 251 / 982
5 7 1 8 68 0 751 7 4 46 0 0.1581 = 141 / 892
6 16 3 34 1 2 13 884 0 5 0 0.0772 = 74 / 958
7 0 5 33 28 1 2 0 936 6 17 0.0895 = 92 / 1,028
8 7 3 20 17 2 28 10 5 875 7 0.1016 = 99 / 974
9 6 3 3 30 4 56 2 21 32 852 0.1556 = 157 / 1,009
Totals 986 1107 1137 1086 743 978 941 987 1082 953 0.1039 = 1,039 / 10,000
Hit Ratio Table: Extract with `h2o.hit_ratio_table(<model>,valid = TRUE)`
=======================================================================
Top-10 Hit Ratios:
k hit_ratio
1 1 0.896100
2 2 0.940300
3 3 0.960000
4 4 0.975000
5 5 0.982500
6 6 0.988000
7 7 0.993300
8 8 0.996600
9 9 0.999600
10 10 1.000000
h2o.performance(dl1) # training metrics
H2OMultinomialMetrics: deeplearning
** Reported on training data. **
[output identical to the training metrics in the dl1 printout above]
h2o.performance(dl1, valid = TRUE) # validation metrics
H2OMultinomialMetrics: deeplearning
** Reported on validation data. **
[output identical to the validation metrics in the dl1 printout above]
# Get MSE only
h2o.mse(dl1, valid = TRUE)
[1] 0.1692767
# Cross-validated MSE
h2o.mse(model_cv, xval = TRUE)
[1] 0.1273986
apply prediction to test data
checkpoint model
model_chkp
Model Details:
==============
H2OMultinomialModel: deeplearning
Model ID: DeepLearning_model_R_1507322206419_5
Status of Neuron Layers: predicting C785, 10-class classification, multinomial distribution, CrossEntropy loss, 25,418 weights/biases, 386.8 KB, 1,299,513 training samples, mini-batch size 1
layer units type dropout l1 l2 mean_rate rate_rms momentum
1 1 717 Input 20.00 %
2 2 32 RectifierDropout 50.00 % 0.000010 0.000000 0.030488 0.175945 0.000000
3 3 32 RectifierDropout 50.00 % 0.000010 0.000000 0.000350 0.000289 0.000000
4 4 32 RectifierDropout 50.00 % 0.000010 0.000000 0.000523 0.000361 0.000000
5 5 10 Softmax 0.000010 0.000000 0.003678 0.003975 0.000000
mean_weight weight_rms mean_bias bias_rms
1
2 -0.015660 0.072580 0.532219 0.247582
3 -0.010307 0.215607 0.707758 0.400854
4 -0.042087 0.220341 0.552796 0.537820
5 -0.921071 1.392229 -4.404200 1.106735
H2OMultinomialMetrics: deeplearning
** Reported on training data. **
** Metrics reported on temporary training frame with 10081 samples **
Training Set Metrics:
=====================
MSE: (Extract with `h2o.mse`) 0.1690245
RMSE: (Extract with `h2o.rmse`) 0.4111259
Logloss: (Extract with `h2o.logloss`) 0.5166035
Mean Per-Class Error: 0.1141356
Confusion Matrix: Extract with `h2o.confusionMatrix(<model>,train = TRUE)`)
=========================================================================
Confusion Matrix: Row labels: Actual class; Column labels: Predicted class
0 1 2 3 4 5 6 7 8 9 Error Rate
0 885 0 104 0 1 13 5 0 5 1 0.1272 = 129 / 1,014
1 0 1110 15 7 1 5 2 0 37 0 0.0569 = 67 / 1,177
2 2 0 926 3 3 3 9 0 17 2 0.0404 = 39 / 965
3 0 2 65 909 0 14 3 0 10 1 0.0946 = 95 / 1,004
4 0 1 17 1 829 55 14 1 7 33 0.1347 = 129 / 958
5 1 1 25 51 2 767 15 0 31 2 0.1430 = 128 / 895
6 5 0 23 0 1 11 971 0 1 0 0.0405 = 41 / 1,012
7 0 4 71 16 1 13 2 930 4 32 0.1333 = 143 / 1,073
8 0 10 38 15 0 23 10 1 891 1 0.0991 = 98 / 989
9 0 1 8 31 8 189 2 9 22 724 0.2716 = 270 / 994
Totals 893 1129 1292 1033 846 1093 1033 941 1025 796 0.1130 = 1,139 / 10,081
Hit Ratio Table: Extract with `h2o.hit_ratio_table(<model>,train = TRUE)`
=======================================================================
Top-10 Hit Ratios:
k hit_ratio
1 1 0.887015
2 2 0.943656
3 3 0.962305
4 4 0.976193
5 5 0.983037
6 6 0.990477
7 7 0.995338
8 8 0.997421
9 9 0.999008
10 10 1.000000
H2OMultinomialMetrics: deeplearning
** Reported on validation data. **
** Metrics reported on full validation frame **
Validation Set Metrics:
=====================
Extract validation frame with `h2o.getFrame("RTMP_sid_b48f_158")`
MSE: (Extract with `h2o.mse`) 0.1686103
RMSE: (Extract with `h2o.rmse`) 0.4106219
Logloss: (Extract with `h2o.logloss`) 0.5216791
Mean Per-Class Error: 0.1149102
Confusion Matrix: Extract with `h2o.confusionMatrix(<model>,valid = TRUE)`)
=========================================================================
Confusion Matrix: Row labels: Actual class; Column labels: Predicted class
0 1 2 3 4 5 6 7 8 9 Error Rate
0 844 0 109 1 0 14 8 2 2 0 0.1388 = 136 / 980
1 0 1097 11 2 0 1 4 0 20 0 0.0335 = 38 / 1,135
2 1 0 986 4 4 3 9 3 21 1 0.0446 = 46 / 1,032
3 0 0 48 921 0 16 2 5 17 1 0.0881 = 89 / 1,010
4 0 0 13 0 814 64 23 0 8 60 0.1711 = 168 / 982
5 1 1 18 59 0 766 13 2 31 1 0.1413 = 126 / 892
6 3 2 30 1 0 15 907 0 0 0 0.0532 = 51 / 958
7 0 4 84 20 3 7 2 872 4 32 0.1518 = 156 / 1,028
8 0 2 36 14 2 29 16 2 869 4 0.1078 = 105 / 974
9 2 3 7 17 8 158 2 5 19 788 0.2190 = 221 / 1,009
Totals 851 1109 1342 1039 831 1073 986 891 991 887 0.1136 = 1,136 / 10,000
Hit Ratio Table: Extract with `h2o.hit_ratio_table(<model>,valid = TRUE)`
=======================================================================
Top-10 Hit Ratios:
k hit_ratio
1 1 0.886400
2 2 0.945300
3 3 0.965300
4 4 0.976100
5 5 0.981800
6 6 0.989000
7 7 0.994500
8 8 0.997500
9 9 0.999300
10 10 1.000000
save model
model_path<-h2o.saveModel(object=dl1,path=getwd(), force=TRUE)
print(model_path)
[1] "C:\\Users\\r631758\\Desktop\\r631758\\R codes\\H2O\\exercise\\DeepLearning_model_R_1507322206419_3"
retrieve model by h2o key
model
Model Details:
==============
H2OMultinomialModel: deeplearning
Model ID: DeepLearning_model_R_1507322206419_5
[output identical to the model_chkp printout above; retrieving by key returns the same model]
world record run used epochs=8000
saved_model
Model Details:
==============
H2OMultinomialModel: deeplearning
Model ID: DeepLearning_model_R_1507322206419_6
Status of Neuron Layers: predicting C785, 10-class classification, multinomial distribution, CrossEntropy loss, 3,904,522 weights/biases, 44.8 MB, 600,000 training samples, mini-batch size 1
layer units type dropout l1 l2 mean_rate rate_rms momentum
1 1 717 Input 20.00 %
2 2 1024 RectifierDropout 50.00 % 0.000010 0.000000 0.191445 0.298210 0.000000
3 3 1024 RectifierDropout 50.00 % 0.000010 0.000000 0.006911 0.006140 0.000000
4 4 2048 RectifierDropout 50.00 % 0.000010 0.000000 0.028010 0.024994 0.000000
5 5 10 Softmax 0.000010 0.000000 0.016443 0.055269 0.000000
mean_weight weight_rms mean_bias bias_rms
1
2 0.005567 0.045159 0.232284 0.075551
3 -0.007659 0.038646 0.963923 0.035513
4 -0.005470 0.029926 0.786008 0.092608
5 -0.052810 0.046578 -1.113460 0.090975
H2OMultinomialMetrics: deeplearning
** Reported on training data. **
** Metrics reported on temporary training frame with 9949 samples **
Training Set Metrics:
=====================
MSE: (Extract with `h2o.mse`) 0.007523151
RMSE: (Extract with `h2o.rmse`) 0.0867361
Logloss: (Extract with `h2o.logloss`) 0.02923425
Mean Per-Class Error: 0.008487407
Confusion Matrix: Extract with `h2o.confusionMatrix(<model>,train = TRUE)`)
=========================================================================
Confusion Matrix: Row labels: Actual class; Column labels: Predicted class
0 1 2 3 4 5 6 7 8 9 Error Rate
0 980 0 0 1 1 0 1 0 0 1 0.0041 = 4 / 984
1 0 1097 3 0 0 0 0 1 0 0 0.0036 = 4 / 1,101
2 0 0 996 3 0 0 0 2 0 1 0.0060 = 6 / 1,002
3 0 0 3 977 0 3 0 1 2 0 0.0091 = 9 / 986
4 0 0 1 0 983 0 1 3 0 7 0.0121 = 12 / 995
5 1 1 1 2 0 844 2 1 1 0 0.0106 = 9 / 853
6 3 0 1 0 0 1 965 0 0 0 0.0052 = 5 / 970
7 0 3 6 0 0 0 0 1036 0 3 0.0115 = 12 / 1,048
8 2 0 1 3 0 2 1 1 979 0 0.0101 = 10 / 989
9 0 1 0 0 1 3 0 7 1 1008 0.0127 = 13 / 1,021
Totals 986 1102 1012 986 985 853 970 1052 983 1020 0.0084 = 84 / 9,949
Hit Ratio Table: Extract with `h2o.hit_ratio_table(<model>,train = TRUE)`
=======================================================================
Top-10 Hit Ratios:
k hit_ratio
1 1 0.991557
2 2 0.998291
3 3 0.999095
4 4 0.999699
5 5 1.000000
6 6 1.000000
7 7 1.000000
8 8 1.000000
9 9 1.000000
10 10 1.000000
H2OMultinomialMetrics: deeplearning
** Reported on validation data. **
** Metrics reported on full validation frame **
Validation Set Metrics:
=====================
Extract validation frame with `h2o.getFrame("RTMP_sid_b48f_158")`
MSE: (Extract with `h2o.mse`) 0.01601349
RMSE: (Extract with `h2o.rmse`) 0.1265444
Logloss: (Extract with `h2o.logloss`) 0.06764258
Mean Per-Class Error: 0.01966306
Confusion Matrix: Extract with `h2o.confusionMatrix(<model>,valid = TRUE)`)
=========================================================================
Confusion Matrix: Row labels: Actual class; Column labels: Predicted class
0 1 2 3 4 5 6 7 8 9 Error Rate
0 971 0 0 1 0 1 4 1 2 0 0.0092 = 9 / 980
1 0 1126 3 2 0 0 2 0 2 0 0.0079 = 9 / 1,135
2 5 0 1014 3 1 0 2 5 2 0 0.0174 = 18 / 1,032
3 0 0 1 996 0 3 0 5 3 2 0.0139 = 14 / 1,010
4 3 0 3 0 959 0 4 2 1 10 0.0234 = 23 / 982
5 2 0 0 10 1 874 2 1 1 1 0.0202 = 18 / 892
6 5 2 0 1 3 3 940 0 4 0 0.0188 = 18 / 958
7 1 5 10 3 0 0 0 1000 1 8 0.0272 = 28 / 1,028
8 4 1 3 6 1 8 1 3 943 4 0.0318 = 31 / 974
9 4 2 0 7 7 1 0 4 2 982 0.0268 = 27 / 1,009
Totals 995 1136 1034 1029 972 890 955 1021 961 1007 0.0195 = 195 / 10,000
Hit Ratio Table: Extract with `h2o.hit_ratio_table(<model>,valid = TRUE)`
=======================================================================
Top-10 Hit Ratios:
k hit_ratio
1 1 0.980500
2 2 0.994700
3 3 0.998200
4 4 0.999500
5 5 0.999600
6 6 0.999900
7 7 1.000000
8 8 1.000000
9 9 1.000000
10 10 1.000000
---
title: "Deep Learning h2o"
output: html_notebook
---

#load library start h2o
```{r}
library(h2o)
h2o.init()
h2o.removeAll()
```

```{r}
example(h2o.deeplearning)
demo(h2o.deeplearning)
```

#load sample data
```{r}
spiral<-h2o.importFile(path="Z:\\HealthCare Informatics\\r631758\\R codes\\H2O\\exercise\\spiral.csv")
grid<-h2o.importFile(path="Z:\\HealthCare Informatics\\r631758\\R codes\\H2O\\exercise\\grid.csv")
```

#Define helper to plot contours
```{r}
plotC<-function(name, model, data=spiral, g=grid){
  data<-as.data.frame(data)                  # pull the spiral points into R for plotting
  pred<-as.data.frame(h2o.predict(model,g))  # classify every point of the regular grid
  n<-0.5*(sqrt(nrow(g))-1); d<-1.5; h<-d*(-n:n)/n  # grid is (2n+1)x(2n+1) over [-d,d]^2
  plot(data[,-3],pch=19,col=data[,3],cex=0.5,xlim=c(-d,d), ylim=c(-d,d), main=name)
  contour(h,h,z=array(ifelse(pred[,1]=="Red",0,1),dim=c(2*n+1,2*n+1)),col="blue", lwd=2, add=T)  # decision boundary
}
```

#dev.new(noRStudioGD=FALSE) #direct plotting output to a new window
```{r}
par(mfrow=c(2,2)) #set up the canvas for 2x2 plots
plotC( "DL", h2o.deeplearning(1:2,3,spiral,epochs=1e3))
plotC("GBM", h2o.gbm         (1:2,3,spiral))
plotC("DRF", h2o.randomForest(1:2,3,spiral))
plotC("GLM", h2o.glm         (1:2,3,spiral,family="binomial"))
```

#dev.new(noRStudioGD=FALSE) #direct plotting output to a new window
```{r}
par(mfrow=c(2,2)) #set up the canvas for 2x2 plots
ep <- c(1,250,500,750)
plotC(paste0("DL ",ep[1]," epochs"),
      h2o.deeplearning(1:2,3,spiral,epochs=ep[1],
                              model_id="dl_1"))
plotC(paste0("DL ",ep[2]," epochs"),
      h2o.deeplearning(1:2,3,spiral,epochs=ep[2],
            checkpoint="dl_1",model_id="dl_2"))
plotC(paste0("DL ",ep[3]," epochs"),
      h2o.deeplearning(1:2,3,spiral,epochs=ep[3],
            checkpoint="dl_2",model_id="dl_3"))
plotC(paste0("DL ",ep[4]," epochs"),
      h2o.deeplearning(1:2,3,spiral,epochs=ep[4],
            checkpoint="dl_3",model_id="dl_4"))
```

#You can see how the network learns the structure of the spirals with enough training time. We explore different network architectures next:
##dev.new(noRStudioGD=FALSE) #direct plotting output to a new window
```{r}
par(mfrow=c(2,2)) #set up the canvas for 2x2 plots
for (hidden in list(c(11,13,17,19),c(42,42,42),c(200,200),c(1000))) {
  plotC(paste0("DL hidden=",paste0(hidden, collapse="x")),
        h2o.deeplearning(1:2,3,spiral,hidden=hidden,epochs=500))
}
```

#It is clear that different configurations can achieve similar performance, and that tuning will be required for optimal results. Next, we compare different activation functions, including one with 50% dropout regularization in the hidden layers:

#dev.new(noRStudioGD=FALSE) #direct plotting output to a new window

```{r}
par(mfrow=c(2,2)) #set up the canvas for 2x2 plots
for (act in c("Tanh","Maxout","Rectifier","RectifierWithDropout")) {
  plotC(paste0("DL ",act," activation"), 
        h2o.deeplearning(1:2,3,spiral,
              activation=act,hidden=c(100,100),epochs=1000))
}

```
#Clearly, the dropout rate was too high or the number of epochs was too low for the last configuration; even so, dropout networks often end up performing best on larger datasets where generalization matters.
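
#If the 50% default hidden-layer dropout is the problem, `hidden_dropout_ratios` can reduce it per layer; a minimal sketch (the 10% values are illustrative assumptions, not tuned):
```{r}
# Re-run the dropout configuration with milder 10% dropout in each hidden layer.
plotC("DL RectifierWithDropout 10%",
      h2o.deeplearning(1:2,3,spiral,
            activation="RectifierWithDropout",hidden=c(100,100),
            hidden_dropout_ratios=c(0.1,0.1),epochs=1000))
```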

```{r}
h2o.shutdown()
```


#To predict the 80th percentile of the petal length of the Iris dataset in R
```{r}
irisPath <- system.file("extdata", "iris_wheader.csv", package = "h2o")
iris.hex <- h2o.uploadFile(path = irisPath)
iris.R<-as.data.frame(iris.hex)
splits<-h2o.splitFrame(iris.hex, ratios=0.7, seed=1234)
dl1<-h2o.deeplearning(x=1:2, y="petal_len", training_frame = splits[[1]], distribution = "quantile",quantile_alpha = 0.8)
dl1
```
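
#As a sanity check (a sketch reusing `splits` and `iris.R` from above), the model's average prediction on the holdout split should sit near the empirical 80th percentile of petal length:
```{r}
# Score the holdout split and compare against R's empirical quantile.
pred80 <- as.data.frame(h2o.predict(dl1, newdata = splits[[2]]))
mean(pred80$predict)                      # model's average 80th-percentile estimate
quantile(iris.R$petal_len, probs = 0.8)   # empirical 80th percentile for reference
```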

#handwriting example
```{r}
train<-h2o.importFile(path="https://h2o-public-test-data.s3.amazonaws.com/bigdata/laptop/mnist/train.csv.gz")
test<-h2o.importFile(path="https://h2o-public-test-data.s3.amazonaws.com/bigdata/laptop/mnist/test.csv.gz")
# summary(train)
# summary(test)
```
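
#A quick structural check before modeling (a sketch; the expected MNIST shapes are noted as comments):
```{r}
dim(train)  # expect 60000 rows x 785 columns (pixels C1-C784 + label C785)
dim(test)   # expect 10000 rows x 785 columns
```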

#specify response and predictor
```{r}
y="C785"
x<-setdiff(names(train),y)
x
```

#set y as factor
```{r}
train[,y]=as.factor(train[,y])
test[,y]=as.factor(test[,y])
dl1<-h2o.deeplearning(x=x,y=y, training_frame = train, validation_frame = test, distribution = "multinomial", activation="RectifierWithDropout", hidden=c(32,32,32), input_dropout_ratio=0.2, sparse=TRUE, l1=1e-5, epochs = 10)
dl1

model_cv<-h2o.deeplearning(x=x,y=y, training_frame = train,  distribution = "multinomial", activation="RectifierWithDropout", hidden=c(32,32,32), input_dropout_ratio=0.2, sparse=TRUE, l1=1e-5, epochs = 10, nfolds=5)
model_cv
```
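
#H2O also records a per-epoch scoring history; a small sketch (column names can vary slightly across H2O versions, hence the guard):
```{r}
# Pull the scoring history into an R data.frame; one row per scoring event.
sh <- as.data.frame(h2o.scoreHistory(dl1))
head(sh[, intersect(c("epochs", "training_logloss", "validation_logloss"), names(sh))])
```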

```{r}
# View specified parameters of the deep learning model
 dl1@parameters

 # Examine the performance of the trained model
dl1 # display all performance metrics

 h2o.performance(dl1) # training metrics
 h2o.performance(dl1, valid = TRUE) # validation metrics

 # Get MSE only
 h2o.mse(dl1, valid = TRUE)

 # Cross-validated MSE
 h2o.mse(model_cv, xval = TRUE)
```
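
#The same extractor pattern works for the other metrics reported above; a short sketch:
```{r}
# More extractors following the pattern above.
h2o.logloss(dl1, valid = TRUE)              # validation logloss
h2o.confusionMatrix(dl1, valid = TRUE)      # validation confusion matrix
h2o.hit_ratio_table(model_cv, xval = TRUE)  # cross-validated hit ratios
```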

#apply prediction to test data
```{r}
pred<-h2o.predict(dl1,newdata=test)
head(pred)
```
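
#The prediction frame holds a `predict` label column plus per-class probabilities p0-p9; a sketch (using the objects above) that scores overall test accuracy:
```{r}
# Compare predicted labels against the true labels in C785.
pred.df <- as.data.frame(pred)
truth <- as.data.frame(test[, y])
mean(as.character(pred.df$predict) == as.character(truth[, 1]))  # overall accuracy
```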

#checkpoint model
```{r}
# Re-start the training process on a saved DL model
 # using the `checkpoint` argument
 model_chkp <- h2o.deeplearning(
 x = x,
 y = y,
 training_frame = train,
 validation_frame = test,
 distribution = "multinomial",
 checkpoint = dl1@model_id,
 activation = "RectifierWithDropout",
 hidden = c(32,32,32),
 input_dropout_ratio = 0.2,
 sparse = TRUE,
 l1 = 1e-5,
 epochs = 20)
model_chkp

```
#save model
```{r}
model_path<-h2o.saveModel(object=dl1,path=getwd(), force=TRUE)
print(model_path)
saved_model<-h2o.loadModel(model_path)
```

#retrieve model by h2o key
```{r}
model <- h2o.getModel(model_id = model_chkp@model_id)
model
```
#world record run used epochs=8000
```{r}
Starttime <- Sys.time()
# The full-size run below is expensive, so it is left commented out;
# uncomment it to reproduce the timing (as written, model_time measures
# essentially nothing and `model` is still the checkpointed model above).
# model <- h2o.deeplearning(x=x, y=y,
#  training_frame=train, validation_frame=test,
#  activation="RectifierWithDropout",
#  hidden=c(1024,1024,2048), epochs=10,
#  input_dropout_ratio=0.2, l1=1e-5, max_w2=10,
#  train_samples_per_iteration=-1,
#  classification_stop=-1, stopping_rounds=0)
model_time <- Sys.time()-Starttime
print(paste("Took", round(model_time, digits=2), units(model_time), "to build DeepLearning model."))
model@parameters

# worldModel<-h2o.saveModel(object=model,path="./WorldModel", force=TRUE)
#print(worldModel)
saved_model<-h2o.loadModel("C:\\Users\\r631758\\Desktop\\r631758\\R codes\\H2O\\exercise\\WorldModel\\DeepLearning_model_R_1507322206419_6")
saved_model
```
