Libraries

library(kableExtra)
library(tidyverse)
library(ggplot2)
library(dplyr)
library(TSstudio)
library(RColorBrewer)
library(GGally)
library(grid)
library(gridExtra)
library(mlbench)
library(psych)
library(cowplot)
library(corrplot)
library(caret)
library(geoR)
library(reshape)
library(naniar)
library(mice)
library(DMwR)
library(AppliedPredictiveModeling)
library(pls)
library(glmnet)
library(elasticnet)
library(earth)
library(kernlab)
library(randomForest)
library(vip)
library(party)
library(Cubist)
library(gbm)
library(rpart.plot)

Applied Predictive Modeling

Exercise 8.1

Recreate the simulated data from Exercise 7.2:

set.seed(200)

simulated <- mlbench.friedman1(200, sd = 1)
simulated <- cbind(simulated$x, simulated$y)
simulated <- as.data.frame(simulated)
colnames(simulated)[ncol(simulated)] <- "y"
  (a) Fit a random forest model to all of the predictors, then estimate the variable importance scores:
model1 <- randomForest(y ~ ., data = simulated,
                        importance = TRUE,
                        ntree = 1000)


rfImp1 <- varImp(model1, scale = FALSE)

rfImp1
        Overall
V1    8.7322354
V2    6.4153694
V3    0.7635918
V4    7.6151188
V5    2.0235246
V6    0.1651112
V7   -0.0059617
V8   -0.1663626
V9   -0.0952927
V10  -0.0749448
varImpPlot(model1, scale = FALSE)

rfImp1 <- model1$importance 
vip(model1, color = 'red', fill='orange') + 
  ggtitle('Random Forest Model Variable Importance')

Did the random forest model significantly use the uninformative predictors (V6 – V10)?

No. The uninformative predictors V6–V10 received importance scores near zero, far below those of the informative predictors V1, V2, V4, and V5.
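As a quick sanity check, here is a minimal sketch that flags the predictors the forest essentially ignored (the 0.5 cutoff is arbitrary, chosen to sit between V3's score and the noise-level scores):

# Sketch: list predictors whose unscaled importance falls below an
# arbitrary cutoff of 0.5 -- expected to return V6 through V10
lowImp <- varImp(model1, scale = FALSE)
rownames(lowImp)[lowImp$Overall < 0.5]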

  (b) Now add an additional predictor that is highly correlated with one of the informative predictors. For example:
simulated$duplicate1 <- simulated$V1 + rnorm(200) * .1
cor(simulated$duplicate1, simulated$V1)
## [1] 0.9460206

Fit another random forest model to these data. Did the importance score for V1 change?

model2 <- randomForest(y ~ ., data = simulated, importance = TRUE, ntree = 1000)
rfImp2 <- varImp(model2, scale = FALSE)
rfImp2
              Overall
V1          5.6911997
V2          6.0689606
V3          0.6297022
V4          7.0475224
V5          1.8723844
V6          0.1356906
V7         -0.0134564
V8         -0.0437056
V9          0.0084044
V10         0.0289481
duplicate1  4.2833158
grid.arrange(vip(model1, color = 'red', fill='dodgerblue4') + 
  ggtitle('Model1 Var Imp'), vip(model2, color = 'green', fill='red') + 
  ggtitle('Model2 Var Imp'), ncol = 2)

When we add a predictor that is highly correlated with V1, the importance score for V1 decreases (from roughly 8.73 to 5.69), and V4 becomes the most important predictor.
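A rough way to see that the credit is being split rather than lost is to add the two correlated scores together; this sketch uses the caret-style importance table computed above:

# Sketch: the combined importance of V1 and its near-copy is roughly
# comparable to V1's original score of ~8.73 in model1
rfImp2["V1", "Overall"] + rfImp2["duplicate1", "Overall"]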

What happens when you add another predictor that is also highly correlated with V1?

simulated$duplicate2 <- simulated$V1 + rnorm(200) * .2
model3 <- randomForest(y ~ ., data = simulated, importance = TRUE, ntree = 1000)
rfImp3 <- varImp(model3, scale = FALSE)
rfImp3
              Overall
V1          5.4817443
V2          6.5652231
V3          0.5417128
V4          7.3729648
V5          1.9337776
V6          0.2005519
V7         -0.0207655
V8         -0.0428728
V9         -0.0437961
V10        -0.0381072
duplicate1  4.2048649
duplicate2  0.7558199

When we add yet another predictor highly correlated with V1, the importance of V1 decreases once more: the correlated predictors split the credit that V1 alone received originally.
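The same bookkeeping extends to the three-way split (a sketch):

# Sketch: V1 and its two near-copies together still carry roughly the
# credit V1 received on its own in model1
sum(rfImp3[c("V1", "duplicate1", "duplicate2"), "Overall"])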

  (c) Use the cforest() function in the party package to fit a random forest model using conditional inference trees. The party package function varimp() can calculate predictor importance. The conditional argument of that function toggles between the traditional importance measure and the modified version described in Strobl et al. (2007). Do these importances show the same pattern as the traditional random forest model?
cmodel1 <- cforest(y ~ ., data = simulated)
varimp(cmodel1, conditional = TRUE)
##           V1           V2           V3           V4           V5           V6 
##  1.676818578  4.640157148  0.013789059  5.546792297  0.984086706  0.012814670 
##           V7           V8           V9          V10   duplicate1   duplicate2 
## -0.011072713  0.005586275 -0.011191165  0.001480628  1.758185691 -0.008242676

The conditional-inference importances show broadly the same pattern: the model favors the same informative predictors, and V6–V10 remain unimportant. V3, however, has become even less important relative to the traditional random forest results.
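For a direct side-by-side view, the traditional (unconditional) permutation importance can also be pulled from the same cforest fit; a sketch (conditional importance in particular can be slow on larger data):

# Sketch: unconditional permutation importance from the same cforest fit,
# for comparison with the conditional scores above
varimp(cmodel1, conditional = FALSE)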

  (d) Repeat this process with different tree models, such as boosted trees and Cubist. Does the same pattern occur?

Cubist Model

model4 <- cubist(x = simulated[, setdiff(names(simulated), "y")],
                 y = simulated$y)


# Conditional variable importance
cfImp4 <- varImp(model4, conditional = TRUE)
cfImp4
           Overall
V1              50
V2              50
V4              50
V5              50
duplicate1      50
V3               0
V6               0
V7               0
V8               0
V9               0
V10              0
duplicate2       0
# Un-conditional variable importance
cfImp5 <- varImp(model4, conditional = FALSE)
cfImp5
           Overall
V1              50
V2              50
V4              50
V5              50
duplicate1      50
V3               0
V6               0
V7               0
V8               0
V9               0
V10              0
duplicate2       0
old.par <- par(mfrow=c(1, 2))
barplot((t(cfImp4)),horiz = TRUE, main = 'Conditional', col = rainbow(3))
barplot((t(cfImp5)),horiz = TRUE, main = 'Un-Conditional', col = rainbow(5))

Boosted Trees

gbmGrid <- expand.grid(interaction.depth = seq(1, 5, by = 2),
                       n.trees = seq(100, 1000, by = 100),
                       shrinkage = 0.1,
                       n.minobsinnode = 5)
model4 <- train(y ~ ., data = simulated, method = 'gbm',
                tuneGrid = gbmGrid, verbose = FALSE)


# Conditional variable importance
cfImp4 <- varImp(model4, conditional = TRUE)
cfImp4
## gbm variable importance
## 
##            Overall
## V4         100.000
## V2          75.915
## V1          62.233
## duplicate1  45.773
## V5          40.044
## V3          24.998
## V10          5.108
## V6           2.605
## V7           2.188
## V9           2.057
## V8           1.036
## duplicate2   0.000
# Un-conditional variable importance
cfImp5 <- varImp(model4, conditional = FALSE)
cfImp5
## gbm variable importance
## 
##            Overall
## V4         100.000
## V2          75.915
## V1          62.233
## duplicate1  45.773
## V5          40.044
## V3          24.998
## V10          5.108
## V6           2.605
## V7           2.188
## V9           2.057
## V8           1.036
## duplicate2   0.000
old.par <- par(mfrow=c(1, 2))
barplot((t(cfImp4$importance)),horiz = TRUE, main = 'Conditional', col = rainbow(3))
barplot((t(cfImp5$importance)),horiz = TRUE, main = 'Un-Conditional', col = rainbow(5))

Conditional and unconditional variable importance are identical for the Cubist and boosted-tree fits: varImp() for these models has no conditional argument, so it is silently ignored. Both models nevertheless show the same broad pattern as before, with the informative predictors (plus duplicate1) dominating and V6–V10 receiving little or no importance.
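A quick check (sketch) confirms the two tables really are identical for the boosted-tree fit, since the stray conditional argument is simply absorbed by the ... of the varImp() method:

# Sketch: 'conditional' has no effect here, so the tables should match
identical(cfImp4$importance, cfImp5$importance)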

Exercise 8.2

Use a simulation to show tree bias with different granularities.

Let's create a simulated dataset where the output variable y is the sum of two input variables, V1 and V2. V1 is drawn uniformly over a wide range (2–500), while V2 is normal with mean 2 and standard deviation 10, so V1 takes values over a much wider range.

V1 <- runif(1000, 2,500)
V2 <- rnorm(1000, 2,10)
V3 <- rnorm(1000, 1,1000)
y <- V2 + V1 

df <- data.frame(V1, V2, V3, y)
model3 <- cforest(y ~ ., data = df)

cfImp4 <- varimp(model3, conditional = FALSE)
barplot(sort(cfImp4),horiz = TRUE, main = 'Un-Conditional', col = rainbow(5))

We can see that the random forest gives a much higher importance score to V1, the variable with more distinct values over the wider range.

Now let's reverse the granularity of V1 and V2; the formula for the output variable y is unchanged. We refit the forest and observe the variable importance.

V1 <- runif(1000, 2,10)
V2 <- rnorm(1000, 2,500)
V3 <- rnorm(1000, 1,1000)
y <- V2 + V1 
 

df <- data.frame(V1, V2, V3, y)
model3 <- cforest(y ~ ., data = df)

cfImp4 <- varimp(model3, conditional = FALSE)
barplot(sort(cfImp4),horiz = TRUE, main = 'Un-Conditional', col = rainbow(5))

We can see that the random forest now gives the high score to V2, since it is now the more granular variable, with values spanning the wider range.
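Because runif() and rnorm() both produce essentially all-distinct values, the contrast above is really about range more than granularity. A sharper demonstration (a sketch; the variable names here are hypothetical) pits a fine-grained predictor against a coarse one when neither is informative, and a fully grown CART tree still tends to credit the fine-grained one:

library(rpart)
set.seed(100)
X1 <- rnorm(1000)                        # fine-grained: ~1000 distinct values
X2 <- sample(1:4, 1000, replace = TRUE)  # coarse: only 4 distinct values
y  <- rnorm(1000)                        # pure noise: neither predictor matters
fit <- rpart(y ~ X1 + X2,
             control = rpart.control(cp = 0, minsplit = 2))
fit$variable.importance  # X1 typically dominates despite being uninformative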

Exercise 8.3

In stochastic gradient boosting the bagging fraction and learning rate will govern the construction of the trees as they are guided by the gradient. Although the optimal values of these parameters should be obtained through the tuning process, it is helpful to understand how the magnitudes of these parameters affect magnitudes of variable importance. Figure 8.24 provides the variable importance plots for boosting using two extreme values for the bagging fraction (0.1 and 0.9) and the learning rate (0.1 and 0.9) for the solubility data. The left-hand plot has both parameters set to 0.1, and the right-hand plot has both set to 0.9:

Figure 8.24

  (a) Why does the model on the right focus its importance on just the first few predictors, whereas the model on the left spreads importance across more predictors?

The model on the right has a high bagging fraction (0.9): almost every tree sees nearly the same data, so diversity in variable selection is reduced and a few predictors come to dominate the importance scores. Its high learning rate (0.9) compounds this, since each tree contributes heavily and the predictors chosen early accumulate most of the importance. The left-hand model, with both parameters at 0.1, spreads importance across more predictors.
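To make this concrete, here is a sketch that fits gbm directly at the two extremes on the simulated data from Exercise 8.1 (restricted to V1–V10 so a fresh test draw can be scored in the next part; fit_low and fit_high are hypothetical names):

library(gbm)
set.seed(100)
sim10 <- simulated[, c(paste0("V", 1:10), "y")]
fit_low  <- gbm(y ~ ., data = sim10, distribution = "gaussian",
                n.trees = 1000, interaction.depth = 3,
                shrinkage = 0.1, bag.fraction = 0.1, n.minobsinnode = 5)
fit_high <- gbm(y ~ ., data = sim10, distribution = "gaussian",
                n.trees = 1000, interaction.depth = 3,
                shrinkage = 0.9, bag.fraction = 0.9, n.minobsinnode = 5)
summary(fit_low,  plotit = FALSE)  # importance spread across more predictors
summary(fit_high, plotit = FALSE)  # importance concentrated in a few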

  (b) Which model do you think would be more predictive of other samples?

The model with the lower learning rate and bagging fraction should be more predictive of other samples. Its importance is spread over a balanced mix of predictors rather than concentrated in a handful, whereas the right-hand model is strongly fit to the training data and is therefore more likely to fail to generalize to test data.
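Using the two sketch fits from part (a), held-out error can be compared on a fresh draw from the same Friedman 1 simulator (a sketch; exact numbers vary with the seed):

testData <- mlbench.friedman1(500, sd = 1)
testData <- as.data.frame(cbind(testData$x, testData$y))
colnames(testData)[ncol(testData)] <- "y"
pred_low  <- predict(fit_low,  testData, n.trees = 1000)
pred_high <- predict(fit_high, testData, n.trees = 1000)
RMSE(pred_low,  testData$y)   # expected to be the lower of the two
RMSE(pred_high, testData$y)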

  (c) How would increasing interaction depth affect the slope of predictor importance for either model in Fig. 8.24?

As we increase the interaction depth, trees are allowed to grow deeper, so more predictors are considered for splits. This spreads variable importance across more predictors and flattens the importance slope, rather than concentrating very high importance in a few predictors.
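A sketch of the effect: refit the right-hand extreme with much deeper trees and the importance profile should flatten:

# Sketch: same extreme settings as fit_high but with deeper trees
fit_deep <- gbm(y ~ ., data = sim10, distribution = "gaussian",
                n.trees = 1000, interaction.depth = 10,
                shrinkage = 0.9, bag.fraction = 0.9, n.minobsinnode = 5)
summary(fit_deep, plotit = FALSE)  # importance spread over more predictors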

Exercise 8.7

Refer to Exercises 6.3 and 7.5 which describe a chemical manufacturing process. Use the same data imputation, data splitting, and pre-processing steps as before and train several tree-based models:

Data set Preview

data(ChemicalManufacturingProcess)

ChemicalManufacturingProcess %>% kable() %>% kable_styling(bootstrap_options = c("striped", "hover", "condensed", "responsive")) %>% 
  scroll_box(width="100%",height="300px")
[Scrollable preview table of ChemicalManufacturingProcess: 176 samples by 58 columns — Yield plus 12 BiologicalMaterial and 45 ManufacturingProcess predictors, with scattered missing values.]

Imputation Process: K-Nearest Neighbors Technique

I have adopted the KNN approach to impute the missing values in this data set, using the knnImputation() function from the DMwR package with k = 10 as the tuning parameter.

ChemicalManProcessImputed <- knnImputation(ChemicalManufacturingProcess, k = 10)
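As a quick sanity check, we can confirm that no missing values remain after imputation (a minimal sketch using base R):

# Count missing cells before and after imputation; the second count should be 0
sum(is.na(ChemicalManufacturingProcess))
sum(is.na(ChemicalManProcessImputed))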

Pre-Processing

Excluding Near Zero Variance Predictors
nZVIndices <- nearZeroVar(ChemicalManProcessImputed)

ChemicalManProcessTransformed <- ChemicalManProcessImputed[,-nZVIndices]
Excluding Highly Correlated Predictors
corThresh <- 0.9
# Filter on the predictors only (column 1 is the response, Yield)
tooHigh <- findCorrelation(cor(ChemicalManProcessTransformed[, -1]), corThresh)
corrPred <- names(ChemicalManProcessTransformed[, -1])[tooHigh]
ChemicalManProcessTransformed <- ChemicalManProcessTransformed[, -(tooHigh + 1)]
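To gauge how aggressive the two filters were, we can count the dropped columns (an illustrative check; the exact counts depend on the data and the 0.9 threshold):

length(nZVIndices)                   # predictors removed for near-zero variance
length(tooHigh)                      # predictors removed for correlation above 0.9
ncol(ChemicalManProcessTransformed)  # columns remaining (including Yield)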

Test/Train Split

trainingRows <- createDataPartition(ChemicalManProcessTransformed$Yield, times = 1, p = 0.8, list = FALSE)

train_df <- ChemicalManProcessTransformed[trainingRows,]
test_df <- ChemicalManProcessTransformed[-trainingRows,]

df.train.x = train_df[,-1]
df.train.y = train_df[,1]
df.test.x = test_df[,-1]
df.test.y = test_df[,1]
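Note that createDataPartition() draws a random split, so the exact train/test membership (and the downstream performance numbers) will vary between runs. Fixing the seed beforehand makes the analysis reproducible; a sketch, with an arbitrary seed:

set.seed(624)  # arbitrary seed, chosen for illustration
trainingRows <- createDataPartition(ChemicalManProcessTransformed$Yield, times = 1, p = 0.8, list = FALSE)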

Model Evaluation Function

model.eval = function(modelmethod, gridSearch = NULL, ...)
{
  # Fit with 10-fold cross-validation, centering and scaling the predictors;
  # extra arguments (e.g. verbose = FALSE for gbm) are passed through to train()
  Model = train(x = df.train.x, y = df.train.y, method = modelmethod, tuneGrid = gridSearch, preProcess = c('center', 'scale'), trControl = trainControl(method='cv'), ...)
  # Evaluate the tuned model on the held-out test set
  Pred = predict(Model, newdata = df.test.x)
  modelperf = postResample(Pred, df.test.y)
  print(modelperf)
}
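The gridSearch argument lets us override caret's default tuning grid. For example, a hypothetical cost-complexity sweep for the rpart model (values chosen for illustration, not the grid used below):

# Custom cp grid for a simple regression tree
model.eval('rpart', gridSearch = expand.grid(cp = seq(0.01, 0.10, by = 0.01)))
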
  1. Which tree-based regression model gives the optimal resampling and test set performance?

Model1: Simple Regression Tree

perftree = model.eval('rpart')
## Warning in nominalTrainWorkflow(x = x, y = y, wts = weights, info = trainInfo, :
## There were missing values in resampled performance measures.
##      RMSE  Rsquared       MAE 
## 1.4611357 0.3141973 1.1443696

Model2: Random Forest

perfrf = model.eval('rf')
##      RMSE  Rsquared       MAE 
## 1.0303527 0.6667363 0.8019678

Model3: Gradient Boosting Tree

perfgbm = model.eval('gbm', verbose = FALSE)  # verbose = FALSE suppresses gbm's per-iteration log
##      RMSE  Rsquared       MAE 
## 1.1121949 0.6028262 0.8663108

Model4: Cubist

perfcubist = model.eval('cubist')
##      RMSE  Rsquared       MAE 
## 0.8847581 0.7565752 0.6794279
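caret tunes two Cubist parameters, the number of committees and the number of neighbors used to adjust predictions; a wider, purely illustrative search grid could be passed like this:

# Hypothetical Cubist tuning grid (values chosen for demonstration)
cubistGrid <- expand.grid(committees = c(1, 10, 50, 100), neighbors = c(0, 1, 5, 9))
model.eval('cubist', gridSearch = cubistGrid)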

Model Comparison

df.perf = rbind(data.frame(Name = 'SimpleRegressionTree', RMSE = round(perftree[1],4)), 
                data.frame(Name= 'RandomForest', RMSE = round(perfrf[1],4)), 
                data.frame(Name = 'BoostingTree', RMSE = round(perfgbm[1],4)), 
                data.frame(Name = 'Cubist', RMSE = round(perfcubist[1],4)))

ggplot(data = df.perf, aes(x = Name, y = RMSE, fill=Name)) +
  geom_bar(stat="identity", position=position_dodge()) +
  geom_text(aes(label=RMSE), vjust=1, color="white",
            position = position_dodge(0.9), size=3.5)

From the model performance chart above, the Cubist model gives the lowest RMSE on the test set, making it the optimal tree-based model for this data set.
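Since the question also asks about resampling performance, the cross-validated metrics can be compared directly with caret's resamples(). A sketch, assuming we keep the fitted train objects and share one set of CV folds so the resamples are comparable:

set.seed(624)  # arbitrary seed so every model sees the same folds
ctrl <- trainControl(method = 'cv', index = createFolds(df.train.y, k = 10, returnTrain = TRUE))
fitModel <- function(m, ...) train(x = df.train.x, y = df.train.y, method = m,
                                   preProcess = c('center', 'scale'), trControl = ctrl, ...)
rs <- resamples(list(rpart = fitModel('rpart'), rf = fitModel('rf'),
                     gbm = fitModel('gbm', verbose = FALSE), cubist = fitModel('cubist')))
summary(rs)  # cross-validated RMSE, R-squared, and MAE for each model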

  1. Which predictors are most important in the optimal tree-based regression model? Do either the biological or process variables dominate the list? How do the top 10 important predictors compare to the top 10 predictors from the optimal linear and nonlinear models?
cModel <- train(x = df.train.x,
                y = df.train.y,
                method = 'cubist')
vip(cModel, color = 'red', fill='dodgerblue4')
## Warning in vip.default(cModel, color = "red", fill = "dodgerblue4"): Arguments
## `width`, `alpha`, `color`, `fill`, `size`, and `shape` have all been deprecated
## in favor of the new `mapping` and `aesthetics` arguments. They will be removed
## in version 0.3.0.

We can see that manufacturing process variables dominate the list of important predictors, which is consistent with the top predictors identified by the optimal linear and nonlinear models.
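To list the top 10 predictors explicitly, caret's varImp() works on the Cubist fit the same way it does on the earlier linear and nonlinear models (a minimal sketch):

plot(varImp(cModel), top = 10)  # top 10 Cubist predictors by scaled importance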

  1. Plot the optimal single tree with the distribution of yield in the terminal nodes. Does this view of the data provide additional knowledge about the biological or process predictors and their relationship with yield?
singleTree = rpart(Yield ~ ., data = train_df)  # a single regression tree fit on the training data
rpart.plot(singleTree)

From the tree plot above, we can clearly see that higher values of the manufacturing process variables correspond to higher predicted Yield in the terminal nodes.
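rpart.plot() displays only the mean yield in each terminal node. To see the full distribution of yield per node, as the question asks, we can group the training rows by the leaf each one falls into; a sketch using the $where element of the rpart fit, which records every row's terminal node:

# Distribution of training-set Yield within each terminal node of the single tree
boxplot(Yield ~ factor(singleTree$where), data = train_df,
        xlab = 'Terminal node', ylab = 'Yield')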