library(readxl)
library(caret)
library(tidyverse)
library(VIM)
library(ggplot2)
library(GGally)
library(car)
library(glmnet)
Below we import the Excel data from GitHub:
stu_data_url <- 'https://github.com/amedina613/Data624-Project2/raw/refs/heads/main/StudentData.xlsx'
dest_file_stu_data <- 'StudentData.xlsx'
stu_eval_url <- 'https://github.com/amedina613/Data624-Project2/raw/refs/heads/main/StudentEvaluation.xlsx'
dest_file_stu_eval <- 'StudentEvaluation.xlsx'
download.file(stu_data_url, dest_file_stu_data, mode = 'wb')
download.file(stu_eval_url, dest_file_stu_eval, mode = 'wb')
stu_data_raw <- read_excel('StudentData.xlsx',col_names = T)
stu_eval_raw <- read_excel('StudentEvaluation.xlsx',col_names = T)
stu_data_raw <- as.data.frame(stu_data_raw)
stu_eval_raw <- as.data.frame(stu_eval_raw)
First, we check the structure of the data set we will use to train and test the model.
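The check below is a minimal sketch using base R's str(); the printed structure follows.
str(stu_data_raw)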
## 'data.frame': 2571 obs. of 33 variables:
## $ Brand Code : chr "B" "A" "B" "A" ...
## $ Carb Volume : num 5.34 5.43 5.29 5.44 5.49 ...
## $ Fill Ounces : num 24 24 24.1 24 24.3 ...
## $ PC Volume : num 0.263 0.239 0.263 0.293 0.111 ...
## $ Carb Pressure : num 68.2 68.4 70.8 63 67.2 66.6 64.2 67.6 64.2 72 ...
## $ Carb Temp : num 141 140 145 133 137 ...
## $ PSC : num 0.104 0.124 0.09 NA 0.026 0.09 0.128 0.154 0.132 0.014 ...
## $ PSC Fill : num 0.26 0.22 0.34 0.42 0.16 ...
## $ PSC CO2 : num 0.04 0.04 0.16 0.04 0.12 ...
## $ Mnf Flow : num -100 -100 -100 -100 -100 -100 -100 -100 -100 -100 ...
## $ Carb Pressure1 : num 119 122 120 115 118 ...
## $ Fill Pressure : num 46 46 46 46.4 45.8 45.6 51.8 46.8 46 45.2 ...
## $ Hyd Pressure1 : num 0 0 0 0 0 0 0 0 0 0 ...
## $ Hyd Pressure2 : num NA NA NA 0 0 0 0 0 0 0 ...
## $ Hyd Pressure3 : num NA NA NA 0 0 0 0 0 0 0 ...
## $ Hyd Pressure4 : num 118 106 82 92 92 116 124 132 90 108 ...
## $ Filler Level : num 121 119 120 118 119 ...
## $ Filler Speed : num 4002 3986 4020 4012 4010 ...
## $ Temperature : num 66 67.6 67 65.6 65.6 66.2 65.8 65.2 65.4 66.6 ...
## $ Usage cont : num 16.2 19.9 17.8 17.4 17.7 ...
## $ Carb Flow : num 2932 3144 2914 3062 3054 ...
## $ Density : num 0.88 0.92 1.58 1.54 1.54 1.52 0.84 0.84 0.9 0.9 ...
## $ MFR : num 725 727 735 731 723 ...
## $ Balling : num 1.4 1.5 3.14 3.04 3.04 ...
## $ Pressure Vacuum : num -4 -4 -3.8 -4.4 -4.4 -4.4 -4.4 -4.4 -4.4 -4.4 ...
## $ PH : num 8.36 8.26 8.94 8.24 8.26 8.32 8.4 8.38 8.38 8.5 ...
## $ Oxygen Filler : num 0.022 0.026 0.024 0.03 0.03 0.024 0.066 0.046 0.064 0.022 ...
## $ Bowl Setpoint : num 120 120 120 120 120 120 120 120 120 120 ...
## $ Pressure Setpoint: num 46.4 46.8 46.6 46 46 46 46 46 46 46 ...
## $ Air Pressurer : num 143 143 142 146 146 ...
## $ Alch Rel : num 6.58 6.56 7.66 7.14 7.14 7.16 6.54 6.52 6.52 6.54 ...
## $ Carb Rel : num 5.32 5.3 5.84 5.42 5.44 5.44 5.38 5.34 5.34 5.34 ...
## $ Balling Lvl : num 1.48 1.56 3.28 3.04 3.04 3.02 1.44 1.44 1.44 1.38 ...
Next, we check the amount of missing values in each column.
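A minimal sketch of the per-column count, using base R:
colSums(is.na(stu_data_raw))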
## Brand Code Carb Volume Fill Ounces PC Volume
## 120 10 38 39
## Carb Pressure Carb Temp PSC PSC Fill
## 27 26 33 23
## PSC CO2 Mnf Flow Carb Pressure1 Fill Pressure
## 39 2 32 22
## Hyd Pressure1 Hyd Pressure2 Hyd Pressure3 Hyd Pressure4
## 11 15 15 30
## Filler Level Filler Speed Temperature Usage cont
## 20 57 14 5
## Carb Flow Density MFR Balling
## 2 1 212 1
## Pressure Vacuum PH Oxygen Filler Bowl Setpoint
## 0 4 12 2
## Pressure Setpoint Air Pressurer Alch Rel Carb Rel
## 12 0 9 10
## Balling Lvl
## 1
We also visualize missing data:
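One way to do this with the VIM package loaded above (a sketch; the exact plotting call used is an assumption):
aggr(stu_data_raw, numbers = TRUE, sortVars = TRUE)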
The Brand Code column is a categorical variable. We will have to convert it to dummy variables to perform KNN imputation.
First, we convert the column to a factor.
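A one-line sketch of the conversion:
stu_data_raw$`Brand Code` <- as.factor(stu_data_raw$`Brand Code`)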
Next, we convert the Brand Code to dummy variables:
dummy_variables <- dummyVars(~., data = stu_data_raw, fullRank = T)
training_data <- predict(dummy_variables, newdata = stu_data_raw)
## `Brand Code`B `Brand Code`C `Brand Code`D `Carb Volume` `Fill Ounces`
## 1 1 0 0 5.340000 23.96667
## 2 0 0 0 5.426667 24.00667
## 3 1 0 0 5.286667 24.06000
## 4 0 0 0 5.440000 24.00667
## 5 0 0 0 5.486667 24.31333
## 6 0 0 0 5.380000 23.92667
## `PC Volume` `Carb Pressure` `Carb Temp` PSC `PSC Fill` `PSC CO2` `Mnf Flow`
## 1 0.2633333 68.2 141.2 0.104 0.26 0.04 -100
## 2 0.2386667 68.4 139.6 0.124 0.22 0.04 -100
## 3 0.2633333 70.8 144.8 0.090 0.34 0.16 -100
## 4 0.2933333 63.0 132.6 NA 0.42 0.04 -100
## 5 0.1113333 67.2 136.8 0.026 0.16 0.12 -100
## 6 0.2693333 66.6 138.4 0.090 0.24 0.04 -100
## `Carb Pressure1` `Fill Pressure` `Hyd Pressure1` `Hyd Pressure2`
## 1 118.8 46.0 0 NA
## 2 121.6 46.0 0 NA
## 3 120.2 46.0 0 NA
## 4 115.2 46.4 0 0
## 5 118.4 45.8 0 0
## 6 119.6 45.6 0 0
## `Hyd Pressure3` `Hyd Pressure4` `Filler Level` `Filler Speed` Temperature
## 1 NA 118 121.2 4002 66.0
## 2 NA 106 118.6 3986 67.6
## 3 NA 82 120.0 4020 67.0
## 4 0 92 117.8 4012 65.6
## 5 0 92 118.6 4010 65.6
## 6 0 116 120.2 4014 66.2
## `Usage cont` `Carb Flow` Density MFR Balling `Pressure Vacuum` PH
## 1 16.18 2932 0.88 725.0 1.398 -4.0 8.36
## 2 19.90 3144 0.92 726.8 1.498 -4.0 8.26
## 3 17.76 2914 1.58 735.0 3.142 -3.8 8.94
## 4 17.42 3062 1.54 730.6 3.042 -4.4 8.24
## 5 17.68 3054 1.54 722.8 3.042 -4.4 8.26
## 6 23.82 2948 1.52 738.8 2.992 -4.4 8.32
## `Oxygen Filler` `Bowl Setpoint` `Pressure Setpoint` `Air Pressurer`
## 1 0.022 120 46.4 142.6
## 2 0.026 120 46.8 143.0
## 3 0.024 120 46.6 142.0
## 4 0.030 120 46.0 146.2
## 5 0.030 120 46.0 146.2
## 6 0.024 120 46.0 146.6
## `Alch Rel` `Carb Rel` `Balling Lvl`
## 1 6.58 5.32 1.48
## 2 6.56 5.30 1.56
## 3 7.66 5.84 3.28
## 4 7.14 5.42 3.04
## 5 7.14 5.44 3.04
## 6 7.16 5.44 3.02
We now have three dummy variable columns, Brand Code B, Brand Code C, and Brand Code D, each containing zeros and ones. If all three columns are zero, the brand is A.
Next, we center and scale the data and perform KNN imputation for missing values.
preProcess_model <- preProcess(training_data, method = c("center", "scale", "knnImpute"))
training_data_imputed <- predict(preProcess_model, training_data)
We now have zero missing values:
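A sketch of the check, reusing the earlier pattern:
colSums(is.na(training_data_imputed))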
## `Brand Code`B `Brand Code`C `Brand Code`D `Carb Volume`
## 0 0 0 0
## `Fill Ounces` `PC Volume` `Carb Pressure` `Carb Temp`
## 0 0 0 0
## PSC `PSC Fill` `PSC CO2` `Mnf Flow`
## 0 0 0 0
## `Carb Pressure1` `Fill Pressure` `Hyd Pressure1` `Hyd Pressure2`
## 0 0 0 0
## `Hyd Pressure3` `Hyd Pressure4` `Filler Level` `Filler Speed`
## 0 0 0 0
## Temperature `Usage cont` `Carb Flow` Density
## 0 0 0 0
## MFR Balling `Pressure Vacuum` PH
## 0 0 0 0
## `Oxygen Filler` `Bowl Setpoint` `Pressure Setpoint` `Air Pressurer`
## 0 0 0 0
## `Alch Rel` `Carb Rel` `Balling Lvl`
## 0 0 0
Near-zero variance predictors are identified and removed from the imputed numeric data set. These predictors have little variability and do not contribute meaningfully to analysis or modeling.
There is only one variable with near-zero variance:
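The check below is a sketch using caret's nearZeroVar with names = TRUE:
nearZeroVar(training_data_imputed, names = TRUE)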
## [1] "`Hyd Pressure1`"
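The removal step below is a sketch; it assumes caret's nearZeroVar and converts the matrix returned by predict() to a data frame for the caret steps that follow (optional = TRUE preserves the backticked column names).
# Convert to a data frame and drop the near-zero variance column
training_data_imputed <- as.data.frame(training_data_imputed, optional = TRUE)
training_data_imputed <- training_data_imputed[, -nearZeroVar(training_data_imputed)]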
Our working training data set is training_data_imputed:
## `Brand Code`B `Brand Code`C `Brand Code`D `Carb Volume` `Fill Ounces`
## 1 0.9888423 -0.3762116 -0.5786455 -0.28385369 -0.09240162
## 2 -1.0108710 -0.3762116 -0.5786455 0.53079575 0.36458499
## 3 0.9888423 -0.3762116 -0.5786455 -0.78517642 0.97390047
## 4 -1.0108710 -0.3762116 -0.5786455 0.65612644 0.36458499
## 5 -1.0108710 -0.3762116 -0.5786455 1.09478383 3.86814899
## 6 -1.0108710 -0.3762116 -0.5786455 0.09213836 -0.54938823
## `PC Volume` `Carb Pressure` `Carb Temp` PSC `PSC Fill` `PSC CO2`
## 1 -0.2271248 0.002946277 0.0260259 0.3942913 0.5487360 -0.3813757
## 2 -0.6335262 0.059472131 -0.3702701 0.8002265 0.2091248 -0.3813757
## 3 -0.2271248 0.737782381 0.9176919 0.1101367 1.2279585 2.4068150
## 4 0.2671472 -1.466725932 -2.1040652 0.2481547 1.9071809 -0.3813757
## 5 -2.7314362 -0.279682994 -1.0637881 -1.1888558 -0.3002920 1.4774181
## 6 -0.1282704 -0.449260557 -0.6674921 0.1101367 0.3789304 -0.3813757
## `Mnf Flow` `Carb Pressure1` `Fill Pressure` `Hyd Pressure2` `Hyd Pressure3`
## 1 -1.042583 -0.7983274 -0.6049215 -1.27918 -1.280596
## 2 -1.042583 -0.2079690 -0.6049215 -1.27918 -1.280596
## 3 -1.042583 -0.5031482 -0.6049215 -1.27918 -1.280596
## 4 -1.042583 -1.5573596 -0.4790381 -1.27918 -1.280596
## 5 -1.042583 -0.8826643 -0.6678631 -1.27918 -1.280596
## 6 -1.042583 -0.6296536 -0.7308048 -1.27918 -1.280596
## `Hyd Pressure4` `Filler Level` `Filler Speed` Temperature `Usage cont`
## 1 1.6544895 0.7610718 0.4083977 0.02347442 -1.6162070
## 2 0.7400338 0.5954501 0.3876406 1.18056518 -0.3670199
## 3 -1.0888777 0.6846310 0.4317494 0.74665614 -1.0856383
## 4 -0.3268313 0.5444896 0.4213709 -0.26579827 -1.1998113
## 5 -0.3268313 0.5954501 0.4187762 -0.26579827 -1.1125025
## 6 1.5020803 0.6973712 0.4239655 0.16811076 0.9493279
## `Carb Flow` Density MFR Balling `Pressure Vacuum` PH
## 1 0.4318220 -0.7778248 0.2835077 -0.8589593 2.133539 -1.076123
## 2 0.6292707 -0.6718721 0.3078655 -0.7515585 2.133539 -1.655778
## 3 0.4150575 1.0763476 0.4188288 1.0141113 2.484420 2.285880
## 4 0.5528991 0.9703949 0.3592875 0.9067105 1.431776 -1.771709
## 5 0.5454482 0.9703949 0.2537371 0.9067105 1.431776 -1.655778
## 6 0.4467238 0.9174185 0.4702508 0.8530101 1.431776 -1.307985
## `Oxygen Filler` `Bowl Setpoint` `Pressure Setpoint` `Air Pressurer`
## 1 -0.5326049 0.697465 -0.596061 -0.1930780
## 2 -0.4468482 0.697465 -0.399891 0.1369776
## 3 -0.4897265 0.697465 -0.497976 -0.6881615
## 4 -0.3610914 0.697465 -0.792231 2.7774225
## 5 -0.3610914 0.697465 -0.792231 2.7774225
## 6 -0.4897265 0.697465 -0.792231 3.1074781
## `Alch Rel` `Carb Rel` `Balling Lvl`
## 1 -0.6282042 -0.9072722 -0.6549488
## 2 -0.6677866 -1.0626503 -0.5630274
## 3 1.5092444 3.1325583 1.4132824
## 4 0.4801025 -0.1303817 1.1375182
## 5 0.4801025 0.0249964 1.1375182
## 6 0.5196849 0.0249964 1.1145379
Before further analysis continues, the final data set should be split so that models can be trained and then validated on held-out data.
The data is split into 80% training and 20% testing to train and validate models.
set.seed(321)
index <- createDataPartition(training_data_imputed$PH, p = 0.8, list = FALSE)
train_x <- training_data_imputed[index, -which(names(training_data_imputed) == "PH")]
train_y <- training_data_imputed[index, "PH"]
test_x <- training_data_imputed[-index, -which(names(training_data_imputed) == "PH")]
test_y <- training_data_imputed[-index, "PH"]
We evaluate the relationships between pH (our target) and all available predictors to gain a preliminary understanding of which factors might drive our outcome. Using correlations and visualization, we identify the variables that are most strongly associated with pH.
The table of correlations ranks predictors by their correlation with the target variable, pH. Mnf.Flow shows the strongest relationship with pH (negative correlation of -0.447), while Bowl.Setpoint (positive correlation of 0.348) has the strongest positive correlation and may play a role in influencing pH, although these correlations are moderate at best.
# Compute correlations between 'PH' and all other predictors
correlation_values <- training_data_imputed %>%
summarise(across(.cols = everything(),
.fns = ~ cor(., training_data_imputed$PH, use = "complete.obs"),
.names = "cor_{col}")) %>%
pivot_longer(cols = everything(), names_to = "Predictor", values_to = "Correlation") %>%
mutate(Predictor = gsub("cor_", "", Predictor)) %>%
filter(Predictor != "PH") %>%
arrange(desc(abs(Correlation)))
# Output sorted list of correlations
print(correlation_values)
## # A tibble: 33 × 2
## Predictor Correlation
## <chr> <dbl>
## 1 `Mnf Flow` -0.447
## 2 `Bowl Setpoint` 0.348
## 3 `Filler Level` 0.324
## 4 `Usage cont` -0.318
## 5 `Pressure Setpoint` -0.307
## 6 `Brand Code`C -0.296
## 7 `Hyd Pressure3` -0.240
## 8 `Pressure Vacuum` 0.220
## 9 `Fill Pressure` -0.215
## 10 `Hyd Pressure2` -0.201
## # ℹ 23 more rows
# Create scatterplot matrix for PH against other predictors
correlation_matrix <- ggpairs(
data = training_data_imputed,
columns = which(names(training_data_imputed) == "PH"):ncol(training_data_imputed),
upper = list(continuous = wrap("cor", size = 3)),
title = "Correlation Scatterplot Matrix for PH") +
theme(
axis.text.x = element_text(size = 7),
axis.text.y = element_text(size = 7),
strip.text = element_text(size = 7),
plot.title = element_text(size = 12))
# Display the scatterplot matrix
print(correlation_matrix)
We expand from a single predictor to a full multiple linear regression model that incorporates all potential factors. We examine model summaries and diagnostic plots to assess fit and identify potential issues like non-linearity or heteroskedasticity.
The multiple linear regression model yields an adjusted R-squared value of 0.4398; the model explains approximately 44% of the variance in pH. Significant predictors include Mnf.Flow (negative association), Carb.Pressure1 (positive association), Hyd.Pressure3, Temperature, Usage.cont, Balling, and others, as indicated by extremely low p-values. Several predictors, such as PC.Volume and Hyd.Pressure4, are not significant and may add unnecessary complexity to the model.
The Residuals vs Fitted plot shows a reasonably random spread, suggesting linearity and homoscedasticity. The Q-Q plot suggests most residuals are normally distributed, with some deviation in the tails. The Scale-Location plot supports a relatively even variance across fitted values. The Residuals vs Leverage plot highlights a few influential points (e.g., observation 172), which may warrant further treatment.
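A sketch of the fit behind the summary and diagnostics (the Call line below confirms the formula and data; vif() is applied to this model object later):
# Fit the full multiple linear regression model
model <- lm(PH ~ ., data = stu_data_raw)
summary(model)
# Standard diagnostic plots: Residuals vs Fitted, Q-Q, Scale-Location, Residuals vs Leverage
par(mfrow = c(2, 2))
plot(model)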
##
## Call:
## lm(formula = PH ~ ., data = stu_data_raw)
##
## Residuals:
## Min 1Q Median 3Q Max
## -0.55282 -0.07123 0.00900 0.08493 0.45163
##
## Coefficients:
## Estimate Std. Error t value Pr(>|t|)
## (Intercept) 1.293e+01 1.322e+00 9.779 < 2e-16 ***
## `Brand Code`B 1.353e-01 3.183e-02 4.251 2.23e-05 ***
## `Brand Code`C -2.015e-02 3.081e-02 -0.654 0.513172
## `Brand Code`D 9.745e-02 2.452e-02 3.975 7.30e-05 ***
## `Carb Volume` -2.362e-01 1.169e-01 -2.020 0.043520 *
## `Fill Ounces` -1.216e-01 3.650e-02 -3.332 0.000878 ***
## `PC Volume` -1.918e-03 6.594e-02 -0.029 0.976798
## `Carb Pressure` 8.150e-03 5.422e-03 1.503 0.132956
## `Carb Temp` -5.736e-03 4.255e-03 -1.348 0.177830
## PSC -2.510e-02 6.347e-02 -0.396 0.692489
## `PSC Fill` -2.619e-02 2.518e-02 -1.040 0.298536
## `PSC CO2` -1.071e-01 7.058e-02 -1.517 0.129485
## `Mnf Flow` -6.677e-04 5.217e-05 -12.798 < 2e-16 ***
## `Carb Pressure1` 5.990e-03 7.786e-04 7.694 2.22e-14 ***
## `Fill Pressure` -3.492e-03 1.951e-03 -1.790 0.073634 .
## `Hyd Pressure1` 7.618e-05 4.038e-04 0.189 0.850355
## `Hyd Pressure2` -1.878e-03 5.794e-04 -3.241 0.001211 **
## `Hyd Pressure3` 3.842e-03 6.526e-04 5.887 4.60e-09 ***
## `Hyd Pressure4` 9.140e-05 4.746e-04 0.193 0.847307
## `Filler Level` -7.162e-04 9.106e-04 -0.786 0.431669
## `Filler Speed` 1.603e-05 2.728e-05 0.587 0.556970
## Temperature -1.960e-02 3.345e-03 -5.860 5.40e-09 ***
## `Usage cont` -7.488e-03 1.299e-03 -5.763 9.55e-09 ***
## `Carb Flow` 1.533e-05 4.807e-06 3.189 0.001448 **
## Density -9.824e-02 3.081e-02 -3.188 0.001453 **
## MFR -9.983e-05 1.480e-04 -0.675 0.499945
## Balling -2.204e-01 4.009e-02 -5.499 4.31e-08 ***
## `Pressure Vacuum` -5.507e-02 1.029e-02 -5.352 9.68e-08 ***
## `Oxygen Filler` -4.054e-01 8.596e-02 -4.716 2.57e-06 ***
## `Bowl Setpoint` 2.752e-03 9.402e-04 2.927 0.003457 **
## `Pressure Setpoint` -3.425e-03 2.629e-03 -1.303 0.192813
## `Air Pressurer` -1.610e-03 2.540e-03 -0.634 0.526213
## `Alch Rel` -1.624e-02 4.056e-02 -0.400 0.688945
## `Carb Rel` 1.468e-01 6.296e-02 2.332 0.019821 *
## `Balling Lvl` 3.025e-01 4.713e-02 6.419 1.70e-10 ***
## ---
## Signif. codes: 0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
##
## Residual standard error: 0.1278 on 2003 degrees of freedom
## (533 observations deleted due to missingness)
## Multiple R-squared: 0.4492, Adjusted R-squared: 0.4398
## F-statistic: 48.04 on 34 and 2003 DF, p-value: < 2.2e-16
Due to the potential issues introduced by multiple predictors, we use the Variance Inflation Factor to detect and remove variables that cause multicollinearity. After pruning these collinear variables, we refit the model. By mitigating multicollinearity, we ensure that each predictor contributes distinct information about pH. After removing predictors with high multicollinearity, the remaining predictors have low VIF values - all less than 5.
This refined model has an adjusted R-squared of 0.2285, lower than the full MLR model. The results suggest that some variables remain non-significant and could potentially be excluded, but the drop in R-squared indicates that the removed collinear predictors did carry explanatory power.
The diagnostic plots show similar trends to the previous MLR model, with the Residuals vs Fitted plot displaying a random spread, suggesting linearity. The Q-Q plot indicates a relatively normal distribution of residuals with some deviations in the tails. The Scale-Location plot suggests variance homogeneity. The Residuals vs Leverage plot identifies a few influential points (e.g., observation 1094), which may need attention.
# Compute VIF values
vif_df <- as.data.frame(vif(model))
names(vif_df) <- "VIF"
vif_df$Predictor <- rownames(vif_df)
# Filter predictors with VIF <= 5
predictors_to_keep <- vif_df$Predictor[vif_df$VIF <= 5]
# Create formula with the filtered predictors
formula <- as.formula(paste("PH ~", paste(predictors_to_keep, collapse = " + ")))
# Refit model using selected predictors
final_model <- lm(formula, data = stu_data_raw)
# Compute VIF values for the final model
final_vif_df <- as.data.frame(vif(final_model))
names(final_vif_df) <- "VIF"
final_vif_df$Predictor <- rownames(final_vif_df)
# Display VIF values for kept predictors
print(final_vif_df)
## VIF Predictor
## `Fill Ounces` 1.057565 `Fill Ounces`
## `PC Volume` 1.459941 `PC Volume`
## PSC 1.125810 PSC
## `PSC Fill` 1.098176 `PSC Fill`
## `PSC CO2` 1.052370 `PSC CO2`
## `Carb Pressure1` 1.371328 `Carb Pressure1`
## `Fill Pressure` 2.165508 `Fill Pressure`
## `Hyd Pressure1` 1.317134 `Hyd Pressure1`
## `Hyd Pressure4` 1.248064 `Hyd Pressure4`
## Temperature 1.154975 Temperature
## `Usage cont` 1.457358 `Usage cont`
## `Carb Flow` 1.454509 `Carb Flow`
## `Pressure Vacuum` 1.413673 `Pressure Vacuum`
## `Oxygen Filler` 1.253885 `Oxygen Filler`
## `Pressure Setpoint` 2.316475 `Pressure Setpoint`
## `Air Pressurer` 1.115625 `Air Pressurer`
##
## Call:
## lm(formula = formula, data = stu_data_raw)
##
## Residuals:
## Min 1Q Median 3Q Max
## -0.58218 -0.09053 0.00780 0.10833 0.47817
##
## Coefficients:
## Estimate Std. Error t value Pr(>|t|)
## (Intercept) 1.493e+01 1.004e+00 14.874 < 2e-16 ***
## `Fill Ounces` -1.317e-01 3.835e-02 -3.434 0.000604 ***
## `PC Volume` 1.039e-01 6.629e-02 1.567 0.117311
## PSC -2.200e-01 6.882e-02 -3.197 0.001408 **
## `PSC Fill` -1.663e-03 2.802e-02 -0.059 0.952675
## `PSC CO2` -1.903e-01 7.714e-02 -2.466 0.013718 *
## `Carb Pressure1` 3.490e-03 7.893e-04 4.422 1.02e-05 ***
## `Fill Pressure` -2.566e-03 1.489e-03 -1.723 0.084964 .
## `Hyd Pressure1` 2.476e-04 2.937e-04 0.843 0.399371
## `Hyd Pressure4` -6.636e-04 2.770e-04 -2.396 0.016667 *
## Temperature -2.462e-02 2.718e-03 -9.058 < 2e-16 ***
## `Usage cont` -1.528e-02 1.280e-03 -11.939 < 2e-16 ***
## `Carb Flow` 6.308e-06 3.634e-06 1.736 0.082727 .
## `Pressure Vacuum` 3.205e-02 6.529e-03 4.909 9.79e-07 ***
## `Oxygen Filler` 1.856e-01 8.092e-02 2.294 0.021867 *
## `Pressure Setpoint` -1.308e-02 2.338e-03 -5.592 2.51e-08 ***
## `Air Pressurer` -5.301e-03 2.740e-03 -1.935 0.053107 .
## ---
## Signif. codes: 0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
##
## Residual standard error: 0.1511 on 2303 degrees of freedom
## (251 observations deleted due to missingness)
## Multiple R-squared: 0.2338, Adjusted R-squared: 0.2285
## F-statistic: 43.92 on 16 and 2303 DF, p-value: < 2.2e-16
Training a non-linear SVM model to predict PH values.
control <- trainControl(method = "cv", number = 5)
set.seed(123)
svm_model <- train(
x = train_x,
y = train_y,
method = "svmRadial",
tuneLength = 10,
trControl = control
)
Predicting PH values using the trained SVM model on the test set.
Evaluate the model.
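The prediction and evaluation below are a sketch, assuming caret's postResample for RMSE and R²:
# Predict on the held-out test set and compute accuracy metrics
svm_predictions <- predict(svm_model, newdata = test_x)
svm_metrics <- postResample(pred = svm_predictions, obs = test_y)
cat("SVM RMSE:", svm_metrics["RMSE"], "\n")
cat("SVM R²:", svm_metrics["Rsquared"], "\n")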
## SVM RMSE: 0.7103096
## SVM R²: 0.5159691
This RMSE is high at 0.71, and the R² of about 0.52 indicates there is room for improvement in the model.
Training a Ridge Regression model with cross-validation to find the optimal λ (lambda) value and predict pH values.
set.seed(345)
ridge_model <- train(
x = train_x,
y = train_y,
method = "glmnet",
tuneGrid = expand.grid(alpha = 0, lambda = seq(0.001, 1, length = 20)),
metric = "Rsquared",
trControl = control,
preProc = c("center", "scale")
)
Using the trained Ridge model to predict pH values for the test set.
# Predict and evaluate the Ridge Regression model
ridge_predictions <- predict(ridge_model, newdata = test_x)
Evaluate the model.
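The evaluation below is a sketch, again assuming caret's postResample:
ridge_metrics <- postResample(pred = ridge_predictions, obs = test_y)
cat("Ridge Regression RMSE:", ridge_metrics["RMSE"], "\n")
cat("Ridge Regression R²:", ridge_metrics["Rsquared"], "\n")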
## Ridge Regression RMSE: 0.8109992
## Ridge Regression R²: 0.3650728
The low R² value indicates there’s still a large amount of unexplained variance.
We fit a Lasso regression to leverage its feature selection and model complexity reduction properties. Lasso applies a penalty that shrinks less important coefficients toward zero, resulting in a smaller set of non-zero coefficients. By focusing on the most critical variables, we may be able to generate a parsimonious model that outperforms our linear regression baselines.
The Lasso regression results reveal a tuned model with optimal parameters alpha = 0.1 and lambda = 0.0012. The variable importance rankings highlight Oxygen.Filler as the most influential predictor, followed by Carb.Rel, PC.Volume, and Density. These top features identify key predictors of pH.
The Lasso regression achieves effective dimensionality reduction, but the relatively modest R-squared value suggests that further exploration may be necessary. The R-squared of 0.354 is lower than the adjusted R-squared of 0.4072 from the multiple linear regression (MLR) model, so Lasso may not be a suitable model for this dataset.
set.seed(321)
#Define cross-validation method
cross_val_10 <- trainControl(method = "cv", number = 10)
lasso_model <- train(
x = train_x,
y = train_y,
method = "lasso",
trControl = cross_val_10,
tuneLength = 20)
Next, we predict the test set with the Lasso model:
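A sketch of the prediction step, assuming postResample produces the RMSE/Rsquared/MAE vector printed below:
lasso_predictions <- predict(lasso_model, newdata = test_x)
postResample(pred = lasso_predictions, obs = test_y)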
## RMSE Rsquared MAE
## 0.8087921 0.3692186 0.6211638
To capture non-linear and potentially complex relationships, we train a neural network model.
The neural network model achieves an R-squared of 0.477, surpassing the performance of the Lasso regression (R-squared = 0.354) and coming close to the multiple linear regression (MLR) model’s R-squared of 0.407. The optimal parameters are size = 9 (number of hidden units) and decay = 0.5 (regularization parameter). Overall, the neural network model delivers moderate results.
The tuning results graph shows that the combination of decay and hidden units significantly impacts model performance, with RMSE values improving as the decay and size are fine-tuned. The variable importance plot highlights Carb.Flow, Hyd.Pressure1, and Mnf.Flow as key predictors.
The residual plot suggests minimal systematic errors, with residuals centered around zero across predicted values.
library(nnet)
# Define a tuning grid
nnetGrid <- expand.grid(
size = 1:10,
decay = c(0.001, 0.01, 0.1, 0.5))
We create a cluster with two fewer workers than the number of system cores.
Next, we start the cluster, train the model, and close the cluster.
library(doParallel)
# Create the cluster described above: two fewer workers than the system core count
cluster0 <- makeCluster(detectCores() - 2)
registerDoParallel(cluster0)
# Train neural network
set.seed(321)
nnetTuned <- train(
x = train_x,
y = train_y,
method = "nnet",
tuneGrid = nnetGrid,
preProcess = c("center", "scale"),
trControl = trainControl(method = "cv", number = 10),
linout = TRUE,
trace = FALSE,
maxit = 500,
MaxNWts = 10 * (ncol(train_x) + 1) + 10 + 1)
stopCluster(cluster0)
Next, we try predicting the test set with the neural network:
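A sketch of the prediction step:
nnet_predictions <- predict(nnetTuned, newdata = test_x)
postResample(pred = nnet_predictions, obs = test_y)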
## RMSE Rsquared MAE
## 0.7511327 0.4713855 0.5603725
Below we try fitting a random forest model to the data.
One of the tuning parameters for random forests is the number of predictors sampled at each split (referred to as \(m_{try}\)); it is recommended to set this parameter to one-third of the number of predictors. The number of trees is an additional parameter; it is recommended to set it to at least 1000.
Below, we create a tune grid for \(m_{try}\) parameter.
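A sketch of the grid; the values are chosen so the candidates match the mtry column in the resampling results below (1 to 33 in steps of 4):
mtry <- expand.grid(mtry = seq(1, ncol(train_x), by = 4))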
We use parallel computing to reduce the run time for training the model.
We create a cluster with two fewer workers than the number of system cores.
Next, we start the cluster, train the model, and close the cluster.
# Create and register the cluster (two fewer workers than the system core count)
cluster <- makeCluster(detectCores() - 2)
registerDoParallel(cluster)
set.seed(50)
rf_model <- train(x = train_x, y = train_y, method = 'rf',
tuneGrid = mtry,
trControl = trainControl(method = 'cv', number = 10, allowParallel = T))
stopCluster(cluster)
## Random Forest
##
## 2058 samples
## 33 predictor
##
## No pre-processing
## Resampling: Cross-Validated (10 fold)
## Summary of sample sizes: 1851, 1853, 1852, 1851, 1852, 1854, ...
## Resampling results across tuning parameters:
##
## mtry RMSE Rsquared MAE
## 1 0.7027458 0.5760284 0.5473556
## 5 0.5866556 0.6801276 0.4367668
## 9 0.5693061 0.6925205 0.4194761
## 13 0.5617571 0.6964324 0.4113211
## 17 0.5561654 0.6999992 0.4056312
## 21 0.5530506 0.7018771 0.4026702
## 25 0.5554906 0.6967981 0.4023201
## 29 0.5562778 0.6941822 0.4026970
## 33 0.5593541 0.6893224 0.4044515
##
## RMSE was used to select the optimal model using the smallest value.
## The final value used for the model was mtry = 21.
Below is a plot of the cross-validation RMSE:
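A sketch of the call, assuming caret's plot method for train objects:
plot(rf_model)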
Below is a plot of the variable importance:
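A sketch, assuming caret's varImp:
plot(varImp(rf_model))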
Next, we try predicting the test set with the random forest model
We compare the predictions with the actual values:
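A sketch of the prediction and comparison, assuming postResample:
rf_predictions <- predict(rf_model, newdata = test_x)
postResample(pred = rf_predictions, obs = test_y)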
## RMSE Rsquared MAE
## 0.6074740 0.6523460 0.4370254
Next, we try fitting a gradient boosted regression tree model to the data. The parameters for this model include tree depth, the number of trees, and the shrinkage parameter, which controls how much of a predicted value from a previous iteration is added to the current iteration (values below 0.01 are recommended). Lastly, the bagging fraction is kept constant at 0.5.
Below, we create the tuning grid:
gbmGrid <- expand.grid(interaction.depth = seq(1,7, by = 2),
shrinkage = c(0.01,0.1),
n.trees = seq(100,1000, by = 50),
n.minobsinnode = seq(5,30, by = 5))
We will use parallel computing to reduce the training time. Below we start the cluster, train the model, and close the cluster.
# Create and register the cluster for GBM training
cluster2 <- makeCluster(detectCores() - 2)
registerDoParallel(cluster2)
set.seed(100)
gbm_model <- train(x = train_x, y = train_y, method = 'gbm',
tuneGrid = gbmGrid,
trControl = trainControl(method = 'cv', number = 10, allowParallel = T),
verbose = F)
stopCluster(cluster2)
## Stochastic Gradient Boosting
##
## 2058 samples
## 33 predictor
##
## No pre-processing
## Resampling: Cross-Validated (10 fold)
## Summary of sample sizes: 1851, 1853, 1851, 1852, 1852, 1854, ...
## Resampling results across tuning parameters:
##
## shrinkage interaction.depth n.minobsinnode n.trees RMSE Rsquared
## 0.01 1 5 100 0.8783691 0.3286048
## 0.01 1 5 150 0.8486822 0.3531979
## 0.01 1 5 200 0.8282199 0.3681532
## 0.01 1 5 250 0.8122134 0.3811016
## 0.01 1 5 300 0.8003723 0.3896606
## 0.01 1 5 350 0.7919159 0.3954121
## 0.01 1 5 400 0.7848881 0.4012776
## 0.01 1 5 450 0.7792765 0.4066565
## 0.01 1 5 500 0.7747129 0.4115345
## 0.01 1 5 550 0.7709445 0.4158271
## 0.01 1 5 600 0.7677762 0.4193236
## 0.01 1 5 650 0.7651932 0.4224568
## 0.01 1 5 700 0.7624260 0.4261294
## 0.01 1 5 750 0.7598396 0.4296932
## 0.01 1 5 800 0.7577747 0.4324543
## 0.01 1 5 850 0.7555915 0.4354857
## 0.01 1 5 900 0.7536988 0.4377012
## 0.01 1 5 950 0.7518645 0.4399599
## 0.01 1 5 1000 0.7497330 0.4426869
## 0.01 1 10 100 0.8779420 0.3304927
## 0.01 1 10 150 0.8486265 0.3532055
## 0.01 1 10 200 0.8280469 0.3689692
## 0.01 1 10 250 0.8122047 0.3813144
## 0.01 1 10 300 0.8005219 0.3895934
## 0.01 1 10 350 0.7912608 0.3964723
## 0.01 1 10 400 0.7846638 0.4020604
## 0.01 1 10 450 0.7790100 0.4072911
## 0.01 1 10 500 0.7743890 0.4124104
## 0.01 1 10 550 0.7706454 0.4167446
## 0.01 1 10 600 0.7670604 0.4211226
## 0.01 1 10 650 0.7640055 0.4249071
## 0.01 1 10 700 0.7610744 0.4286285
## 0.01 1 10 750 0.7587299 0.4315679
## 0.01 1 10 800 0.7565600 0.4343636
## 0.01 1 10 850 0.7544196 0.4372191
## 0.01 1 10 900 0.7523788 0.4398835
## 0.01 1 10 950 0.7503584 0.4427206
## 0.01 1 10 1000 0.7487238 0.4448535
## 0.01 1 15 100 0.8781208 0.3283162
## 0.01 1 15 150 0.8490885 0.3523933
## 0.01 1 15 200 0.8277183 0.3694958
## 0.01 1 15 250 0.8123305 0.3810827
## 0.01 1 15 300 0.8005372 0.3895383
## 0.01 1 15 350 0.7913970 0.3964314
## 0.01 1 15 400 0.7841578 0.4028304
## 0.01 1 15 450 0.7789432 0.4076375
## 0.01 1 15 500 0.7741016 0.4128432
## 0.01 1 15 550 0.7704620 0.4168008
## 0.01 1 15 600 0.7670770 0.4207886
## 0.01 1 15 650 0.7641401 0.4244164
## 0.01 1 15 700 0.7613019 0.4279112
## 0.01 1 15 750 0.7585455 0.4317893
## 0.01 1 15 800 0.7562358 0.4349065
## 0.01 1 15 850 0.7538388 0.4380498
## 0.01 1 15 900 0.7517434 0.4408531
## 0.01 1 15 950 0.7498503 0.4433048
## 0.01 1 15 1000 0.7480477 0.4455654
## 0.01 1 20 100 0.8782973 0.3314372
## 0.01 1 20 150 0.8495499 0.3526080
## 0.01 1 20 200 0.8289394 0.3680896
## 0.01 1 20 250 0.8127202 0.3808621
## 0.01 1 20 300 0.8002995 0.3903473
## 0.01 1 20 350 0.7915675 0.3957859
## 0.01 1 20 400 0.7846384 0.4016579
## 0.01 1 20 450 0.7792330 0.4067031
## 0.01 1 20 500 0.7745528 0.4119402
## 0.01 1 20 550 0.7707960 0.4157716
## 0.01 1 20 600 0.7672963 0.4201405
## 0.01 1 20 650 0.7639391 0.4245070
## 0.01 1 20 700 0.7611319 0.4278918
## 0.01 1 20 750 0.7585296 0.4313214
## 0.01 1 20 800 0.7562947 0.4343374
## 0.01 1 20 850 0.7541087 0.4373365
## 0.01 1 20 900 0.7521850 0.4396599
## 0.01 1 20 950 0.7502398 0.4421542
## 0.01 1 20 1000 0.7481437 0.4448884
## 0.01 1 25 100 0.8782748 0.3305820
## 0.01 1 25 150 0.8493277 0.3512194
## 0.01 1 25 200 0.8279714 0.3693956
## 0.01 1 25 250 0.8123160 0.3808376
## 0.01 1 25 300 0.8007968 0.3884660
## 0.01 1 25 350 0.7914053 0.3963096
## 0.01 1 25 400 0.7844569 0.4021773
## 0.01 1 25 450 0.7786780 0.4080392
## 0.01 1 25 500 0.7742881 0.4127765
## 0.01 1 25 550 0.7705773 0.4166589
## 0.01 1 25 600 0.7667520 0.4214139
## 0.01 1 25 650 0.7635721 0.4255006
## 0.01 1 25 700 0.7607692 0.4288394
## 0.01 1 25 750 0.7580614 0.4322539
## 0.01 1 25 800 0.7557044 0.4351625
## 0.01 1 25 850 0.7533430 0.4383560
## 0.01 1 25 900 0.7511813 0.4409837
## 0.01 1 25 950 0.7495016 0.4432663
## 0.01 1 25 1000 0.7478113 0.4453360
## 0.01 1 30 100 0.8786866 0.3263384
## 0.01 1 30 150 0.8489768 0.3535241
## 0.01 1 30 200 0.8279000 0.3702858
## 0.01 1 30 250 0.8125150 0.3810013
## 0.01 1 30 300 0.8003708 0.3900935
## 0.01 1 30 350 0.7914966 0.3960987
## 0.01 1 30 400 0.7843567 0.4021904
## 0.01 1 30 450 0.7787454 0.4075501
## 0.01 1 30 500 0.7745351 0.4118994
## 0.01 1 30 550 0.7703363 0.4166993
## 0.01 1 30 600 0.7667771 0.4209788
## 0.01 1 30 650 0.7635342 0.4247060
## 0.01 1 30 700 0.7609991 0.4279289
## 0.01 1 30 750 0.7583829 0.4311289
## 0.01 1 30 800 0.7563478 0.4336540
## 0.01 1 30 850 0.7540430 0.4368205
## 0.01 1 30 900 0.7518395 0.4395588
## 0.01 1 30 950 0.7499267 0.4420624
## 0.01 1 30 1000 0.7483691 0.4440116
## 0.01 3 5 100 0.8152949 0.4256035
## 0.01 3 5 150 0.7778097 0.4491847
## 0.01 3 5 200 0.7545768 0.4659375
## 0.01 3 5 250 0.7393687 0.4792633
## 0.01 3 5 300 0.7280112 0.4908124
## 0.01 3 5 350 0.7194310 0.4995847
## 0.01 3 5 400 0.7122246 0.5071539
## 0.01 3 5 450 0.7065365 0.5135115
## 0.01 3 5 500 0.7016080 0.5192186
## 0.01 3 5 550 0.6970771 0.5244508
## 0.01 3 5 600 0.6927042 0.5296983
## 0.01 3 5 650 0.6894077 0.5333586
## 0.01 3 5 700 0.6863893 0.5366973
## 0.01 3 5 750 0.6836083 0.5398669
## 0.01 3 5 800 0.6807600 0.5432016
## 0.01 3 5 850 0.6783319 0.5460088
## 0.01 3 5 900 0.6760412 0.5487008
## 0.01 3 5 950 0.6741640 0.5506655
## 0.01 3 5 1000 0.6722458 0.5529213
## 0.01 3 10 100 0.8147762 0.4268292
## 0.01 3 10 150 0.7772619 0.4502912
## 0.01 3 10 200 0.7539866 0.4676757
## 0.01 3 10 250 0.7376710 0.4826670
## 0.01 3 10 300 0.7265127 0.4933078
## 0.01 3 10 350 0.7179294 0.5022978
## 0.01 3 10 400 0.7102241 0.5106526
## 0.01 3 10 450 0.7034011 0.5185441
## 0.01 3 10 500 0.6977787 0.5251029
## 0.01 3 10 550 0.6933627 0.5299860
## 0.01 3 10 600 0.6894423 0.5342972
## 0.01 3 10 650 0.6856083 0.5388013
## 0.01 3 10 700 0.6823511 0.5425553
## 0.01 3 10 750 0.6795723 0.5455290
## 0.01 3 10 800 0.6768234 0.5486508
## 0.01 3 10 850 0.6742986 0.5515539
## 0.01 3 10 900 0.6719932 0.5541508
## 0.01 3 10 950 0.6698139 0.5565824
## 0.01 3 10 1000 0.6678891 0.5587959
## 0.01 3 15 100 0.8144005 0.4283089
## 0.01 3 15 150 0.7773589 0.4500643
## 0.01 3 15 200 0.7532398 0.4686922
## 0.01 3 15 250 0.7376220 0.4822043
## 0.01 3 15 300 0.7258162 0.4942692
## 0.01 3 15 350 0.7170289 0.5033093
## 0.01 3 15 400 0.7097633 0.5108443
## 0.01 3 15 450 0.7033836 0.5181679
## 0.01 3 15 500 0.6980752 0.5244483
## 0.01 3 15 550 0.6934594 0.5297663
## 0.01 3 15 600 0.6891683 0.5344848
## 0.01 3 15 650 0.6855314 0.5383802
## 0.01 3 15 700 0.6821321 0.5424118
## 0.01 3 15 750 0.6788372 0.5464038
## 0.01 3 15 800 0.6757586 0.5500590
## 0.01 3 15 850 0.6733145 0.5527878
## 0.01 3 15 900 0.6709952 0.5553050
## 0.01 3 15 950 0.6687732 0.5577465
## 0.01 3 15 1000 0.6666621 0.5602329
## 0.01 3 20 100 0.8145036 0.4274871
## 0.01 3 20 150 0.7765136 0.4514448
## 0.01 3 20 200 0.7538418 0.4667491
## 0.01 3 20 250 0.7378379 0.4810732
## 0.01 3 20 300 0.7267704 0.4916045
## 0.01 3 20 350 0.7175136 0.5018547
## 0.01 3 20 400 0.7104902 0.5090821
## 0.01 3 20 450 0.7042791 0.5160780
## 0.01 3 20 500 0.6991571 0.5220816
## 0.01 3 20 550 0.6940601 0.5279741
## 0.01 3 20 600 0.6898442 0.5328210
## 0.01 3 20 650 0.6861217 0.5370583
## 0.01 3 20 700 0.6826109 0.5410731
## 0.01 3 20 750 0.6795592 0.5445290
## 0.01 3 20 800 0.6769948 0.5473492
## 0.01 3 20 850 0.6743194 0.5503732
## 0.01 3 20 900 0.6717583 0.5534195
## 0.01 3 20 950 0.6697565 0.5556030
## 0.01 3 20 1000 0.6677523 0.5579463
## 0.01 3 25 100 0.8143818 0.4272748
## 0.01 3 25 150 0.7770826 0.4504196
## 0.01 3 25 200 0.7533145 0.4671054
## 0.01 3 25 250 0.7377239 0.4801350
## 0.01 3 25 300 0.7264129 0.4912789
## 0.01 3 25 350 0.7174381 0.5010969
## 0.01 3 25 400 0.7104890 0.5087583
## 0.01 3 25 450 0.7045187 0.5155662
## 0.01 3 25 500 0.6989117 0.5220168
## 0.01 3 25 550 0.6940089 0.5277665
## 0.01 3 25 600 0.6899620 0.5323601
## 0.01 3 25 650 0.6863557 0.5364758
## 0.01 3 25 700 0.6832038 0.5400262
## 0.01 3 25 750 0.6801991 0.5433826
## 0.01 3 25 800 0.6772209 0.5467860
## 0.01 3 25 850 0.6745686 0.5497348
## 0.01 3 25 900 0.6721668 0.5523841
## 0.01 3 25 950 0.6700266 0.5550205
## 0.01 3 25 1000 0.6684768 0.5567478
## 0.01 3 30 100 0.8144156 0.4274036
## 0.01 3 30 150 0.7778398 0.4486910
## 0.01 3 30 200 0.7540868 0.4660777
## 0.01 3 30 250 0.7379329 0.4800186
## 0.01 3 30 300 0.7269168 0.4906301
## 0.01 3 30 350 0.7176675 0.5004105
## 0.01 3 30 400 0.7099442 0.5090731
## 0.01 3 30 450 0.7042060 0.5153081
## 0.01 3 30 500 0.6989650 0.5212826
## 0.01 3 30 550 0.6937672 0.5274657
## 0.01 3 30 600 0.6896591 0.5321525
## 0.01 3 30 650 0.6858860 0.5364624
## 0.01 3 30 700 0.6825979 0.5403347
## 0.01 3 30 750 0.6795226 0.5437945
## 0.01 3 30 800 0.6765023 0.5473655
## 0.01 3 30 850 0.6739976 0.5501046
## 0.01 3 30 900 0.6716584 0.5530027
## 0.01 3 30 950 0.6695363 0.5554724
## 0.01 3 30 1000 0.6673412 0.5580577
## 0.01 5 5 100 0.7872363 0.4793725
## 0.01 5 5 150 0.7472900 0.4993928
## 0.01 5 5 200 0.7212067 0.5175366
## 0.01 5 5 250 0.7049835 0.5295095
## 0.01 5 5 300 0.6930181 0.5405736
## 0.01 5 5 350 0.6841674 0.5490666
## 0.01 5 5 400 0.6772990 0.5559168
## 0.01 5 5 450 0.6717025 0.5616033
## 0.01 5 5 500 0.6668043 0.5667896
## 0.01 5 5 550 0.6622157 0.5714176
## 0.01 5 5 600 0.6584916 0.5754597
## 0.01 5 5 650 0.6546089 0.5796109
## 0.01 5 5 700 0.6514878 0.5828341
## 0.01 5 5 750 0.6483617 0.5863640
## 0.01 5 5 800 0.6455121 0.5895553
## 0.01 5 5 850 0.6428183 0.5925645
## 0.01 5 5 900 0.6406644 0.5948532
## 0.01 5 5 950 0.6385979 0.5970570
## 0.01 5 5 1000 0.6365423 0.5993914
## 0.01 5 10 100 0.7874223 0.4799805
## 0.01 5 10 150 0.7458360 0.5024468
## 0.01 5 10 200 0.7204912 0.5183746
## 0.01 5 10 250 0.7042693 0.5304053
## 0.01 5 10 300 0.6918046 0.5421446
## 0.01 5 10 350 0.6821859 0.5519478
## 0.01 5 10 400 0.6746886 0.5595503
## 0.01 5 10 450 0.6677936 0.5669772
## 0.01 5 10 500 0.6624900 0.5723305
## 0.01 5 10 550 0.6580612 0.5767721
## 0.01 5 10 600 0.6534645 0.5819417
## 0.01 5 10 650 0.6498786 0.5859420
## 0.01 5 10 700 0.6463462 0.5897131
## 0.01 5 10 750 0.6438842 0.5921459
## 0.01 5 10 800 0.6414909 0.5946621
## 0.01 5 10 850 0.6388987 0.5974223
## 0.01 5 10 900 0.6363960 0.6000989
## 0.01 5 10 950 0.6342188 0.6025557
## 0.01 5 10 1000 0.6322053 0.6047843
## 0.01 5 15 100 0.7862640 0.4821795
## 0.01 5 15 150 0.7456795 0.5027120
## 0.01 5 15 200 0.7207597 0.5185206
## 0.01 5 15 250 0.7031241 0.5325521
## 0.01 5 15 300 0.6916987 0.5420234
## 0.01 5 15 350 0.6825906 0.5506120
## 0.01 5 15 400 0.6755218 0.5579774
## 0.01 5 15 450 0.6697349 0.5639016
## 0.01 5 15 500 0.6639527 0.5699696
## 0.01 5 15 550 0.6591578 0.5750047
## 0.01 5 15 600 0.6548302 0.5796694
## 0.01 5 15 650 0.6509146 0.5838763
## 0.01 5 15 700 0.6476541 0.5874840
## 0.01 5 15 750 0.6447451 0.5905874
## 0.01 5 15 800 0.6420841 0.5934212
## 0.01 5 15 850 0.6392560 0.5966208
## 0.01 5 15 900 0.6370916 0.5989655
## 0.01 5 15 950 0.6349602 0.6012227
## 0.01 5 15 1000 0.6330463 0.6032486
## 0.01 5 20 100 0.7873558 0.4799123
## 0.01 5 20 150 0.7463984 0.5006795
## 0.01 5 20 200 0.7213490 0.5161567
## 0.01 5 20 250 0.7044258 0.5291237
## 0.01 5 20 300 0.6921028 0.5403938
## 0.01 5 20 350 0.6831163 0.5490590
## 0.01 5 20 400 0.6756078 0.5565893
## 0.01 5 20 450 0.6697313 0.5622591
## 0.01 5 20 500 0.6645921 0.5676408
## 0.01 5 20 550 0.6598332 0.5725822
## 0.01 5 20 600 0.6562284 0.5763723
## 0.01 5 20 650 0.6524275 0.5806748
## 0.01 5 20 700 0.6492943 0.5840310
## 0.01 5 20 750 0.6463438 0.5872763
## 0.01 5 20 800 0.6433568 0.5906648
## 0.01 5 20 850 0.6408256 0.5934666
## 0.01 5 20 900 0.6384458 0.5962161
## 0.01 5 20 950 0.6365146 0.5984375
## 0.01 5 20 1000 0.6345461 0.6006131
## 0.01 5 25 100 0.7867489 0.4808280
## 0.01 5 25 150 0.7456441 0.5022209
## 0.01 5 25 200 0.7204192 0.5169538
## 0.01 5 25 250 0.7034848 0.5303579
## 0.01 5 25 300 0.6920747 0.5395695
## 0.01 5 25 350 0.6827155 0.5488748
## 0.01 5 25 400 0.6753116 0.5564032
## 0.01 5 25 450 0.6692839 0.5626097
## 0.01 5 25 500 0.6642540 0.5677985
## 0.01 5 25 550 0.6601476 0.5719493
## 0.01 5 25 600 0.6560737 0.5763148
## 0.01 5 25 650 0.6520483 0.5809336
## 0.01 5 25 700 0.6492579 0.5838627
## 0.01 5 25 750 0.6465141 0.5869521
## 0.01 5 25 800 0.6438086 0.5898550
## 0.01 5 25 850 0.6414246 0.5925813
## 0.01 5 25 900 0.6391335 0.5950422
## 0.01 5 25 950 0.6373719 0.5970909
## 0.01 5 25 1000 0.6353877 0.5994948
## 0.01 5 30 100 0.7870274 0.4799681
## 0.01 5 30 150 0.7464022 0.4999200
## 0.01 5 30 200 0.7209300 0.5160511
## 0.01 5 30 250 0.7037068 0.5292487
## 0.01 5 30 300 0.6914507 0.5404194
## 0.01 5 30 350 0.6822154 0.5492570
## 0.01 5 30 400 0.6748355 0.5564816
## 0.01 5 30 450 0.6687854 0.5629121
## 0.01 5 30 500 0.6637467 0.5681808
## 0.01 5 30 550 0.6596036 0.5724291
## 0.01 5 30 600 0.6552953 0.5772123
## 0.01 5 30 650 0.6522331 0.5801387
## 0.01 5 30 700 0.6490945 0.5835896
## 0.01 5 30 750 0.6455726 0.5876305
## 0.01 5 30 800 0.6424598 0.5912139
## 0.01 5 30 850 0.6406217 0.5930877
## 0.01 5 30 900 0.6385799 0.5954035
## 0.01 5 30 950 0.6366409 0.5975434
## 0.01 5 30 1000 0.6342990 0.6003040
## 0.01 7 5 100 0.7695216 0.5163968
## 0.01 7 5 150 0.7256652 0.5357123
## 0.01 7 5 200 0.6996867 0.5490612
## 0.01 7 5 250 0.6823035 0.5612652
## 0.01 7 5 300 0.6700025 0.5715231
## 0.01 7 5 350 0.6610432 0.5797103
## 0.01 7 5 400 0.6542114 0.5861143
## 0.01 7 5 450 0.6490534 0.5909274
## 0.01 7 5 500 0.6439232 0.5959062
## 0.01 7 5 550 0.6396509 0.5999572
## 0.01 7 5 600 0.6356651 0.6040176
## 0.01 7 5 650 0.6318338 0.6081164
## 0.01 7 5 700 0.6290486 0.6107919
## 0.01 7 5 750 0.6261623 0.6137813
## 0.01 7 5 800 0.6240285 0.6159630
## 0.01 7 5 850 0.6216541 0.6186317
## 0.01 7 5 900 0.6200483 0.6202000
## 0.01 7 5 950 0.6180310 0.6223499
## 0.01 7 5 1000 0.6166900 0.6236590
## 0.01 7 10 100 0.7686168 0.5154224
## 0.01 7 10 150 0.7260926 0.5335263
## 0.01 7 10 200 0.6997965 0.5481774
## 0.01 7 10 250 0.6828830 0.5599605
## 0.01 7 10 300 0.6706789 0.5698060
## 0.01 7 10 350 0.6611576 0.5785881
## 0.01 7 10 400 0.6545619 0.5845949
## 0.01 7 10 450 0.6492733 0.5894410
## 0.01 7 10 500 0.6436305 0.5953570
## 0.01 7 10 550 0.6390809 0.6001072
## 0.01 7 10 600 0.6346108 0.6048925
## 0.01 7 10 650 0.6312033 0.6084665
## 0.01 7 10 700 0.6279372 0.6120311
## 0.01 7 10 750 0.6253465 0.6146631
## 0.01 7 10 800 0.6226942 0.6174079
## 0.01 7 10 850 0.6205524 0.6195944
## 0.01 7 10 900 0.6183679 0.6218963
## 0.01 7 10 950 0.6159455 0.6245259
## 0.01 7 10 1000 0.6142070 0.6263667
## 0.01 7 15 100 0.7681018 0.5166443
## 0.01 7 15 150 0.7236981 0.5374616
## 0.01 7 15 200 0.6978538 0.5511954
## 0.01 7 15 250 0.6795113 0.5646965
## 0.01 7 15 300 0.6681137 0.5731191
## 0.01 7 15 350 0.6588348 0.5815618
## 0.01 7 15 400 0.6517486 0.5881373
## 0.01 7 15 450 0.6460131 0.5935198
## 0.01 7 15 500 0.6408783 0.5985480
## 0.01 7 15 550 0.6362356 0.6033088
## 0.01 7 15 600 0.6324876 0.6071749
## 0.01 7 15 650 0.6290016 0.6107228
## 0.01 7 15 700 0.6252469 0.6148626
## 0.01 7 15 750 0.6229321 0.6171353
## 0.01 7 15 800 0.6206203 0.6193810
## 0.01 7 15 850 0.6181915 0.6219660
## 0.01 7 15 900 0.6165928 0.6234545
## 0.01 7 15 950 0.6151342 0.6248530
## 0.01 7 15 1000 0.6135553 0.6265416
## 0.01 7 20 100 0.7679094 0.5161732
## 0.01 7 20 150 0.7248328 0.5347602
## 0.01 7 20 200 0.6984108 0.5497191
## 0.01 7 20 250 0.6820432 0.5601976
## 0.01 7 20 300 0.6693606 0.5710543
## 0.01 7 20 350 0.6607917 0.5778760
## 0.01 7 20 400 0.6534321 0.5847543
## 0.01 7 20 450 0.6466573 0.5917078
## 0.01 7 20 500 0.6414590 0.5969490
## 0.01 7 20 550 0.6371930 0.6011749
## 0.01 7 20 600 0.6334835 0.6050260
## 0.01 7 20 650 0.6302268 0.6083461
## 0.01 7 20 700 0.6273651 0.6113682
## 0.01 7 20 750 0.6243937 0.6147086
## 0.01 7 20 800 0.6220178 0.6172641
## 0.01 7 20 850 0.6196194 0.6197630
## 0.01 7 20 900 0.6176206 0.6218913
## 0.01 7 20 950 0.6157340 0.6238019
## 0.01 7 20 1000 0.6138129 0.6260897
## 0.01 7 25 100 0.7678449 0.5155407
## 0.01 7 25 150 0.7243537 0.5350492
## 0.01 7 25 200 0.6982955 0.5493901
## 0.01 7 25 250 0.6803018 0.5622675
## 0.01 7 25 300 0.6682095 0.5719279
## 0.01 7 25 350 0.6595291 0.5792310
## 0.01 7 25 400 0.6524349 0.5858733
## 0.01 7 25 450 0.6469337 0.5910993
## 0.01 7 25 500 0.6416064 0.5968116
## 0.01 7 25 550 0.6374168 0.6009983
## 0.01 7 25 600 0.6334277 0.6049697
## 0.01 7 25 650 0.6298983 0.6087520
## 0.01 7 25 700 0.6267191 0.6120875
## 0.01 7 25 750 0.6243302 0.6145544
## 0.01 7 25 800 0.6221075 0.6168146
## 0.01 7 25 850 0.6200606 0.6189888
## 0.01 7 25 900 0.6180781 0.6210152
## 0.01 7 25 950 0.6161692 0.6230453
## 0.01 7 25 1000 0.6139110 0.6255649
## 0.01 7 30 100 0.7678087 0.5164161
## 0.01 7 30 150 0.7250719 0.5324222
## 0.01 7 30 200 0.6994580 0.5459681
## 0.01 7 30 250 0.6831021 0.5569059
## 0.01 7 30 300 0.6713130 0.5664460
## 0.01 7 30 350 0.6620975 0.5749919
## 0.01 7 30 400 0.6547883 0.5818099
## 0.01 7 30 450 0.6492663 0.5872514
## 0.01 7 30 500 0.6439137 0.5927009
## 0.01 7 30 550 0.6398163 0.5967608
## 0.01 7 30 600 0.6360010 0.6008291
## 0.01 7 30 650 0.6328984 0.6039989
## 0.01 7 30 700 0.6297302 0.6074873
## 0.01 7 30 750 0.6273695 0.6101271
## 0.01 7 30 800 0.6246275 0.6131050
## 0.01 7 30 850 0.6220238 0.6160633
## 0.01 7 30 900 0.6202196 0.6179986
## 0.01 7 30 950 0.6186321 0.6197121
## 0.01 7 30 1000 0.6167870 0.6217835
## 0.10 1 5 100 0.7511406 0.4399783
## 0.10 1 5 150 0.7381134 0.4563440
## 0.10 1 5 200 0.7321854 0.4636088
## 0.10 1 5 250 0.7258469 0.4717834
## 0.10 1 5 300 0.7227665 0.4756977
## 0.10 1 5 350 0.7209006 0.4778081
## 0.10 1 5 400 0.7183353 0.4815017
## 0.10 1 5 450 0.7186576 0.4809836
## 0.10 1 5 500 0.7183018 0.4816549
## 0.10 1 5 550 0.7175685 0.4824627
## 0.10 1 5 600 0.7177986 0.4822727
## 0.10 1 5 650 0.7162553 0.4844854
## 0.10 1 5 700 0.7161250 0.4850377
## 0.10 1 5 750 0.7157120 0.4856770
## 0.10 1 5 800 0.7152043 0.4866448
## 0.10 1 5 850 0.7143691 0.4877751
## 0.10 1 5 900 0.7136867 0.4887590
## 0.10 1 5 950 0.7127421 0.4903709
## 0.10 1 5 1000 0.7133511 0.4898279
## 0.10 1 10 100 0.7476638 0.4458849
## 0.10 1 10 150 0.7339803 0.4633093
## 0.10 1 10 200 0.7259836 0.4731235
## 0.10 1 10 250 0.7213175 0.4782668
## 0.10 1 10 300 0.7166965 0.4839427
## 0.10 1 10 350 0.7165001 0.4839581
## 0.10 1 10 400 0.7149407 0.4863078
## 0.10 1 10 450 0.7110890 0.4915588
## 0.10 1 10 500 0.7106903 0.4922030
## 0.10 1 10 550 0.7101181 0.4929054
## 0.10 1 10 600 0.7107389 0.4919343
## 0.10 1 10 650 0.7112502 0.4913039
## 0.10 1 10 700 0.7093931 0.4942361
## 0.10 1 10 750 0.7104999 0.4927321
## 0.10 1 10 800 0.7093164 0.4942386
## 0.10 1 10 850 0.7087833 0.4951995
## 0.10 1 10 900 0.7092350 0.4951110
## 0.10 1 10 950 0.7086626 0.4956649
## 0.10 1 10 1000 0.7085035 0.4957249
## 0.10 1 15 100 0.7491896 0.4420230
## 0.10 1 15 150 0.7362106 0.4591322
## 0.10 1 15 200 0.7279498 0.4695564
## 0.10 1 15 250 0.7229438 0.4754352
## 0.10 1 15 300 0.7201306 0.4787019
## 0.10 1 15 350 0.7168994 0.4830100
## 0.10 1 15 400 0.7144535 0.4866414
## 0.10 1 15 450 0.7115836 0.4907081
## 0.10 1 15 500 0.7111908 0.4913794
## 0.10 1 15 550 0.7104591 0.4926267
## 0.10 1 15 600 0.7084784 0.4952933
## 0.10 1 15 650 0.7069621 0.4971759
## 0.10 1 15 700 0.7072903 0.4968192
## 0.10 1 15 750 0.7063649 0.4984136
## 0.10 1 15 800 0.7065100 0.4980380
## 0.10 1 15 850 0.7063516 0.4982953
## 0.10 1 15 900 0.7069439 0.4978432
## 0.10 1 15 950 0.7067900 0.4979730
## 0.10 1 15 1000 0.7067813 0.4985647
## 0.10 1 20 100 0.7495547 0.4417203
## 0.10 1 20 150 0.7364525 0.4589284
## 0.10 1 20 200 0.7283559 0.4686168
## 0.10 1 20 250 0.7234171 0.4750146
## 0.10 1 20 300 0.7205287 0.4786183
## 0.10 1 20 350 0.7164708 0.4842118
## 0.10 1 20 400 0.7140907 0.4871494
## 0.10 1 20 450 0.7135827 0.4882436
## 0.10 1 20 500 0.7132683 0.4885307
## 0.10 1 20 550 0.7114705 0.4911166
## 0.10 1 20 600 0.7104253 0.4921974
## 0.10 1 20 650 0.7088646 0.4948515
## 0.10 1 20 700 0.7094242 0.4943588
## 0.10 1 20 750 0.7077669 0.4967065
## 0.10 1 20 800 0.7081496 0.4960680
## 0.10 1 20 850 0.7091352 0.4952489
## 0.10 1 20 900 0.7077001 0.4971907
## 0.10 1 20 950 0.7068650 0.4984126
## 0.10 1 20 1000 0.7068163 0.4985261
## 0.10 1 25 100 0.7491876 0.4418752
## 0.10 1 25 150 0.7383789 0.4547779
## 0.10 1 25 200 0.7304584 0.4656612
## 0.10 1 25 250 0.7241772 0.4738063
## 0.10 1 25 300 0.7208635 0.4779477
## 0.10 1 25 350 0.7170120 0.4833317
## 0.10 1 25 400 0.7159713 0.4844846
## 0.10 1 25 450 0.7149129 0.4858444
## 0.10 1 25 500 0.7130166 0.4884747
## 0.10 1 25 550 0.7102995 0.4928458
## 0.10 1 25 600 0.7105893 0.4927247
## 0.10 1 25 650 0.7090807 0.4947087
## 0.10 1 25 700 0.7081509 0.4958878
## 0.10 1 25 750 0.7075387 0.4967782
## 0.10 1 25 800 0.7081132 0.4960437
## 0.10 1 25 850 0.7074228 0.4972944
## 0.10 1 25 900 0.7061263 0.4988644
## 0.10 1 25 950 0.7073650 0.4972674
## 0.10 1 25 1000 0.7052950 0.5001121
## 0.10 1 30 100 0.7480765 0.4433324
## 0.10 1 30 150 0.7373498 0.4566989
## 0.10 1 30 200 0.7286851 0.4676670
## 0.10 1 30 250 0.7239185 0.4737696
## 0.10 1 30 300 0.7197262 0.4799748
## 0.10 1 30 350 0.7145564 0.4868577
## 0.10 1 30 400 0.7134877 0.4882913
## 0.10 1 30 450 0.7127618 0.4896006
## 0.10 1 30 500 0.7106710 0.4927458
## 0.10 1 30 550 0.7079788 0.4962565
## 0.10 1 30 600 0.7064257 0.4985336
## 0.10 1 30 650 0.7059672 0.4992180
## 0.10 1 30 700 0.7057565 0.4995686
## 0.10 1 30 750 0.7053111 0.5001759
## 0.10 1 30 800 0.7048372 0.5009016
## 0.10 1 30 850 0.7046149 0.5011172
## 0.10 1 30 900 0.7049305 0.5007455
## 0.10 1 30 950 0.7054768 0.5002868
## 0.10 1 30 1000 0.7042994 0.5017189
## 0.10 3 5 100 0.6806662 0.5384381
## 0.10 3 5 150 0.6638852 0.5593412
## 0.10 3 5 200 0.6542890 0.5714002
## 0.10 3 5 250 0.6493120 0.5774291
## 0.10 3 5 300 0.6450574 0.5826385
## 0.10 3 5 350 0.6441043 0.5834026
## 0.10 3 5 400 0.6380621 0.5910261
## 0.10 3 5 450 0.6367000 0.5929090
## 0.10 3 5 500 0.6334272 0.5970641
## 0.10 3 5 550 0.6313918 0.5995941
## 0.10 3 5 600 0.6312253 0.5999150
## 0.10 3 5 650 0.6297636 0.6020404
## 0.10 3 5 700 0.6286998 0.6034028
## 0.10 3 5 750 0.6278048 0.6047961
## 0.10 3 5 800 0.6264900 0.6064271
## 0.10 3 5 850 0.6255432 0.6075785
## 0.10 3 5 900 0.6247844 0.6085554
## 0.10 3 5 950 0.6241668 0.6092183
## 0.10 3 5 1000 0.6246890 0.6085967
## 0.10 3 10 100 0.6771312 0.5430879
## 0.10 3 10 150 0.6636853 0.5594346
## 0.10 3 10 200 0.6552477 0.5697334
## 0.10 3 10 250 0.6477201 0.5797804
## 0.10 3 10 300 0.6441104 0.5839859
## 0.10 3 10 350 0.6408798 0.5879707
## 0.10 3 10 400 0.6373070 0.5925923
## 0.10 3 10 450 0.6341085 0.5964518
## 0.10 3 10 500 0.6327469 0.5986397
## 0.10 3 10 550 0.6313676 0.6000980
## 0.10 3 10 600 0.6286578 0.6034676
## 0.10 3 10 650 0.6281949 0.6042453
## 0.10 3 10 700 0.6270470 0.6057412
## 0.10 3 10 750 0.6248206 0.6083633
## 0.10 3 10 800 0.6250501 0.6080632
## 0.10 3 10 850 0.6244335 0.6088365
## 0.10 3 10 900 0.6245502 0.6084718
## 0.10 3 10 950 0.6261540 0.6065971
## 0.10 3 10 1000 0.6251562 0.6079129
## 0.10 3 15 100 0.6721911 0.5500842
## 0.10 3 15 150 0.6589905 0.5659240
## 0.10 3 15 200 0.6517419 0.5746489
## 0.10 3 15 250 0.6453263 0.5824558
## 0.10 3 15 300 0.6397689 0.5891952
## 0.10 3 15 350 0.6358848 0.5943134
## 0.10 3 15 400 0.6325849 0.5983974
## 0.10 3 15 450 0.6315107 0.5999041
## 0.10 3 15 500 0.6285071 0.6036602
## 0.10 3 15 550 0.6263744 0.6063852
## 0.10 3 15 600 0.6245277 0.6087576
## 0.10 3 15 650 0.6227858 0.6105818
## 0.10 3 15 700 0.6215465 0.6120669
## 0.10 3 15 750 0.6208810 0.6127329
## 0.10 3 15 800 0.6192346 0.6149300
## 0.10 3 15 850 0.6188358 0.6153010
## 0.10 3 15 900 0.6184894 0.6157818
## 0.10 3 15 950 0.6162841 0.6185119
## 0.10 3 15 1000 0.6170655 0.6176744
## 0.10 3 20 100 0.6700850 0.5532344
## 0.10 3 20 150 0.6566781 0.5691919
## 0.10 3 20 200 0.6470866 0.5812716
## 0.10 3 20 250 0.6413100 0.5877870
## 0.10 3 20 300 0.6397465 0.5893363
## 0.10 3 20 350 0.6353505 0.5955134
## 0.10 3 20 400 0.6316035 0.5998750
## 0.10 3 20 450 0.6288430 0.6031304
## 0.10 3 20 500 0.6271035 0.6053744
## 0.10 3 20 550 0.6253177 0.6073684
## 0.10 3 20 600 0.6258466 0.6067596
## 0.10 3 20 650 0.6224135 0.6110240
## 0.10 3 20 700 0.6214504 0.6124721
## 0.10 3 20 750 0.6201271 0.6137497
## 0.10 3 20 800 0.6202952 0.6137679
## 0.10 3 20 850 0.6200025 0.6143460
## 0.10 3 20 900 0.6189451 0.6156746
## 0.10 3 20 950 0.6196423 0.6147308
## 0.10 3 20 1000 0.6186622 0.6159268
## 0.10 3 25 100 0.6707260 0.5523098
## 0.10 3 25 150 0.6594809 0.5656576
## 0.10 3 25 200 0.6517839 0.5749441
## 0.10 3 25 250 0.6480522 0.5791130
## 0.10 3 25 300 0.6431404 0.5852789
## 0.10 3 25 350 0.6402213 0.5888542
## 0.10 3 25 400 0.6357058 0.5948447
## 0.10 3 25 450 0.6364534 0.5936187
## 0.10 3 25 500 0.6338667 0.5970791
## 0.10 3 25 550 0.6301992 0.6016035
## 0.10 3 25 600 0.6299214 0.6020971
## 0.10 3 25 650 0.6265455 0.6060285
## 0.10 3 25 700 0.6254534 0.6075740
## 0.10 3 25 750 0.6249021 0.6085281
## 0.10 3 25 800 0.6239383 0.6095876
## 0.10 3 25 850 0.6233902 0.6100910
## 0.10 3 25 900 0.6227025 0.6108538
## 0.10 3 25 950 0.6228734 0.6108604
## 0.10 3 25 1000 0.6225208 0.6113118
## 0.10 3 30 100 0.6756271 0.5443138
## 0.10 3 30 150 0.6601926 0.5631938
## 0.10 3 30 200 0.6512031 0.5742305
## 0.10 3 30 250 0.6444657 0.5831648
## 0.10 3 30 300 0.6406510 0.5880372
## 0.10 3 30 350 0.6392785 0.5897883
## 0.10 3 30 400 0.6362858 0.5936244
## 0.10 3 30 450 0.6345351 0.5959781
## 0.10 3 30 500 0.6317231 0.5995558
## 0.10 3 30 550 0.6314061 0.5999615
## 0.10 3 30 600 0.6298592 0.6022848
## 0.10 3 30 650 0.6288545 0.6033465
## 0.10 3 30 700 0.6293293 0.6027380
## 0.10 3 30 750 0.6274037 0.6051695
## 0.10 3 30 800 0.6265400 0.6063321
## 0.10 3 30 850 0.6241982 0.6090422
## 0.10 3 30 900 0.6237506 0.6098387
## 0.10 3 30 950 0.6229361 0.6108163
## 0.10 3 30 1000 0.6221057 0.6119099
## 0.10 5 5 100 0.6418866 0.5900092
## 0.10 5 5 150 0.6329782 0.5992186
## 0.10 5 5 200 0.6252433 0.6086527
## 0.10 5 5 250 0.6214785 0.6128378
## 0.10 5 5 300 0.6195582 0.6146526
## 0.10 5 5 350 0.6140741 0.6213467
## 0.10 5 5 400 0.6135842 0.6217410
## 0.10 5 5 450 0.6095003 0.6267691
## 0.10 5 5 500 0.6083714 0.6280104
## 0.10 5 5 550 0.6087249 0.6277546
## 0.10 5 5 600 0.6079773 0.6283533
## 0.10 5 5 650 0.6084318 0.6279147
## 0.10 5 5 700 0.6075441 0.6287673
## 0.10 5 5 750 0.6069581 0.6295196
## 0.10 5 5 800 0.6061431 0.6307343
## 0.10 5 5 850 0.6062671 0.6306441
## 0.10 5 5 900 0.6062793 0.6306563
## 0.10 5 5 950 0.6063298 0.6305035
## 0.10 5 5 1000 0.6063082 0.6307028
## 0.10 5 10 100 0.6444296 0.5867167
## 0.10 5 10 150 0.6346881 0.5976467
## 0.10 5 10 200 0.6238742 0.6107052
## 0.10 5 10 250 0.6189371 0.6165627
## 0.10 5 10 300 0.6151606 0.6209642
## 0.10 5 10 350 0.6137516 0.6229157
## 0.10 5 10 400 0.6102211 0.6275516
## 0.10 5 10 450 0.6081639 0.6298301
## 0.10 5 10 500 0.6069000 0.6313927
## 0.10 5 10 550 0.6075834 0.6304090
## 0.10 5 10 600 0.6072367 0.6308160
## 0.10 5 10 650 0.6067277 0.6315104
## 0.10 5 10 700 0.6062798 0.6318635
## 0.10 5 10 750 0.6053641 0.6329116
## 0.10 5 10 800 0.6036325 0.6349564
## 0.10 5 10 850 0.6031210 0.6356598
## (output truncated for readability: the remainder of the tuning grid lists
## the cross-validated RMSE, R-squared, and MAE for every combination of
## shrinkage, interaction.depth, n.minobsinnode, and n.trees from 100 to 1000
## in steps of 50; the selected combination is reported below)
##
## RMSE was used to select the optimal model using the smallest value.
## The final values used for the model were n.trees = 850, interaction.depth =
## 7, shrinkage = 0.1 and n.minobsinnode = 15.
Below are the parameters for the final model:
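These come from the bestTune element of the fitted caret object (a minimal sketch, assuming the object is named gbm_model):
# Best tuning-parameter combination chosen by cross-validation
gbm_model$bestTune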
## n.trees interaction.depth shrinkage n.minobsinnode
## 852 850 7 0.1 15
Next, we generate predictions for the test set with the gradient boosted model.
We compare the predictions with the actual values:
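A minimal sketch of this step, assuming the fitted caret object is named gbm_model and the held-out predictors are stored in test_x:
# Predict PH on the held-out test set and score the predictions
gbm_predictions <- predict(gbm_model, newdata = test_x)
postResample(pred = gbm_predictions, obs = test_y)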
## RMSE Rsquared MAE
## 0.6580191 0.5829137 0.4868002
Below are the test-set performance metrics for each of the fitted models:
# Collect test-set performance (RMSE, R-squared, MAE) for each fitted model
svm_metrics <- as.data.frame(postResample(pred = svm_predictions, obs = test_y))
ridge_metrics <- as.data.frame(postResample(pred = ridge_predictions, obs = test_y))
lasso_metrics <- as.data.frame(postResample(pred = lasso_predictions, obs = test_y))
nnet_metrics <- as.data.frame(postResample(pred = nnet_predictions, obs = test_y))
rf_metrics <- as.data.frame(postResample(pred = rf_predictions, obs = test_y))
gbm_metrics <- as.data.frame(postResample(pred = gbm_predictions, obs = test_y))
# Combine into a single comparison table with one row per model
model_performance <- cbind(svm_metrics, ridge_metrics, lasso_metrics, nnet_metrics,
                           rf_metrics, gbm_metrics)
colnames(model_performance) <- c('SVM', 'Ridge', 'Lasso', 'NNet', 'RF', 'GBM')
model_performance <- as.data.frame(t(model_performance))
## RMSE Rsquared MAE
## SVM 0.7103096 0.5159691 0.5121865
## Ridge 0.8109992 0.3650728 0.6248076
## Lasso 0.8087921 0.3692186 0.6211638
## NNet 0.7511327 0.4713855 0.5603725
## RF 0.6074740 0.6523460 0.4370254
## GBM 0.6580191 0.5829137 0.4868002
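The winner can also be read off programmatically from the comparison table (a short sketch using the model_performance data frame built above):
# Name of the model with the smallest test-set RMSE (returns "RF" here)
rownames(model_performance)[which.min(model_performance$RMSE)]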
With the lowest RMSE (0.607) and MAE (0.437) and the highest R-squared (0.652) among the models we fit, Random Forest was selected to predict PH for the unseen Student Evaluation data.
In general, the stronger performance of the Random Forest and Gradient Boosting Machine models suggests that the data contain non-linear relationships and complex feature interactions that linear models such as Ridge and Lasso cannot adequately capture.
# Dummy variable transformation
stu_eval_transformed <- predict(dummy_variables, newdata = stu_eval_raw)
stu_eval_transformed <- as.data.frame(stu_eval_transformed)
# Add a placeholder 'PH' column of zeros so the columns match the training data
stu_eval_transformed$PH <- 0
# Apply the pre-processing fit on the training data (centering, scaling, imputing)
stu_eval_transformed_imputed <- predict(preProcess_model, newdata = stu_eval_transformed)
# Remove near-zero variance columns; the guard matters because indexing with an
# empty vector (-integer(0)) would otherwise drop every column
nzv_cols <- nearZeroVar(stu_eval_transformed_imputed)
if (length(nzv_cols) > 0) {
  stu_eval_transformed_imputed <- stu_eval_transformed_imputed[, -nzv_cols]
}
# Use the trained random forest model to predict PH (still on the scaled scale)
predictions <- predict(rf_model, newdata = stu_eval_transformed_imputed)
# Add predictions to the original data
stu_eval_raw$Predicted_PH <- predictions
# Retrieve the centering and scaling constants used for PH
ph_center <- preProcess_model$mean["PH"]
ph_scale <- preProcess_model$std["PH"]
# Transform predictions back to the original pH scale
stu_eval_raw$Predicted_PH <- (stu_eval_raw$Predicted_PH * ph_scale) + ph_center
# View the first few predicted PH values
head(stu_eval_raw["Predicted_PH"])
## Predicted_PH
## 1 8.583530
## 2 8.442573
## 3 8.534915
## 4 8.518523
## 5 8.483863
## 6 8.534293
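Finally, the predictions can be exported for delivery; a minimal sketch (the output file name is hypothetical):
# Save the evaluation data together with the predicted PH values
write.csv(stu_eval_raw, 'StudentEvaluation_with_PH.csv', row.names = FALSE)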