1.5 로지스틱 회귀분석
- 목표 변수가 이분형이거나 범주형인 경우에는 로지스틱 회귀분석을 수행한다.
- 독립 변수가 X 하나이고 목표 변수 Y가 0 또는 1의 값을 가지는 이분형인 경우, 회귀계수를 승산비(odds ratio)로 해석할 수 있다.
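- 참고로, 로지스틱 회귀모형은 log(p/(1-p)) = b0 + b1*X 형태이며, X가 한 단위 증가하면 승산(odds)이 exp(b1)배가 되므로 승산비는 exp(b1)로 계산된다.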
1.5.1 데이터 준비
autoparts <- read.csv("autoparts.csv", header = TRUE)
autoparts1 <- autoparts[autoparts$prod_no == "90784-76001", c(2:11)]
autoparts2 <- autoparts1[autoparts1$c_thickness < 1000, ]
autoparts2$y_faulty <- ifelse((autoparts2$c_thickness < 20) | (autoparts2$c_thickness > 32), 1, 0)
autoparts2$y_faulty <- as.factor(autoparts2$y_faulty)
1.5.2 빈도표 작성
table(autoparts2$y_faulty) # 20과 32 사이를 벗어난 것이 1(불량), 전체의 약 13%가 불량
##
## 0 1
## 18925 2842
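불량 비율은 아래와 같이 비율표로 직접 확인해 볼 수 있다(확인용 예시 코드이다).
prop.table(table(autoparts2$y_faulty)) # 1(불량)의 비율이 약 0.13으로 위의 13%와 일치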
1.5.3 로지스틱 회귀모형 생성
- family = binomial(logit) 옵션을 지정하여 로지스틱 회귀모형을 적합한다.
m <- glm(y_faulty ~ fix_time + a_speed + b_speed + separation + s_separation +
rate_terms + mpa + load_time + highpressure_time, data = autoparts2,
family = binomial(logit))
summary(m)
##
## Call:
## glm(formula = y_faulty ~ fix_time + a_speed + b_speed + separation +
## s_separation + rate_terms + mpa + load_time + highpressure_time,
## family = binomial(logit), data = autoparts2)
##
## Deviance Residuals:
## Min 1Q Median 3Q Max
## -5.3641 -0.3738 -0.2150 -0.1184 5.1771
##
## Coefficients:
## Estimate Std. Error z value Pr(>|z|)
## (Intercept) -4.511e+02 9.981e+00 -45.197 < 2e-16 ***
## fix_time -3.034e-02 9.623e-03 -3.153 0.001617 **
## a_speed 1.965e+01 9.472e-01 20.743 < 2e-16 ***
## b_speed -1.854e+00 3.970e-01 -4.670 3.01e-06 ***
## separation 5.322e-01 1.121e-02 47.476 < 2e-16 ***
## s_separation 4.957e-01 1.094e-02 45.320 < 2e-16 ***
## rate_terms -2.332e-02 6.817e-03 -3.422 0.000623 ***
## mpa -1.416e-01 3.367e-03 -42.043 < 2e-16 ***
## load_time 1.835e-03 1.508e-02 0.122 0.903124
## highpressure_time 1.787e-04 1.863e-05 9.595 < 2e-16 ***
## ---
## Signif. codes: 0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
##
## (Dispersion parameter for binomial family taken to be 1)
##
## Null deviance: 16867.6 on 21766 degrees of freedom
## Residual deviance: 9993.8 on 21757 degrees of freedom
## AIC: 10014
##
## Number of Fisher Scoring iterations: 6
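회귀계수는 로그 승산에 대한 효과이므로, 지수를 취하면 각 변수의 승산비로 해석할 수 있다(해석을 위한 예시 코드이다).
exp(coef(m)) # 각 변수가 한 단위 증가할 때 불량 승산이 몇 배가 되는지를 나타낸다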
1.5.4 데이터셋 나누기
t_index <- sample(1:nrow(autoparts2), size = nrow(autoparts2) * 0.7)
train <- autoparts2[t_index, ] # 훈련데이터 (70%), t_index는 무작위로 뽑은 행 번호를 의미한다
test <- autoparts2[-t_index, ] # 검증데이터 (30%)
nrow(train); nrow(test)
## [1] 15236
## [1] 6531
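sample()은 실행할 때마다 다른 행을 뽑으므로, 같은 분할을 재현하려면 sample() 호출 전에 시드를 고정해 두면 된다(123은 임의의 예시 값이다).
set.seed(123) # 난수 시드 고정 후 t_index를 다시 뽑으면 동일한 분할이 재현된다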
head(train)
## fix_time a_speed b_speed separation s_separation rate_terms mpa
## 23613 80.7 0.666 1.692 188.9 713.8 84 76.3
## 7863 85.5 0.605 1.666 254.0 650.1 81 77.6
## 21870 81.2 0.645 1.678 185.2 713.1 85 75.9
## 8874 85.8 0.602 1.651 243.4 651.7 81 80.9
## 2797 111.6 0.474 1.597 270.9 633.4 82 29.5
## 15501 81.8 0.651 1.622 186.5 713.4 87 75.4
## load_time highpressure_time c_thickness y_faulty
## 23613 19.2 73 20.7 0
## 7863 18.0 60 20.3 0
## 21870 19.2 68 25.1 0
## 8874 18.1 55 29.3 0
## 2797 18.8 69 31.5 0
## 15501 19.2 68 23.7 0
1.5.5 모델 만들기 - train set만 이용
m <- glm(y_faulty ~ fix_time + a_speed + b_speed + separation + s_separation +
    rate_terms + mpa + load_time + highpressure_time, data = train,
    family = binomial(logit))
- 훈련데이터에 모델을 적합시킨(훈련데이터를 예측한) 결과를 확인해 보면 다음과 같다.
head(m$fitted.values)
## 23613 7863 21870 8874 2797 15501
## 0.115132593 0.426804315 0.007933978 0.002858798 0.983451135 0.023489965
1.5.6 기준값 설정
- 확률이 50%가 넘는 것만을 선택하도록 해준다.
- 값이 0.5가 넘으면 1(불량)을, 0.5보다 작으면 0(정상)을 가지도록 한다.
yhat <- ifelse(m$fitted.values >= 0.5, 1, 0)
head(yhat)
## 23613 7863 21870 8874 2797 15501
## 0 0 0 0 1 0
1.5.7 빈도표 작성
table <- table(real = train$y_faulty, predict = yhat)
table
## predict
## real 0 1
## 0 13043 222
## 1 1046 925
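빈도표의 대각원소를 이용하면 훈련데이터에 대한 정확도를 계산할 수 있다(값은 무작위 분할에 따라 달라진다).
(table[1, 1] + table[2, 2]) / sum(table) # 훈련데이터 정확도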
1.5.8 예측값 구하기
yhat_test <- predict(m, test, type = "response") # 원래는 newdata의 변수 이름을 맞춰 주어야 하지만, test와 train의 변수명이 같으므로 그대로 사용할 수 있다
head(yhat_test, n = 20)
## 7 11 12 13 19 20
## 0.03464147 0.39924532 0.06846813 0.23836566 0.01789040 0.03775477
## 21 27 38 39 44 48
## 0.72248631 0.07586966 0.10594576 0.07678122 0.31616370 0.03322495
## 51 52 57 60 66 70
## 0.19023789 0.15502251 0.03292441 0.32996299 0.06214658 0.17382086
## 71 72
## 0.01284900 0.10222956
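검증데이터에도 같은 기준값(0.5)을 적용하면 빈도표를 만들어 볼 수 있다(아래는 예시 코드이다).
yhat_test_class <- ifelse(yhat_test >= 0.5, 1, 0)
table(real = test$y_faulty, predict = yhat_test_class) # 검증데이터 빈도표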
1.5.9 ROC, AUC
library(Epi)
ROC(test = yhat_test, stat = test$y_faulty, plot = "ROC", AUC = T, main = "Logistic Regression")
1.5.10 데이터 예측(1) - 1개의 데이터만 예측
new.data <- data.frame(fix_time = 87, a_speed = 0.609, b_speed = 1.715,
separation = 242.7, s_separation = 657.5, rate_terms = 95,
mpa = 78, load_time = 18.1, highpressure_time = 82)
possiblityOf1 <- predict(m, newdata = new.data, type = "response")
ifelse(possiblityOf1 >= 0.5, 1, 0)
## 1
## 0
1.5.11 데이터예측(2) - 복수의 데이터 예측
new.data <- data.frame(fix_time = c(87, 85.7), a_speed = c(0.609, 0.472),
b_speed = c(1.715, 1.685), separation = c(242.7, 243.4),
s_separation = c(657.5, 657.9), rate_terms = c(95, 95),
mpa = c(78, 28.2), load_time = c(18.1,18.2), highpressure_time = c(82,60))
possiblityOf1 <- predict(m, newdata = new.data, type = "response")
ifelse(possiblityOf1 >= 0.5, 1, 0)
## 1 2
## 0 1
1.5.12 데이터 예측(3) - 데이터프레임 전체 예측
new.data <- data.frame(fix_time = test$fix_time, a_speed = test$a_speed, b_speed = test$b_speed,
separation = test$separation, s_separation = test$s_separation,
rate_terms = test$rate_terms, mpa = test$mpa, load_time = test$load_time,
highpressure_time = test$highpressure_time)
possiblityOf1 <- predict(m, newdata = new.data, type = "response")
head(ifelse(possiblityOf1 >= 0.5, 1, 0))
## 1 2 3 4 5 6
## 0 0 0 0 0 0
1.6 다항 로지스틱 회귀분석
- 이항 로지스틱 회귀모형에서는 값이 0, 1로만 나왔다.
- 그런데 결과 즉 종속변수가 둘 이상일 때는 분포가정을 이항분포가 아니라 다항분포로 가정해야한다.
- 이때 사용되는 모형을 다항 로지스틱 회귀모형이라고 한다.
1.6.1 데이터 준비
autoparts2$g_class <- as.factor(ifelse(autoparts2$c_thickness < 20, 1,
ifelse(autoparts2$c_thickness < 32, 2, 3)))
table(autoparts2$g_class)
##
## 1 2 3
## 2141 18914 712
1.6.2 데이터셋 나누기
t_index <- sample(1:nrow(autoparts2), size = nrow(autoparts2) * 0.7)
train <- autoparts2[t_index, ] # 훈련데이터 (70%), t_index는 무작위로 뽑은 행 번호를 의미한다
test <- autoparts2[-t_index, ] # 검증데이터 (30%)
1.6.3 모델 만들기 - train set만 이용
library(nnet)
## Warning: package 'nnet' was built under R version 3.5.1
m <- multinom(g_class ~ fix_time + a_speed + b_speed + separation + s_separation +
    rate_terms + mpa + load_time + highpressure_time, data = train)
## # weights: 33 (20 variable)
## initial value 16738.456830
## iter 10 value 5171.522255
## iter 20 value 4835.268453
## iter 30 value 4090.394789
## iter 40 value 3270.683830
## iter 50 value 3210.969667
## iter 60 value 3140.265492
## iter 70 value 2613.339546
## iter 80 value 1909.685586
## iter 90 value 1880.440998
## iter 100 value 1878.428702
## final value 1878.428702
## stopped after 100 iterations
summary(m)
## Call:
## multinom(formula = g_class ~ fix_time + a_speed + b_speed + separation +
## s_separation + rate_terms + mpa + load_time + highpressure_time,
## data = train)
##
## Coefficients:
## (Intercept) fix_time a_speed b_speed separation s_separation
## 2 1990.403 0.1420046 -19.50741 4.222045 -2.137699 -2.150391
## 3 2098.059 0.1386844 -31.87339 4.678588 -2.237457 -2.258777
## rate_terms mpa load_time highpressure_time
## 2 0.09981973 -0.7561061 -0.3381408 0.0003838150
## 3 0.07868671 -0.8793112 -0.3618599 0.0004315865
##
## Std. Errors:
## (Intercept) fix_time a_speed b_speed separation
## 2 1.373478e-05 0.02932274 0.0024781562 7.607909e-04 0.004697192
## 3 2.185355e-05 0.03102138 0.0002687239 7.182199e-05 0.005226313
## s_separation rate_terms mpa load_time highpressure_time
## 2 0.003599761 0.01493239 0.01508395 0.09279170 0.002376522
## 3 0.004050097 0.02264706 0.01606554 0.09403988 0.002376670
##
## Residual Deviance: 3756.857
## AIC: 3796.857
1.6.4 적합 결과 확인
head(m$fitted.values)
## 1 2 3
## 7848 2.245185e-03 0.9964322 1.322643e-03
## 15999 1.093702e-05 0.9993257 6.633417e-04
## 14807 8.471122e-01 0.1528219 6.590417e-05
## 7310 3.214153e-05 0.9989796 9.882730e-04
## 14644 2.176470e-01 0.7820089 3.440587e-04
## 8243 6.584333e-15 0.3824184 6.175816e-01
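다항 로지스틱 회귀에서는 관측치마다 세 범주의 적합확률 합이 1이 되므로, 아래와 같이 확인해 볼 수 있다(확인용 예시 코드이다).
head(rowSums(m$fitted.values)) # 각 행의 세 범주 확률 합은 1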
1.6.5 검증데이터로 예측값 및 정확도 구하기
yhat_test <- predict(m, test)
table <- table(real = test$g_class, predict = yhat_test)
table
## predict
## real 1 2 3
## 1 477 120 3
## 2 79 5566 72
## 3 2 57 155
(table[1, 1] + table[2, 2] + table[3, 3]) / sum(table)
## [1] 0.9490124
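전체 정확도 외에 범주별 정분류율도 빈도표의 대각원소를 행 합으로 나누어 구할 수 있다(예시 코드이며, 값은 분할에 따라 달라진다).
diag(table) / rowSums(table) # 범주 1, 2, 3 각각의 정분류율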
연습문제 1-1
로지스틱 모형으로 예측값 구하기
- 우선 LASSO를 이용하여 최적의 변수 선택하기
library("glmnet")
yvec <- occu.train$Occupancy
xmat <- as.matrix(occu.train[2:5])
fit.lasso=glmnet(x = xmat, y = yvec, alpha = 1, nlambda = 100) # 람다 100개 생성
fit.lasso.cv = cv.glmnet(x = xmat, y = yvec, nfolds = 4, alpha = 1, lambda = fit.lasso$lambda)
plot(fit.lasso.cv)
fit.lasso.param=fit.lasso.cv$lambda.min # 최적의 람다를 다른 이름으로 저장
fit.lasso.tune=glmnet(x=xmat,y=yvec,alpha=1,lambda = fit.lasso.param) # 최적 람다를 이용한 최종 LASSO 모델
coef(fit.lasso.tune) # 생성 모델의 설명변수 계수 출력 / 계수가 클수록 영향 큼 - train 데이터를 이용하여 로지스틱 모형을 적합하시오.
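LASSO에서 계수가 0으로 축소되지 않은 변수만 추리려면, 예를 들어 다음과 같이 확인할 수 있다(확인용 예시 코드이다).
cf <- as.matrix(coef(fit.lasso.tune))
rownames(cf)[cf != 0] # 절편을 포함해 0이 아닌 계수를 가진 항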
occu.train <- read.csv("occupancy_train.csv")
m <- glm(Occupancy ~ Light + CO2, data = occu.train,
family = binomial(logit))
summary(m)
##
## Call:
## glm(formula = Occupancy ~ Light + CO2, family = binomial(logit),
## data = occu.train)
##
## Deviance Residuals:
## Min 1Q Median 3Q Max
## -6.4938 -0.0371 -0.0343 -0.0334 2.6900
##
## Coefficients:
## Estimate Std. Error z value Pr(>|z|)
## (Intercept) -9.2601315 0.3540292 -26.16 <2e-16 ***
## Light 0.0184191 0.0007278 25.31 <2e-16 ***
## CO2 0.0040918 0.0003222 12.70 <2e-16 ***
## ---
## Signif. codes: 0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
##
## (Dispersion parameter for binomial family taken to be 1)
##
## Null deviance: 8420.3 on 8142 degrees of freedom
## Residual deviance: 1093.7 on 8140 degrees of freedom
## AIC: 1099.7
##
## Number of Fisher Scoring iterations: 9
- test 데이터의 예측값 및 정확도
head(m$fitted.values)
## 1 2 3 4 5 6
## 0.8231171 0.8281271 0.8184526 0.8152388 0.8129164 0.7901480
yhat <- ifelse(m$fitted.values >= 0.5, 1, 0)
head(yhat)
## 1 2 3 4 5 6
## 1 1 1 1 1 1
#빈도표
table <- table(real = occu.train$Occupancy, predict = yhat)
table
## predict
## real 0 1
## 0 6325 89
## 1 6 1723
(table[1,1] + table[2,2]) / sum(table) # 정확도
## [1] 0.9883335
#예측값 구하기
test <- read.csv("Occupancy_test.csv")
yhat_test <- predict(m, test, type = "response")
head(yhat_test, n = 20)
## 1 2 3 4 5 6 7
## 0.9898922 0.9890659 0.9883091 0.9527731 0.9491625 0.9884186 0.9798441
## 8 9 10 11 12 13 14
## 0.9669571 0.9423446 0.9690478 0.9500028 0.9519298 0.9477434 0.9446614
## 15 16 17 18 19 20
## 0.9412643 0.9431798 0.9382741 0.9397963 0.9452468 0.9512463
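예측확률에 기준값 0.5를 적용하면 test 데이터의 빈도표와 정확도도 같은 방식으로 구할 수 있다(예시 코드이다).
yhat_test_class <- ifelse(yhat_test >= 0.5, 1, 0)
table_test <- table(real = test$Occupancy, predict = yhat_test_class)
(table_test[1, 1] + table_test[2, 2]) / sum(table_test) # test 데이터 정확도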
- ROC, AUC
library(Epi)
ROC(test = yhat_test, stat = test$Occupancy, plot = "ROC", AUC = T, main = "Logistic Regression")
연습문제 1-2
SVM 모형으로 예측값 구하기
- train 데이터를 이용하여 SVM을 적합
library(e1071) # svm() 함수는 e1071 패키지에 포함되어 있으므로 먼저 불러온다
m <- svm(factor(Occupancy) ~ Light + CO2, data = occu.train, cost = 10)
- test 데이터의 예측값과 정확도를 구하시오
test <- read.csv("Occupancy_test.csv")
yhat_test <- predict(m, test) # 예측값
table <- table(real = as.factor(test$Occupancy), predict = yhat_test)
table
## (원문 출력 생략: 예측값 yhat_test가 0/1 범주가 아닌 연속형 수치로 계산되어 수천 개의 열을 가진 비정상적인 빈도표가 출력되었다. 위와 같이 e1071의 svm으로 분류 모형을 적합하면 predict()가 범주를 반환하므로 2x2 빈도표가 얻어진다.)
## 0 0 0 0 0
## 1 1 1 1 1
## predict
## real 4.5142758149152 4.51530065215549 4.51767435789641 4.51962281870974
## 0 0 0 0 0
## 1 1 1 1 1
## predict
## real 4.52023374812337 4.52098675502872 4.52327341746194 4.52436114520151
## 0 0 0 0 0
## 1 1 1 1 1
## predict
## real 4.52748450778764 4.52813266106782 4.53218888190359 4.53292119781984
## 0 0 0 0 0
## 1 1 1 1 1
## predict
## real 4.53393525302879 4.53423642212744 4.53678076417845 4.53735399085653
## 0 0 0 0 0
## 1 1 1 1 1
## predict
## real 4.54179574967178 4.54314182318193 4.54586383321461 4.54826548140842
## 0 0 0 0 0
## 1 1 1 1 1
## predict
## real 4.55005306619435 4.5507798803179 4.55450402785387 4.55744832614744
## 0 0 0 0 0
## 1 1 1 1 1
## predict
## real 4.55880145706655 4.55882122824497 4.56298413094639 4.56396854313732
## 0 0 0 0 0
## 1 1 1 1 1
## predict
## real 4.56829992377831 4.57219368052594 4.58111858013466 4.58345512093612
## 0 0 0 0 0
## 1 1 1 1 1
## predict
## real 4.58370242525781 4.58429254555671 4.58515107555631 4.59838717545434
## 0 0 0 0 0
## 1 1 1 1 1
## predict
## real 4.59974518041978 4.60395370807776 4.60843286713623 4.61201711722678
## 0 0 0 0 0
## 1 1 1 1 1
## predict
## real 4.6288464657099 4.62913802184356 4.63322983080051 4.63434175334271
## 0 0 0 0 0
## 1 1 1 1 1
## predict
## real 4.64409697369741 4.64798316239864 4.64851480085378 4.64853259491437
## 0 0 0 0 0
## 1 1 1 1 1
## predict
## real 4.65062001886756 4.65570808897287 4.65783999807754 4.65944408546626
## 0 0 0 0 0
## 1 1 1 1 1
## predict
## real 4.66766922285488 4.6735341490265 4.67483885430807 4.67510378809911
## 0 0 0 0 1
## 1 1 1 1 0
## predict
## real 4.67615230537381 4.67717525761305 4.67963034298722 4.67994464972575
## 0 0 0 0 0
## 1 1 1 1 1
## predict
## real 4.68006680810916 4.68008104335763 4.68417285231458 4.68636706813063
## 0 0 0 0 0
## 1 1 1 1 1
## predict
## real 4.69042808904782 4.69047278566072 4.69071974664571 4.69145813828682
## 0 0 0 0 0
## 1 1 1 1 1
## predict
## real 4.69479732035419 4.69562991739405 4.6980702568823 4.69855949042618
## 0 0 0 0 0
## 1 1 1 1 1
## predict
## real 4.70176763082948 4.70581792031173 4.70631615400828 4.70649988847123
## 0 0 0 0 0
## 1 1 1 1 1
## predict
## real 4.71151988550742 4.71495047229259 4.72281981096729 4.73001606476314
## 0 0 0 0 0
## 1 1 1 1 1
## predict
## real 4.73293367196583 4.73420214099228 4.73791194328256 4.73968867430797
## 0 0 0 0 0
## 1 1 1 1 1
## predict
## real 4.74482059518045 4.74633278096357 4.74686895469824 4.75099872431779
## 0 0 0 0 0
## 1 1 1 1 1
## predict
## real 4.75755273627315 4.76450870150276 4.76526184590463 4.76826973342783
## 0 0 0 0 0
## 1 1 1 1 1
## predict
## real 4.76978062981787 4.77010084041061 4.77060144664857 4.77175417178328
## 0 0 0 0 0
## 1 1 1 1 1
## predict
## real 4.77806872205181 4.79342902029492 4.80128766878361 4.80469750958108
## 0 0 0 0 0
## 1 1 1 1 1
## predict
## real 4.80878931853803 4.81028953849171 4.82168814915749 4.82968720098614
## 0 0 0 0 0
## 1 1 1 1 1
## predict
## real 4.83689112429263 4.84819430132941 4.84827570453345 4.86085604280346
## 0 0 0 0 0
## 1 1 1 1 1
## predict
## real 4.86628809016457 4.87255030702555 4.87391424334453 4.87771753059283
## 0 0 0 0 0
## 1 1 1 1 1
## predict
## real 4.89616414440597 4.90082360970233 4.92234887980055 4.92330351835182
## 0 0 0 0 0
## 1 1 1 1 1
## predict
## real 4.92702954841622 4.95940822135793 4.96192965539374 4.96225291416115
## 0 0 0 0 0
## 1 1 1 1 1
## predict
## real 4.97751998703145 4.98718027135556 4.99385762796504 5.00798030022006
## 0 0 0 0 0
## 1 1 1 1 1
## predict
## real 5.0260425708574 5.04978798969678 5.05129708605628 5.07081922447695
## 0 0 0 0 0
## 1 1 1 1 1
## predict
## real 5.07242629129208 5.07830302517217 5.07898499333168 5.08063956597364
## 0 0 0 0 0
## 1 1 1 1 1
## predict
## real 5.10355369613258 5.10516076294771 5.11071444430515 5.13354608740107
## 0 0 0 0 0
## 1 1 1 1 1
## predict
## real 5.14734198332551 5.1555208561566 5.17105557744245 5.19100878089345
## 0 0 0 0 0
## 1 1 1 1 1
## predict
## real 5.19274335118501 5.20170084573883 5.21709851150918 5.23706529519988
## 0 0 0 0 0
## 1 1 1 1 1
## predict
## real 5.24303582696365 5.24957496928624 5.2546169184947 5.26312240631753
## 0 0 0 0 1
## 1 1 1 1 0
## predict
## real 5.27503014819659 5.28628961148892 5.28847087979142 5.28977853700011
## 0 0 0 0 0
## 1 1 1 1 1
## predict
## real 5.30616593942993 5.30942989642984 5.31101141717584 5.31473848913963
## 0 0 0 0 0
## 1 1 1 1 1
## predict
## real 5.3272303691654 5.33234513036159 5.3332406616223 5.3347872492559
## 0 0 0 0 0
## 1 1 1 1 1
## predict
## real 5.33860443654439 5.3405287482755 5.34963755421777 5.36356506431717
## 0 0 0 0 0
## 1 1 1 1 1
## predict
## real 5.36917141097417 5.36929794651615 5.36974429722731 5.37744399919104
## 0 0 0 0 0
## 1 1 1 1 1
## predict
## real 5.38709040931609 5.40339635449075 5.40515267995343 5.40744176992665
## 0 0 0 0 0
## 1 1 1 1 1
## predict
## real 5.41912511875167 5.42010320416549 5.4203783364714 5.42187865954746
## 0 0 0 0 0
## 1 1 1 1 1
## predict
## real 5.42905118792919 5.43234541545658 5.43507800567807 5.43575227682763
## 0 0 0 0 0
## 1 1 1 1 1
## predict
## real 5.43889230978771 5.44127621891951 5.44951023271395 5.45359515613256
## 0 0 0 0 0
## 1 1 1 1 1
## predict
## real 5.45813211578033 5.45979906864164 5.47044313077237 5.47892797894773
## 0 0 0 0 0
## 1 1 1 1 1
## predict
## real 5.49401310363597 5.4970458203461 5.49826505913825 5.50178279557136
## 0 0 0 0 0
## 1 1 1 1 1
## predict
## real 5.50592680043938 5.50628062203547 5.50900849467345 5.51327790381132
## 0 0 0 0 0
## 1 1 1 1 1
## predict
## real 5.51454925601664 5.51652596137059 5.52021847502107 5.53136865442877
## 0 0 0 0 0
## 1 1 1 1 1
## predict
## real 5.53201244098106 5.5349489872661 5.53602532168711 5.54150387256649
## 0 0 0 0 0
## 1 1 1 1 1
## predict
## real 5.5496143371202 5.55648140079512 5.55743614246875 5.55787110839431
## 0 0 0 0 0
## 1 1 1 1 1
## predict
## real 5.57364527761735 5.57628820293629 5.5791247483074 5.58543369389773
## 0 0 0 0 0
## 1 1 1 1 1
## predict
## real 5.58735150863925 5.6168593083243 5.6210667786751 5.62928005335666
## 0 0 0 0 1
## 1 1 1 1 0
## predict
## real 5.66796955275898 5.67069426200842 5.68495356685605 5.69123481904692
## 0 0 0 0 0
## 1 1 1 1 1
## predict
## real 5.70768034874134 5.71236154174409 5.73652692625461 5.74168617260228
## 0 0 0 0 0
## 1 1 1 1 1
## predict
## real 5.75089274275542 5.75240196286177 5.7529386472339 5.76639124153579
## 0 0 0 0 0
## 1 1 1 1 1
## predict
## real 5.77232728895118 5.77605736784069 5.79120536487636 5.80268779206283
## 0 0 0 0 1
## 1 1 1 1 0
## predict
## real 5.80610432756165 5.81412015004407 5.82217423236513 5.82758746960741
## 0 0 1 0 0
## 1 1 0 1 1
## predict
## real 5.83579777861221 5.84057155572867 5.84772932447481 5.85522504287569
## 0 0 0 0 0
## 1 1 1 1 1
## predict
## real 5.8576365766587 5.8767063780161 5.9198726312195 5.97596899522701
## 0 0 0 1 0
## 1 1 1 0 1
## predict
## real 5.99069354861921 6.00400086600855 6.06332379198944 6.1118971189957
## 0 0 1 1 0
## 1 1 0 0 1
## predict
## real 6.15318225622639 6.16750714638785 6.19088979847709 6.21130492374643
## 0 0 0 1 1
## 1 1 1 0 0
## predict
## real 6.215555692978 6.23029962212506 6.26896625570866 6.33037601250738
## 0 0 0 0 1
## 1 1 1 1 0
## predict
## real 6.35991706902116 6.43069943262292 6.45657274445787 6.48824154437919
## 0 0 0 0 0
## 1 1 1 1 1
## predict
## real 6.51136540549238 6.54287236196022 6.57344823342031 6.59676761773854
## 0 0 0 0 0
## 1 1 1 1 1
## predict
## real 6.78049476895071 6.79710495131254 6.7979363620817 6.82207803492772
## 0 0 0 0 0
## 1 1 1 1 1
## predict
## real 6.82352005365188 6.83134291630761 6.83171256859608 6.83227552801104
## 0 0 0 0 0
## 1 1 1 1 1
## predict
## real 6.85882426289212 6.86000535018473 6.86916349470034 6.87161858007451
## 0 0 0 0 0
## 1 1 1 1 1
## predict
## real 6.87212264200222 6.87296274521506 6.88389993829891 6.89205389944516
## 0 0 0 0 0
## 1 1 1 1 1
## predict
## real 6.90483931959726 6.91251294422993 6.91455884870841 6.93399494125394
## 0 0 0 0 0
## 1 1 1 1 1
## predict
## real 6.9481650643372 6.95808621821943 6.96439590653312 6.96490707921023
## 0 0 0 0 0
## 1 1 1 1 1
## predict
## real 6.97053361996855 6.97134307785482 6.97423995969426 6.99436098607606
## 0 0 0 0 0
## 1 1 1 1 1
## predict
## real 7.01449782940062 7.02548720464585 7.05715741604895 7.10180655931728
## 0 0 0 0 0
## 1 1 1 1 1
## predict
## real 7.11170102898346 7.11769049295474 7.11800242027702 7.12127033151262
## 0 0 0 0 0
## 1 1 1 1 1
## predict
## real 7.12861028052674 7.15982665567019 7.1990404042267 7.21199779925707
## 0 0 0 0 0
## 1 1 1 1 1
## predict
## real 7.22052240125071 7.2487529380014 7.25505076360802 7.2798565968033
## 0 0 0 0 0
## 1 1 1 1 1
## predict
## real 7.28353924548903 7.3437422124632 7.3587306835882 7.42116339262616
## 0 0 0 0 0
## 1 1 1 1 1
## predict
## real 7.42178784040606 7.48715419192572 7.52856151503914 7.56848532057816
## 0 0 0 0 0
## 1 1 1 1 1
## predict
## real 7.63653190994541 7.68163550323321 7.92899545173169 8.17689385713356
## 0 0 0 0 0
## 1 1 1 1 1
## predict
## real 8.20551991204235 8.31847835551553 8.40650365315397 8.42489899939967
## 0 0 0 0 0
## 1 1 1 1 1
## predict
## real 8.49176746715108 8.59680835777349 8.61560932610277 8.63936317092581
## 0 0 0 0 0
## 1 1 1 1 1
## predict
## real 8.70217243841505 8.74905926659265 8.88635722365973 8.90819383312696
## 0 0 0 0 0
## 1 1 1 1 1
## predict
## real 8.94710991023414 8.97224665795781 9.02445182172123 9.05881510848827
## 0 0 0 0 0
## 1 1 1 1 1
## predict
## real 9.22295002984388 9.42652642248262 9.45365096804465 9.47206410835094
## 0 0 0 0 0
## 1 1 1 1 1
## predict
## real 9.53275335006705 9.58237295242826 9.66581932099585 9.73300293398292
## 0 0 0 0 0
## 1 1 1 1 1
## predict
## real 9.77830222068827 9.81950022889549 9.84195772733049 9.90026897064385
## 0 0 0 0 0
## 1 1 1 2 1
## predict
## real 9.90681390160677 9.95455661556585 9.97069790807311 9.99489650128855
## 0 0 0 0 0
## 1 1 1 1 1
## predict
## real 10.0308842741322 10.0329503452127 10.037480991833 10.0717276492717
## 0 0 0 0 0
## 1 1 1 1 1
## predict
## real 10.1172771978471 10.1281827570455 10.1448880112763 10.1602322948648
## 0 0 0 0 0
## 1 1 1 1 1
## predict
## real 10.1688482259532 10.276103443995 10.2826218678292 10.3096752577734
## 0 0 0 0 0
## 1 1 1 1 1
## predict
## real 10.3290816935512 10.3948086094921 16.9456694998674 20.7525002251175
## 0 0 0 0 0
## 1 1 1 1 1
## predict
## real 25.8960170890924
## 0 0
## 1 1
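A minimal sketch of the corrected step, assuming yhat_test was created with predict(m, test, type = "response") as above; the 0.5 cutoff and the name yhat_test_class are illustrative choices:
yhat_test_class <- ifelse(yhat_test >= 0.5, 1, 0) # predicted probability >= 0.5 -> faulty (1), otherwise normal (0)
table <- table(real = test$y_faulty, predict = yhat_test_class) # 2 x 2 confusion table for the test set
table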
(table[1,1] + table[2,2]) / sum(table) # accuracy: share of test observations on the diagonal of the 2 x 2 table
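The same 2 x 2 table also yields sensitivity and specificity; a short sketch, assuming row 1 is real = 0 and row 2 is real = 1 as in the table above:
sum(diag(table)) / sum(table) # accuracy, identical to (table[1,1] + table[2,2]) / sum(table)
table[2, 2] / sum(table[2, ]) # sensitivity: proportion of truly faulty parts predicted as faulty
table[1, 1] / sum(table[1, ]) # specificity: proportion of truly normal parts predicted as normal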
- ROC & AUC: ROC() from the Epi package draws the ROC curve for the test-set predictions and prints the AUC.
library(Epi)
ROC(test = yhat_test, stat = test$y_faulty == 1, plot = "ROC", AUC = TRUE, main = "Logistic Regression") # stat must be the true fault indicator (TRUE = faulty)
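As a cross-check, the AUC can also be computed with the pROC package (assumed to be installed; roc_obj is an illustrative name):
library(pROC)
roc_obj <- roc(response = test$y_faulty, predictor = yhat_test) # ROC built from the true labels and the predicted probabilities
auc(roc_obj) # area under the ROC curve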