trees, and SVR (radial kernel).
library(tseries)
## Warning: package 'tseries' was built under R version 3.5.2
library(quantmod)
## Warning: package 'quantmod' was built under R version 3.5.2
## Loading required package: xts
## Loading required package: zoo
##
## Attaching package: 'zoo'
## The following objects are masked from 'package:base':
##
## as.Date, as.Date.numeric
## Loading required package: TTR
## Version 0.4-0 included new data defaults. See ?getSymbols.
# Load data and examine top rows
dow_jones <- read.csv("dow_jones_index.data", header = TRUE)
head(dow_jones)
# Drop rows with missing values (the first recorded week of each stock)
dow_jones <- na.omit(dow_jones)
anyNA(dow_jones)
## [1] FALSE
str(dow_jones)
## 'data.frame': 720 obs. of 16 variables:
## $ quarter : int 1 1 1 1 1 1 1 1 1 1 ...
## $ stock : Factor w/ 30 levels "AA","AXP","BA",..: 1 1 1 1 1 1 1 1 1 1 ...
## $ date : Factor w/ 25 levels "1/14/2011","1/21/2011",..: 1 2 3 8 5 6 7 12 9 10 ...
## $ open : Factor w/ 722 levels "$10.59","$10.89",..: 81 75 66 74 112 115 88 83 79 68 ...
## $ high : Factor w/ 713 levels "$10.94","$101.29",..: 76 72 75 106 107 111 103 82 78 71 ...
## $ low : Factor w/ 711 levels "$10.40","$10.41",..: 59 58 62 76 86 106 65 75 56 57 ...
## $ close : Factor w/ 711 levels "$10.52","$10.68",..: 65 63 73 108 111 110 81 80 71 72 ...
## $ volume : int 242963398 138428495 151379173 154387761 114691279 80023895 132981863 109493077 114332562 130374108 ...
## $ percent_change_price : num -4.428 -2.471 1.638 5.933 0.231 ...
## $ percent_change_volume_over_last_wk: num 1.38 -43.02 9.36 1.99 -25.71 ...
## $ previous_weeks_volume : int 239655616 242963398 138428495 151379173 154387761 114691279 80023895 132981863 109493077 114332562 ...
## $ next_weeks_open : Factor w/ 720 levels "$10.52","$10.59",..: 74 65 73 112 115 87 82 78 67 76 ...
## $ next_weeks_close : Factor w/ 715 levels "$10.52","$10.68",..: 65 76 111 114 113 84 83 74 75 109 ...
## $ percent_change_next_weeks_price : num -2.471 1.638 5.933 0.231 -0.633 ...
## $ days_to_next_dividend : int 19 12 5 97 90 83 76 69 62 55 ...
## $ percent_return_next_dividend : num 0.188 0.19 0.186 0.175 0.173 ...
## - attr(*, "na.action")= 'omit' Named int 1 13 25 37 49 61 73 85 97 109 ...
## ..- attr(*, "names")= chr "1" "13" "25" "37" ...
# Strip the "$" prefix from the price columns and convert them to numeric
price_cols <- c("open", "high", "low", "close", "next_weeks_open", "next_weeks_close")
dow_jones[price_cols] <- lapply(dow_jones[price_cols],
                                function(x) as.numeric(gsub("\\$", "", x)))
str(dow_jones)
## 'data.frame': 720 obs. of 16 variables:
## $ quarter : int 1 1 1 1 1 1 1 1 1 1 ...
## $ stock : Factor w/ 30 levels "AA","AXP","BA",..: 1 1 1 1 1 1 1 1 1 1 ...
## $ date : Factor w/ 25 levels "1/14/2011","1/21/2011",..: 1 2 3 8 5 6 7 12 9 10 ...
## $ open : num 16.7 16.2 15.9 16.2 17.3 ...
## $ high : num 16.7 16.4 16.6 17.4 17.5 ...
## $ low : num 15.6 15.6 15.8 16.2 17 ...
## $ close : num 16 15.8 16.1 17.1 17.4 ...
## $ volume : int 242963398 138428495 151379173 154387761 114691279 80023895 132981863 109493077 114332562 130374108 ...
## $ percent_change_price : num -4.428 -2.471 1.638 5.933 0.231 ...
## $ percent_change_volume_over_last_wk: num 1.38 -43.02 9.36 1.99 -25.71 ...
## $ previous_weeks_volume : int 239655616 242963398 138428495 151379173 154387761 114691279 80023895 132981863 109493077 114332562 ...
## $ next_weeks_open : num 16.2 15.9 16.2 17.3 17.4 ...
## $ next_weeks_close : num 15.8 16.1 17.1 17.4 17.3 ...
## $ percent_change_next_weeks_price : num -2.471 1.638 5.933 0.231 -0.633 ...
## $ days_to_next_dividend : int 19 12 5 97 90 83 76 69 62 55 ...
## $ percent_return_next_dividend : num 0.188 0.19 0.186 0.175 0.173 ...
## - attr(*, "na.action")= 'omit' Named int 1 13 25 37 49 61 73 85 97 109 ...
## ..- attr(*, "names")= chr "1" "13" "25" "37" ...
# Lag plots (lags 1-12) for a visual autocorrelation check of each series
lag.plot(dow_jones$open, set.lags = 1:12)
lag.plot(dow_jones$high, set.lags = 1:12)
lag.plot(dow_jones$low, set.lags = 1:12)
lag.plot(dow_jones$close, set.lags = 1:12)
lag.plot(dow_jones$percent_change_volume_over_last_wk, set.lags = 1:12)
lag.plot(dow_jones$percent_change_next_weeks_price, set.lags = 1:12)
lag.plot(dow_jones$percent_return_next_dividend, set.lags = 1:12)
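The lag plots are read visually; as a numeric companion, a minimal sketch (assuming the same cleaned dow_jones frame) prints the lag-1 autocorrelation of each plotted series. Note that, like the plots themselves, this treats the 30 stacked per-stock series as one sequence, so it mixes within- and across-stock adjacency.
# Lag-1 autocorrelation for each series examined above
for (v in c("open", "high", "low", "close",
            "percent_change_volume_over_last_wk",
            "percent_change_next_weeks_price",
            "percent_return_next_dividend")) {
  r <- acf(dow_jones[[v]], lag.max = 1, plot = FALSE)$acf[2]
  cat(v, ":", round(r, 3), "\n")
}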
library(dplyr)
##
## Attaching package: 'dplyr'
## The following objects are masked from 'package:xts':
##
## first, last
## The following objects are masked from 'package:stats':
##
## filter, lag
## The following objects are masked from 'package:base':
##
## intersect, setdiff, setequal, union
# Within each stock, create one-week lagged versions of all predictor variables
dow_jones <- dow_jones %>%
  group_by(stock) %>%
  mutate(
    open.lag                               = dplyr::lag(open, n = 1),
    high.lag                               = dplyr::lag(high, n = 1),
    low.lag                                = dplyr::lag(low, n = 1),
    close.lag                              = dplyr::lag(close, n = 1),
    percent_change_volume_over_last_wk.lag = dplyr::lag(percent_change_volume_over_last_wk, n = 1),
    percent_change_next_weeks_price.lag    = dplyr::lag(percent_change_next_weeks_price, n = 1),
    percent_return_next_dividend.lag       = dplyr::lag(percent_return_next_dividend, n = 1),
    next_weeks_close.lag                   = dplyr::lag(next_weeks_close, n = 1),
    next_weeks_open.lag                    = dplyr::lag(next_weeks_open, n = 1)
  )
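Each stock's first remaining week has no predecessor inside its group, so every new .lag column is NA on that row; this is the source of the "(1 observation deleted due to missingness)" notes in the model summaries below. A quick check, as a sketch:
# Number of stocks whose first row carries a missing lag (expected: all 30)
dow_jones %>%
  group_by(stock) %>%
  slice(1) %>%
  ungroup() %>%
  summarise(stocks_with_na_lag = sum(is.na(open.lag)))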
# str(dow_jones)
# Quarter 1 is the training period, quarter 2 the test period;
# then split each by stock for per-ticker models
newdata <- split(dow_jones, dow_jones$quarter)
train <- newdata[[1]]
test <- newdata[[2]]
train1 <- split(train, train$stock)
test1 <- split(test, test$stock)
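As a quick sanity check (a sketch, not part of the original run): na.omit removed the first week of each stock, all of which fall in quarter 1, so each per-stock training set should hold 11 weeks and each test set 13.
sapply(train1, nrow)   # expected: 11 for every stock
sapply(test1, nrow)    # expected: 13 for every stock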
glmfxn <- function(trainstock, teststock, formula) {
  set.seed(123)
  glmfit <- glm(formula, data = trainstock)
  linear.predict <- predict.glm(glmfit, newdata = teststock)
  # Binarize predicted and actual changes around their respective means
  # ("1" = above-average week, "0" = below-average week)
  linear.predict <- ifelse(linear.predict >= mean(linear.predict), "1", "0")
  linear.actual <- ifelse(teststock$percent_change_next_weeks_price >= mean(teststock$percent_change_next_weeks_price), "1", "0")
  # Note: actuals are passed as `data` and predictions as `reference`;
  # Accuracy and Kappa are unaffected by this order
  confusion <- caret::confusionMatrix(as.factor(linear.actual), as.factor(linear.predict))
  print(confusion$overall)
  print(summary(glmfit))
}
formula <- percent_change_next_weeks_price ~ open.lag + high.lag + low.lag + close.lag + next_weeks_open.lag + next_weeks_close.lag + percent_return_next_dividend.lag + volume
# Fit one linear model per stock: once for the report, and once
# redirected into a per-stock text file via sink()
for (l in names(train1)) {
  x <- train1[[l]]
  y <- test1[[l]]
  glmfxn(x, y, formula)
  outfile <- paste0("Linear_Accuracy", l, ".txt")
  sink(outfile)
  glmfxn(x, y, formula)
  sink()
}
## Accuracy Kappa AccuracyLower AccuracyUpper AccuracyNull
## 0.46153846 -0.04597701 0.19223244 0.74865452 0.69230769
## AccuracyPValue McnemarPValue
## 0.97867974 0.44969180
##
## Call:
## glm(formula = formula, data = trainstock)
##
## Deviance Residuals:
## 2 3 4 5 6 7
## -0.000524 -0.014270 0.102605 -0.009935 0.125832 -0.137112
## 8 9 10 11
## -0.056192 0.085689 0.035388 -0.131482
##
## Coefficients:
## Estimate Std. Error t value Pr(>|t|)
## (Intercept) -3.890e+03 3.140e+02 -12.388 0.0513 .
## open.lag -5.608e+00 1.438e+00 -3.900 0.1598
## high.lag -1.711e+00 3.346e+00 -0.511 0.6991
## low.lag -1.724e+00 1.599e+00 -1.078 0.4761
## close.lag 1.355e+02 1.193e+01 11.353 0.0559 .
## next_weeks_open.lag -1.001e+01 9.420e-01 -10.623 0.0598 .
## next_weeks_close.lag -4.428e+00 4.362e-01 -10.153 0.0625 .
## percent_return_next_dividend.lag 1.127e+04 8.586e+02 13.123 0.0484 *
## volume -5.333e-08 2.845e-08 -1.875 0.3120
## ---
## Signif. codes: 0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
##
## (Dispersion parameter for gaussian family taken to be 0.07450371)
##
## Null deviance: 71.608455 on 9 degrees of freedom
## Residual deviance: 0.074504 on 1 degrees of freedom
## (1 observation deleted due to missingness)
## AIC: -0.61614
##
## Number of Fisher Scoring iterations: 2
##
## Accuracy Kappa AccuracyLower AccuracyUpper AccuracyNull
## 0.6153846 0.2352941 0.3157776 0.8614207 0.5384615
## AccuracyPValue McnemarPValue
## 0.3937754 1.0000000
##
## Call:
## glm(formula = formula, data = trainstock)
##
## Deviance Residuals:
## 2 3 4 5 6 7 8
## -1.40020 0.42585 0.06117 0.47213 0.75478 -0.07361 -0.57018
## 9 10 11
## -0.14837 0.34897 0.12945
##
## Coefficients:
## Estimate Std. Error t value Pr(>|t|)
## (Intercept) 3.015e+03 3.884e+03 0.776 0.580
## open.lag -3.583e+00 8.135e+00 -0.440 0.736
## high.lag 6.974e+00 5.774e+00 1.208 0.440
## low.lag 3.430e+00 1.232e+01 0.278 0.827
## close.lag -4.869e+01 4.649e+01 -1.047 0.485
## next_weeks_open.lag 8.653e+00 1.937e+01 0.447 0.733
## next_weeks_close.lag 2.512e-01 1.076e+00 0.234 0.854
## percent_return_next_dividend.lag -3.828e+03 4.822e+03 -0.794 0.573
## volume -4.255e-08 2.101e-07 -0.203 0.873
##
## (Dispersion parameter for gaussian family taken to be 3.429323)
##
## Null deviance: 81.8985 on 9 degrees of freedom
## Residual deviance: 3.4293 on 1 degrees of freedom
## (1 observation deleted due to missingness)
## AIC: 37.677
##
## Number of Fisher Scoring iterations: 2
##
## Accuracy Kappa AccuracyLower AccuracyUpper AccuracyNull
## 0.30769231 -0.40963855 0.09092039 0.61426166 0.61538462
## AccuracyPValue McnemarPValue
## 0.99438593 1.00000000
##
## Call:
## glm(formula = formula, data = trainstock)
##
## Deviance Residuals:
## 2 3 4 5 6 7 8
## -0.47566 0.68489 -0.11085 0.08669 0.15775 -0.00266 -0.54181
## 9 10 11
## 0.26649 -0.37152 0.30669
##
## Coefficients:
## Estimate Std. Error t value Pr(>|t|)
## (Intercept) -4.872e+04 1.233e+04 -3.951 0.158
## open.lag -3.393e+00 1.056e+00 -3.212 0.192
## high.lag 1.635e+00 8.010e-01 2.041 0.290
## low.lag -4.708e+00 1.528e+00 -3.082 0.200
## close.lag 3.457e+02 8.776e+01 3.939 0.158
## next_weeks_open.lag 8.157e+00 2.244e+00 3.635 0.171
## next_weeks_close.lag -8.283e+00 1.831e+00 -4.525 0.138
## percent_return_next_dividend.lag 4.169e+04 1.053e+04 3.957 0.158
## volume -1.180e-06 2.983e-07 -3.955 0.158
##
## (Dispersion parameter for gaussian family taken to be 1.336687)
##
## Null deviance: 48.8315 on 9 degrees of freedom
## Residual deviance: 1.3367 on 1 degrees of freedom
## (1 observation deleted due to missingness)
## AIC: 28.255
##
## Number of Fisher Scoring iterations: 2
##
## Accuracy Kappa AccuracyLower AccuracyUpper AccuracyNull
## 0.53846154 0.04878049 0.25134548 0.80776756 0.69230769
## AccuracyPValue McnemarPValue
## 0.92934774 0.68309140
##
## Call:
## glm(formula = formula, data = trainstock)
##
## Deviance Residuals:
## 2 3 4 5 6 7 8
## -0.06952 0.14442 0.25608 1.29496 -0.92745 1.06025 1.86077
## 9 10 11
## -2.32173 -1.01466 -0.28311
##
## Coefficients:
## Estimate Std. Error t value Pr(>|t|)
## (Intercept) -1.439e+03 3.195e+03 -0.450 0.731
## open.lag 6.343e+00 1.627e+01 0.390 0.763
## high.lag -3.805e+00 2.420e+01 -0.157 0.901
## low.lag 8.521e+00 2.276e+01 0.374 0.772
## close.lag 2.112e+01 8.645e+01 0.244 0.847
## next_weeks_open.lag 2.122e+01 2.327e+01 0.912 0.529
## next_weeks_close.lag 1.526e+00 5.223e+00 0.292 0.819
## percent_return_next_dividend.lag 9.282e+03 2.264e+04 0.410 0.752
## volume 1.045e-08 2.016e-08 0.519 0.695
##
## (Dispersion parameter for gaussian family taken to be 13.71508)
##
## Null deviance: 80.109 on 9 degrees of freedom
## Residual deviance: 13.715 on 1 degrees of freedom
## (1 observation deleted due to missingness)
## AIC: 51.538
##
## Number of Fisher Scoring iterations: 2
##
## Accuracy Kappa AccuracyLower AccuracyUpper AccuracyNull
## 0.46153846 -0.02247191 0.19223244 0.74865452 0.61538462
## AccuracyPValue McnemarPValue
## 0.92110952 0.44969180
##
## Call:
## glm(formula = formula, data = trainstock)
##
## Deviance Residuals:
## 2 3 4 5 6 7 8
## -0.13350 0.24664 -0.24909 0.11162 0.10957 -0.33192 -0.07756
## 9 10 11
## 0.41762 -0.17330 0.07992
##
## Coefficients:
## Estimate Std. Error t value Pr(>|t|)
## (Intercept) -5.821e+02 3.335e+02 -1.745 0.331
## open.lag -3.720e-01 3.554e-01 -1.047 0.485
## high.lag -5.032e-01 5.053e-01 -0.996 0.501
## low.lag -1.305e+00 6.829e-01 -1.911 0.307
## close.lag 6.257e+00 2.020e+00 3.097 0.199
## next_weeks_open.lag -1.550e+00 9.525e-01 -1.627 0.351
## next_weeks_close.lag -3.286e-02 1.803e-01 -0.182 0.885
## percent_return_next_dividend.lag 7.232e+02 3.747e+02 1.930 0.304
## volume 3.785e-07 6.306e-08 6.003 0.105
##
## (Dispersion parameter for gaussian family taken to be 0.4921773)
##
## Null deviance: 60.21376 on 9 degrees of freedom
## Residual deviance: 0.49218 on 1 degrees of freedom
## (1 observation deleted due to missingness)
## AIC: 18.264
##
## Number of Fisher Scoring iterations: 2
##
## Accuracy Kappa AccuracyLower AccuracyUpper AccuracyNull
## 0.4615385 -0.1234568 0.1922324 0.7486545 0.7692308
## AccuracyPValue McnemarPValue
## 0.9966868 0.4496918
##
## Call:
## glm(formula = formula, data = trainstock)
##
## Deviance Residuals:
## 2 3 4 5 6 7 8
## -0.36109 0.46907 -0.45081 0.15219 0.02019 2.61889 -1.68494
## 9 10 11
## -0.51216 -0.72268 0.47135
##
## Coefficients:
## Estimate Std. Error t value Pr(>|t|)
## (Intercept) -2.262e+02 9.500e+02 -0.238 0.851
## open.lag 2.351e+01 9.454e+00 2.486 0.243
## high.lag -2.307e+01 9.146e+00 -2.522 0.240
## low.lag 3.411e+00 1.197e+01 0.285 0.823
## close.lag -9.067e+00 4.405e+01 -0.206 0.871
## next_weeks_open.lag 1.578e+01 2.121e+01 0.744 0.593
## next_weeks_close.lag -3.219e+00 4.294e+00 -0.750 0.590
## percent_return_next_dividend.lag 2.674e+02 1.505e+03 0.178 0.888
## volume 1.300e-08 2.089e-08 0.622 0.646
##
## (Dispersion parameter for gaussian family taken to be 11.28155)
##
## Null deviance: 253.209 on 9 degrees of freedom
## Residual deviance: 11.282 on 1 degrees of freedom
## (1 observation deleted due to missingness)
## AIC: 49.585
##
## Number of Fisher Scoring iterations: 2
##
## Accuracy Kappa AccuracyLower AccuracyUpper AccuracyNull
## 0.53846154 0.07142857 0.25134548 0.80776756 0.53846154
## AccuracyPValue McnemarPValue
## 0.61146992 1.00000000
##
## Call:
## glm(formula = formula, data = trainstock)
##
## Deviance Residuals:
## 2 3 4 5 6 7 8
## 0.23662 -0.59649 0.10088 0.26180 -0.02956 0.21745 -0.31366
## 9 10 11
## 0.15608 0.08661 -0.11974
##
## Coefficients:
## Estimate Std. Error t value Pr(>|t|)
## (Intercept) 8.924e+02 2.016e+02 4.425 0.1415
## open.lag 3.685e+00 4.848e-01 7.602 0.0833 .
## high.lag -5.052e+00 7.646e-01 -6.608 0.0956 .
## low.lag -1.759e+00 3.253e-01 -5.406 0.1164
## close.lag -1.843e+00 1.612e+00 -1.143 0.4575
## next_weeks_open.lag 1.153e+00 8.397e-01 1.373 0.4007
## next_weeks_close.lag 1.051e-01 1.571e-01 0.669 0.6245
## percent_return_next_dividend.lag -7.066e+02 1.491e+02 -4.738 0.1324
## volume 5.151e-08 3.814e-08 1.351 0.4057
## ---
## Signif. codes: 0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
##
## (Dispersion parameter for gaussian family taken to be 0.6832389)
##
## Null deviance: 56.04482 on 9 degrees of freedom
## Residual deviance: 0.68324 on 1 degrees of freedom
## (1 observation deleted due to missingness)
## AIC: 21.544
##
## Number of Fisher Scoring iterations: 2
##
## Accuracy Kappa AccuracyLower AccuracyUpper AccuracyNull
## 0.6153846 0.2168675 0.3157776 0.8614207 0.5384615
## AccuracyPValue McnemarPValue
## 0.3937754 1.0000000
##
## Call:
## glm(formula = formula, data = trainstock)
##
## Deviance Residuals:
## 2 3 4 5 6 7 8
## -0.16880 0.00781 0.18622 0.18185 -0.41128 0.09074 0.19995
## 9 10 11
## 0.24957 -0.34744 0.01136
##
## Coefficients:
## Estimate Std. Error t value Pr(>|t|)
## (Intercept) 1.086e+03 7.087e+02 1.532 0.368
## open.lag 2.046e+00 1.206e+00 1.696 0.339
## high.lag -9.870e+00 2.974e+00 -3.319 0.186
## low.lag 4.315e+00 1.536e+00 2.809 0.218
## close.lag -6.836e+00 5.032e+00 -1.358 0.404
## next_weeks_open.lag -6.571e-01 2.244e+00 -0.293 0.819
## next_weeks_close.lag 1.105e-01 4.416e-01 0.250 0.844
## percent_return_next_dividend.lag -6.448e+02 4.449e+02 -1.449 0.384
## volume 2.385e-07 7.355e-08 3.242 0.190
##
## (Dispersion parameter for gaussian family taken to be 0.4967913)
##
## Null deviance: 54.79677 on 9 degrees of freedom
## Residual deviance: 0.49679 on 1 degrees of freedom
## (1 observation deleted due to missingness)
## AIC: 18.357
##
## Number of Fisher Scoring iterations: 2
##
## Accuracy Kappa AccuracyLower AccuracyUpper AccuracyNull
## 0.6923077 0.3500000 0.3857383 0.9090796 0.6153846
## AccuracyPValue McnemarPValue
## 0.3966378 1.0000000
##
## Call:
## glm(formula = formula, data = trainstock)
##
## Deviance Residuals:
## 2 3 4 5 6 7 8
## -1.78633 1.51276 0.39411 0.01758 -0.29622 1.03945 -0.65223
## 9 10 11
## 0.41945 -1.11282 0.46425
##
## Coefficients:
## Estimate Std. Error t value Pr(>|t|)
## (Intercept) -4.150e+03 2.788e+03 -1.489 0.377
## open.lag 3.028e+00 2.798e+00 1.082 0.475
## high.lag 2.232e+00 4.871e+00 0.458 0.726
## low.lag -6.138e+00 3.674e+00 -1.671 0.343
## close.lag 5.445e+01 3.629e+01 1.500 0.374
## next_weeks_open.lag -2.922e+00 5.704e+00 -0.512 0.699
## next_weeks_close.lag -1.431e+00 1.234e+00 -1.159 0.453
## percent_return_next_dividend.lag 2.169e+03 1.437e+03 1.509 0.373
## volume 1.488e-07 9.595e-08 1.551 0.365
##
## (Dispersion parameter for gaussian family taken to be 8.858525)
##
## Null deviance: 83.7400 on 9 degrees of freedom
## Residual deviance: 8.8585 on 1 degrees of freedom
## (1 observation deleted due to missingness)
## AIC: 47.167
##
## Number of Fisher Scoring iterations: 2
##
## Accuracy Kappa AccuracyLower AccuracyUpper AccuracyNull
## 0.46153846 -0.09638554 0.19223244 0.74865452 0.61538462
## AccuracyPValue McnemarPValue
## 0.92110952 1.00000000
##
## Call:
## glm(formula = formula, data = trainstock)
##
## Deviance Residuals:
## 2 3 4 5 6 7 8 9
## -0.1361 -0.1492 -0.2767 0.7293 -0.3707 0.2087 0.1018 -0.6803
## 10 11
## 0.2146 0.3586
##
## Coefficients:
## Estimate Std. Error t value Pr(>|t|)
## (Intercept) -1.506e+02 2.108e+03 -0.071 0.955
## open.lag -7.365e+00 4.842e+00 -1.521 0.370
## high.lag 3.898e+00 1.014e+01 0.384 0.766
## low.lag 3.826e-01 1.961e+00 0.195 0.877
## close.lag 9.927e+00 4.520e+01 0.220 0.862
## next_weeks_open.lag -9.575e-01 3.422e+00 -0.280 0.826
## next_weeks_close.lag -5.063e+00 2.015e+00 -2.513 0.241
## percent_return_next_dividend.lag 2.035e+02 1.499e+03 0.136 0.914
## volume -2.525e-08 1.266e-08 -1.995 0.296
##
## (Dispersion parameter for gaussian family taken to be 1.477958)
##
## Null deviance: 41.063 on 9 degrees of freedom
## Residual deviance: 1.478 on 1 degrees of freedom
## (1 observation deleted due to missingness)
## AIC: 29.26
##
## Number of Fisher Scoring iterations: 2
##
## Accuracy Kappa AccuracyLower AccuracyUpper AccuracyNull
## 0.7692308 0.4935065 0.4618685 0.9496189 0.6923077
## AccuracyPValue McnemarPValue
## 0.3969090 1.0000000
##
## Call:
## glm(formula = formula, data = trainstock)
##
## Deviance Residuals:
## 2 3 4 5 6 7 8 9
## 0.5518 0.3121 -0.1493 1.6757 -0.4771 0.2546 0.7011 -1.0052
## 10 11
## -0.6936 -1.1702
##
## Coefficients:
## Estimate Std. Error t value Pr(>|t|)
## (Intercept) -2.611e+03 2.233e+04 -0.117 0.926
## open.lag 5.364e-01 6.002e+00 0.089 0.943
## high.lag -1.617e+00 6.456e+00 -0.250 0.844
## low.lag 3.817e+00 1.098e+01 0.348 0.787
## close.lag 3.326e+01 3.075e+02 0.108 0.931
## next_weeks_open.lag 1.248e+00 1.776e+01 0.070 0.955
## next_weeks_close.lag -4.184e+00 3.833e+00 -1.092 0.472
## percent_return_next_dividend.lag 2.067e+03 1.642e+04 0.126 0.920
## volume -1.031e-07 1.697e-07 -0.607 0.652
##
## (Dispersion parameter for gaussian family taken to be 6.87694)
##
## Null deviance: 59.8948 on 9 degrees of freedom
## Residual deviance: 6.8769 on 1 degrees of freedom
## (1 observation deleted due to missingness)
## AIC: 44.635
##
## Number of Fisher Scoring iterations: 2
##
## Accuracy Kappa AccuracyLower AccuracyUpper AccuracyNull
## 0.7692308 0.5301205 0.4618685 0.9496189 0.6153846
## AccuracyPValue McnemarPValue
## 0.1986072 1.0000000
##
## Call:
## glm(formula = formula, data = trainstock)
##
## Deviance Residuals:
## 2 3 4 5 6 7 8
## 1.65774 0.48931 0.49051 -1.01875 -1.06657 0.09504 0.08995
## 9 10 11
## 0.45500 -1.20897 0.01675
##
## Coefficients:
## Estimate Std. Error t value Pr(>|t|)
## (Intercept) 2.960e+02 7.356e+01 4.024 0.155
## open.lag -4.306e+01 1.219e+01 -3.533 0.176
## high.lag 4.031e+01 1.151e+01 3.503 0.177
## low.lag 1.230e+01 4.773e+00 2.578 0.236
## close.lag 5.010e+00 5.360e+00 0.935 0.521
## next_weeks_open.lag -2.246e+01 6.764e+00 -3.320 0.186
## next_weeks_close.lag 1.784e-01 8.880e-01 0.201 0.874
## percent_return_next_dividend.lag -1.198e+01 3.222e+01 -0.372 0.773
## volume 2.030e-07 6.920e-08 2.934 0.209
##
## (Dispersion parameter for gaussian family taken to be 7.089576)
##
## Null deviance: 145.3825 on 9 degrees of freedom
## Residual deviance: 7.0896 on 1 degrees of freedom
## (1 observation deleted due to missingness)
## AIC: 44.939
##
## Number of Fisher Scoring iterations: 2
##
## Accuracy Kappa AccuracyLower AccuracyUpper AccuracyNull
## 0.3846154 -0.2380952 0.1385793 0.6842224 0.5384615
## AccuracyPValue McnemarPValue
## 0.9180193 1.0000000
##
## Call:
## glm(formula = formula, data = trainstock)
##
## Deviance Residuals:
## 2 3 4 5 6 7 8
## -0.01088 0.04371 -0.18633 0.16262 0.19485 0.42992 -0.83824
## 9 10 11
## -0.24980 0.10613 0.34801
##
## Coefficients:
## Estimate Std. Error t value Pr(>|t|)
## (Intercept) 1.665e+02 6.346e+01 2.624 0.232
## open.lag -1.665e-01 5.051e-01 -0.330 0.797
## high.lag 4.117e-01 2.264e-01 1.818 0.320
## low.lag -7.767e-01 4.591e-01 -1.692 0.340
## close.lag 3.141e-02 1.212e+00 0.026 0.984
## next_weeks_open.lag 6.747e-01 1.186e+00 0.569 0.671
## next_weeks_close.lag -1.152e+00 3.689e-01 -3.122 0.197
## percent_return_next_dividend.lag -7.351e+00 5.670e+01 -0.130 0.918
## volume -3.209e-07 1.208e-07 -2.657 0.229
##
## (Dispersion parameter for gaussian family taken to be 1.183404)
##
## Null deviance: 34.0021 on 9 degrees of freedom
## Residual deviance: 1.1834 on 1 degrees of freedom
## (1 observation deleted due to missingness)
## AIC: 27.037
##
## Number of Fisher Scoring iterations: 2
##
## Accuracy Kappa AccuracyLower AccuracyUpper AccuracyNull
## 0.6153846 0.1975309 0.3157776 0.8614207 0.5384615
## AccuracyPValue McnemarPValue
## 0.3937754 0.3710934
##
## Call:
## glm(formula = formula, data = trainstock)
##
## Deviance Residuals:
## 2 3 4 5 6 7 8
## -1.21706 -0.78824 1.08278 2.37445 -0.39225 0.38602 -1.97540
## 9 10 11
## -0.67930 1.14224 0.06676
##
## Coefficients:
## Estimate Std. Error t value Pr(>|t|)
## (Intercept) 3.301e+03 3.748e+03 0.881 0.540
## open.lag -1.266e+00 6.341e+00 -0.200 0.875
## high.lag -7.320e+00 1.074e+01 -0.681 0.619
## low.lag 2.383e-01 1.101e+01 0.022 0.986
## close.lag -6.260e+01 9.060e+01 -0.691 0.615
## next_weeks_open.lag -9.314e+00 1.434e+01 -0.650 0.633
## next_weeks_close.lag -7.428e-02 4.159e+00 -0.018 0.989
## percent_return_next_dividend.lag -1.881e+03 2.201e+03 -0.855 0.550
## volume 1.315e-08 4.463e-08 0.295 0.818
##
## (Dispersion parameter for gaussian family taken to be 14.88865)
##
## Null deviance: 53.635 on 9 degrees of freedom
## Residual deviance: 14.889 on 1 degrees of freedom
## (1 observation deleted due to missingness)
## AIC: 52.359
##
## Number of Fisher Scoring iterations: 2
##
## Accuracy Kappa AccuracyLower AccuracyUpper AccuracyNull
## 0.6923077 0.3809524 0.3857383 0.9090796 0.5384615
## AccuracyPValue McnemarPValue
## 0.2032927 1.0000000
##
## Call:
## glm(formula = formula, data = trainstock)
##
## Deviance Residuals:
## 2 3 4 5 6 7
## 0.012086 0.031377 0.016816 -0.094080 -0.018603 0.049501
## 8 9 10 11
## 0.006441 0.011092 -0.096165 0.081536
##
## Coefficients:
## Estimate Std. Error t value Pr(>|t|)
## (Intercept) 1.876e+02 1.580e+01 11.868 0.0535 .
## open.lag 2.608e+00 3.536e-01 7.376 0.0858 .
## high.lag -1.255e+00 3.622e-01 -3.465 0.1789
## low.lag -3.909e+00 3.625e-01 -10.784 0.0589 .
## close.lag 6.322e-01 4.990e-01 1.267 0.4254
## next_weeks_open.lag 2.178e+00 6.582e-01 3.310 0.1868
## next_weeks_close.lag -2.287e+00 1.154e-01 -19.820 0.0321 *
## percent_return_next_dividend.lag -7.284e+01 4.560e+00 -15.974 0.0398 *
## volume -3.217e-08 7.725e-09 -4.165 0.1500
## ---
## Signif. codes: 0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
##
## (Dispersion parameter for gaussian family taken to be 0.02912122)
##
## Null deviance: 31.295553 on 9 degrees of freedom
## Residual deviance: 0.029121 on 1 degrees of freedom
## (1 observation deleted due to missingness)
## AIC: -10.01
##
## Number of Fisher Scoring iterations: 2
##
## Accuracy Kappa AccuracyLower AccuracyUpper AccuracyNull
## 0.53846154 0.09302326 0.25134548 0.80776756 0.61538462
## AccuracyPValue McnemarPValue
## 0.80507595 0.68309140
##
## Call:
## glm(formula = formula, data = trainstock)
##
## Deviance Residuals:
## 2 3 4 5 6 7 8
## -0.00595 -0.52585 0.56567 -0.13588 0.19354 -0.22117 0.50470
## 9 10 11
## -0.66253 0.19459 0.09288
##
## Coefficients:
## Estimate Std. Error t value Pr(>|t|)
## (Intercept) -1.777e+04 9.739e+03 -1.825 0.319
## open.lag 1.278e+01 7.272e+00 1.757 0.329
## high.lag -7.745e+00 5.036e+00 -1.538 0.367
## low.lag -9.113e+00 6.475e+00 -1.407 0.393
## close.lag 1.984e+02 1.105e+02 1.795 0.324
## next_weeks_open.lag -8.436e+00 5.781e+00 -1.459 0.382
## next_weeks_close.lag 6.897e+00 3.861e+00 1.786 0.325
## percent_return_next_dividend.lag 1.635e+04 8.937e+03 1.829 0.318
## volume 8.285e-08 6.322e-08 1.311 0.415
##
## (Dispersion parameter for gaussian family taken to be 1.441526)
##
## Null deviance: 46.6293 on 9 degrees of freedom
## Residual deviance: 1.4415 on 1 degrees of freedom
## (1 observation deleted due to missingness)
## AIC: 29.01
##
## Number of Fisher Scoring iterations: 2
##
## Accuracy Kappa AccuracyLower AccuracyUpper AccuracyNull
## 0.7692308 0.5301205 0.4618685 0.9496189 0.6153846
## AccuracyPValue McnemarPValue
## 0.1986072 1.0000000
##
## Call:
## glm(formula = formula, data = trainstock)
##
## Deviance Residuals:
## 2 3 4 5 6 7 8
## 0.42242 -0.76378 0.20828 0.09029 0.05549 -0.11299 -0.04609
## 9 10 11
## -0.14013 0.27167 0.01484
##
## Coefficients:
## Estimate Std. Error t value Pr(>|t|)
## (Intercept) 2.901e+02 3.976e+03 0.073 0.954
## open.lag -6.864e-01 9.488e-01 -0.723 0.601
## high.lag 5.634e+00 1.904e+00 2.960 0.207
## low.lag 1.885e+00 2.704e+00 0.697 0.612
## close.lag -3.611e+00 3.144e+01 -0.115 0.927
## next_weeks_open.lag -6.751e+00 2.098e+00 -3.218 0.192
## next_weeks_close.lag 1.291e+00 1.144e+00 1.129 0.461
## percent_return_next_dividend.lag -2.078e+02 2.700e+03 -0.077 0.951
## volume 5.644e-08 7.570e-08 0.746 0.592
##
## (Dispersion parameter for gaussian family taken to be 0.9249677)
##
## Null deviance: 30.78673 on 9 degrees of freedom
## Residual deviance: 0.92497 on 1 degrees of freedom
## (1 observation deleted due to missingness)
## AIC: 24.573
##
## Number of Fisher Scoring iterations: 2
##
## Accuracy Kappa AccuracyLower AccuracyUpper AccuracyNull
## 0.23076923 -0.51162791 0.05038107 0.53813154 0.61538462
## AccuracyPValue McnemarPValue
## 0.99910735 0.75182963
##
## Call:
## glm(formula = formula, data = trainstock)
##
## Deviance Residuals:
## 2 3 4 5 6 7 8
## -0.11824 0.88587 -0.10881 -0.90632 -0.03072 0.46258 0.40047
## 9 10 11
## -0.59638 -0.07876 0.09032
##
## Coefficients:
## Estimate Std. Error t value Pr(>|t|)
## (Intercept) -1.342e+04 9.577e+03 -1.401 0.395
## open.lag 4.568e+00 2.984e+00 1.531 0.368
## high.lag -4.490e+00 3.188e+00 -1.409 0.393
## low.lag -1.017e+01 4.508e+00 -2.257 0.266
## close.lag 2.243e+02 1.552e+02 1.446 0.385
## next_weeks_open.lag 3.022e+00 6.897e+00 0.438 0.737
## next_weeks_close.lag -3.116e+00 2.000e+00 -1.558 0.363
## percent_return_next_dividend.lag 7.256e+03 5.157e+03 1.407 0.393
## volume -1.301e-07 1.499e-07 -0.868 0.545
##
## (Dispersion parameter for gaussian family taken to be 2.377337)
##
## Null deviance: 32.0891 on 9 degrees of freedom
## Residual deviance: 2.3773 on 1 degrees of freedom
## (1 observation deleted due to missingness)
## AIC: 34.013
##
## Number of Fisher Scoring iterations: 2
##
## Accuracy Kappa AccuracyLower AccuracyUpper AccuracyNull
## 0.3846154 -0.2380952 0.1385793 0.6842224 0.5384615
## AccuracyPValue McnemarPValue
## 0.9180193 1.0000000
##
## Call:
## glm(formula = formula, data = trainstock)
##
## Deviance Residuals:
## 2 3 4 5 6 7 8
## 0.08836 -0.83493 0.57110 0.09497 0.76710 -0.30781 0.97224
## 9 10 11
## -1.09509 0.48000 -0.73595
##
## Coefficients:
## Estimate Std. Error t value Pr(>|t|)
## (Intercept) -1.105e+04 1.337e+04 -0.827 0.560
## open.lag -4.624e-01 1.504e+00 -0.307 0.810
## high.lag -7.977e-01 1.668e+00 -0.478 0.716
## low.lag 2.211e+00 1.790e+00 1.235 0.433
## close.lag 7.345e+01 9.029e+01 0.814 0.565
## next_weeks_open.lag -5.186e-01 3.082e+00 -0.168 0.894
## next_weeks_close.lag -4.329e-01 1.118e+00 -0.387 0.765
## percent_return_next_dividend.lag 6.814e+03 8.194e+03 0.832 0.558
## volume 2.056e-09 1.118e-07 0.018 0.988
##
## (Dispersion parameter for gaussian family taken to be 4.639793)
##
## Null deviance: 40.0671 on 9 degrees of freedom
## Residual deviance: 4.6398 on 1 degrees of freedom
## (1 observation deleted due to missingness)
## AIC: 40.7
##
## Number of Fisher Scoring iterations: 2
##
## Accuracy Kappa AccuracyLower AccuracyUpper AccuracyNull
## 0.46153846 -0.04597701 0.19223244 0.74865452 0.69230769
## AccuracyPValue McnemarPValue
## 0.97867974 0.44969180
##
## Call:
## glm(formula = formula, data = trainstock)
##
## Deviance Residuals:
## 2 3 4 5 6 7 8
## -0.36999 0.04079 0.10917 0.11795 0.47845 0.73723 0.52067
## 9 10 11
## -1.64248 0.09085 -0.08265
##
## Coefficients:
## Estimate Std. Error t value Pr(>|t|)
## (Intercept) -1.260e+04 1.139e+04 -1.106 0.468
## open.lag 8.747e-01 1.230e+00 0.711 0.607
## high.lag 4.208e-01 1.386e+00 0.304 0.812
## low.lag -1.196e+00 1.701e+00 -0.703 0.610
## close.lag 7.039e+01 6.242e+01 1.128 0.462
## next_weeks_open.lag -1.461e+00 2.021e+00 -0.723 0.602
## next_weeks_close.lag 5.545e-01 9.509e-01 0.583 0.664
## percent_return_next_dividend.lag 1.036e+04 9.329e+03 1.110 0.467
## volume 4.642e-07 5.020e-07 0.925 0.525
##
## (Dispersion parameter for gaussian family taken to be 3.920724)
##
## Null deviance: 42.6031 on 9 degrees of freedom
## Residual deviance: 3.9207 on 1 degrees of freedom
## (1 observation deleted due to missingness)
## AIC: 39.016
##
## Number of Fisher Scoring iterations: 2
##
## Accuracy Kappa AccuracyLower AccuracyUpper AccuracyNull
## 0.46153846 -0.07058824 0.19223244 0.74865452 0.53846154
## AccuracyPValue McnemarPValue
## 0.79806522 1.00000000
##
## Call:
## glm(formula = formula, data = trainstock)
##
## Deviance Residuals:
## 2 3 4 5 6 7 8
## 0.11725 -0.18912 0.00719 -0.21501 -0.30547 0.74500 0.07770
## 9 10 11
## 0.29963 -0.49461 -0.04255
##
## Coefficients:
## Estimate Std. Error t value Pr(>|t|)
## (Intercept) -7.910e+03 8.171e+03 -0.968 0.510
## open.lag -4.751e+00 6.033e+00 -0.787 0.575
## high.lag 3.992e+00 4.422e+00 0.903 0.533
## low.lag 8.779e+00 8.460e+00 1.038 0.488
## close.lag 1.044e+02 1.126e+02 0.927 0.524
## next_weeks_open.lag 8.935e+00 5.823e+00 1.535 0.368
## next_weeks_close.lag -4.101e+00 1.344e+00 -3.051 0.202
## percent_return_next_dividend.lag 3.509e+03 3.569e+03 0.983 0.505
## volume -1.575e-08 1.600e-08 -0.985 0.505
##
## (Dispersion parameter for gaussian family taken to be 1.086405)
##
## Null deviance: 27.3820 on 9 degrees of freedom
## Residual deviance: 1.0864 on 1 degrees of freedom
## (1 observation deleted due to missingness)
## AIC: 26.182
##
## Number of Fisher Scoring iterations: 2
##
## Accuracy Kappa AccuracyLower AccuracyUpper AccuracyNull
## 0.6153846 0.2352941 0.3157776 0.8614207 0.5384615
## AccuracyPValue McnemarPValue
## 0.3937754 1.0000000
##
## Call:
## glm(formula = formula, data = trainstock)
##
## Deviance Residuals:
## 2 3 4 5 6 7 8
## -0.46375 -0.21169 -0.37494 0.78964 1.22996 -0.45205 0.09962
## 9 10 11
## -1.17193 0.28095 0.27419
##
## Coefficients:
## Estimate Std. Error t value Pr(>|t|)
## (Intercept) -1.957e+03 1.876e+03 -1.043 0.487
## open.lag 7.713e-01 7.481e+00 0.103 0.935
## high.lag 1.009e+00 3.008e+00 0.335 0.794
## low.lag 1.730e+00 1.045e+01 0.166 0.896
## close.lag 3.828e+01 3.495e+01 1.095 0.471
## next_weeks_open.lag -1.987e+00 7.625e+00 -0.261 0.838
## next_weeks_close.lag -3.339e+00 4.976e+00 -0.671 0.624
## percent_return_next_dividend.lag 1.632e+03 1.554e+03 1.050 0.484
## volume 3.315e-09 1.655e-08 0.200 0.874
##
## (Dispersion parameter for gaussian family taken to be 4.27861)
##
## Null deviance: 16.4064 on 9 degrees of freedom
## Residual deviance: 4.2786 on 1 degrees of freedom
## (1 observation deleted due to missingness)
## AIC: 39.889
##
## Number of Fisher Scoring iterations: 2
##
## Accuracy Kappa AccuracyLower AccuracyUpper AccuracyNull
## 0.92307692 0.84337349 0.63970256 0.99805437 0.53846154
## AccuracyPValue McnemarPValue
## 0.00388447 1.00000000
##
## Call:
## glm(formula = formula, data = trainstock)
##
## Deviance Residuals:
## 2 3 4 5 6 7 8
## -1.94888 1.65512 -0.01126 -1.02891 -0.57389 0.45331 1.70917
## 9 10 11
## 0.96130 -1.30717 0.09120
##
## Coefficients:
## Estimate Std. Error t value Pr(>|t|)
## (Intercept) 4.791e+01 2.764e+03 0.017 0.989
## open.lag 1.034e+00 8.975e+00 0.115 0.927
## high.lag 4.839e+00 1.657e+01 0.292 0.819
## low.lag 1.460e+00 1.662e+01 0.088 0.944
## close.lag -2.908e+00 8.573e+01 -0.034 0.978
## next_weeks_open.lag 2.627e+00 2.236e+01 0.117 0.926
## next_weeks_close.lag -8.131e+00 8.970e+00 -0.907 0.531
## percent_return_next_dividend.lag -2.725e+01 1.314e+03 -0.021 0.987
## volume 1.557e-08 1.889e-08 0.824 0.561
##
## (Dispersion parameter for gaussian family taken to be 13.69351)
##
## Null deviance: 63.495 on 9 degrees of freedom
## Residual deviance: 13.694 on 1 degrees of freedom
## (1 observation deleted due to missingness)
## AIC: 51.522
##
## Number of Fisher Scoring iterations: 2
##
## Accuracy Kappa AccuracyLower AccuracyUpper AccuracyNull
## 0.84615385 0.67500000 0.54552894 0.98079333 0.61538462
## AccuracyPValue McnemarPValue
## 0.07186756 1.00000000
##
## Call:
## glm(formula = formula, data = trainstock)
##
## Deviance Residuals:
## 2 3 4 5 6 7 8
## 0.00029 -0.10692 0.13324 -0.07502 0.12966 -0.04195 0.30333
## 9 10 11
## -0.62884 0.17079 0.11543
##
## Coefficients:
## Estimate Std. Error t value Pr(>|t|)
## (Intercept) -4.661e+01 5.686e+01 -0.820 0.563
## open.lag -7.787e-01 1.456e+00 -0.535 0.687
## high.lag 1.059e+00 1.107e+00 0.956 0.514
## low.lag -3.917e+00 1.980e+00 -1.979 0.298
## close.lag 3.493e+00 3.627e+00 0.963 0.512
## next_weeks_open.lag 1.169e+00 1.758e+00 0.665 0.626
## next_weeks_close.lag -1.166e+00 7.068e-01 -1.650 0.347
## percent_return_next_dividend.lag 5.878e+01 3.820e+01 1.539 0.367
## volume 2.958e-08 4.256e-08 0.695 0.613
##
## (Dispersion parameter for gaussian family taken to be 0.5833192)
##
## Null deviance: 13.87737 on 9 degrees of freedom
## Residual deviance: 0.58332 on 1 degrees of freedom
## (1 observation deleted due to missingness)
## AIC: 19.963
##
## Number of Fisher Scoring iterations: 2
##
## Accuracy Kappa AccuracyLower AccuracyUpper AccuracyNull
## 0.53846154 0.07142857 0.25134548 0.80776756 0.53846154
## AccuracyPValue McnemarPValue
## 0.61146992 1.00000000
##
## Call:
## glm(formula = formula, data = trainstock)
##
## Deviance Residuals:
## 2 3 4 5 6 7 8 9
## -0.7966 -0.6692 -0.1787 -0.2110 -0.8526 1.0149 0.5474 0.1429
## 10 11
## 0.5550 0.4480
##
## Coefficients:
## Estimate Std. Error t value Pr(>|t|)
## (Intercept) -1.733e+04 1.961e+04 -0.884 0.539
## open.lag -9.973e+00 5.434e+00 -1.835 0.318
## high.lag 2.319e+00 5.259e+00 0.441 0.736
## low.lag 7.766e+00 6.935e+00 1.120 0.464
## close.lag 2.898e+02 3.482e+02 0.832 0.558
## next_weeks_open.lag 1.976e+01 7.957e+00 2.483 0.244
## next_weeks_close.lag -5.632e+00 2.913e+00 -1.933 0.304
## percent_return_next_dividend.lag 5.748e+03 6.428e+03 0.894 0.536
## volume -6.476e-08 5.418e-08 -1.195 0.444
##
## (Dispersion parameter for gaussian family taken to be 3.744787)
##
## Null deviance: 53.6770 on 9 degrees of freedom
## Residual deviance: 3.7448 on 1 degrees of freedom
## (1 observation deleted due to missingness)
## AIC: 38.557
##
## Number of Fisher Scoring iterations: 2
##
## Accuracy Kappa AccuracyLower AccuracyUpper AccuracyNull
## 0.46153846 0.04210526 0.19223244 0.74865452 0.61538462
## AccuracyPValue McnemarPValue
## 0.92110952 0.13057002
##
## Call:
## glm(formula = formula, data = trainstock)
##
## Deviance Residuals:
## 2 3 4 5 6 7 8
## 0.14190 -0.24672 -0.37580 1.10050 -0.59954 -0.04866 0.88200
## 9 10 11
## -0.85660 -0.12269 0.12560
##
## Coefficients:
## Estimate Std. Error t value Pr(>|t|)
## (Intercept) -1.057e+01 3.870e+01 -0.273 0.830
## open.lag -3.935e+00 3.543e+00 -1.111 0.467
## high.lag -1.030e+00 1.568e+00 -0.657 0.630
## low.lag 7.335e+00 7.030e+00 1.043 0.486
## close.lag -4.446e-01 3.790e+00 -0.117 0.926
## next_weeks_open.lag -1.683e+00 5.775e+00 -0.291 0.819
## next_weeks_close.lag -1.365e-01 8.329e-01 -0.164 0.897
## percent_return_next_dividend.lag 1.655e+01 2.002e+01 0.826 0.560
## volume 1.108e-07 1.750e-07 0.633 0.641
##
## (Dispersion parameter for gaussian family taken to be 3.337651)
##
## Null deviance: 29.0020 on 9 degrees of freedom
## Residual deviance: 3.3377 on 1 degrees of freedom
## (1 observation deleted due to missingness)
## AIC: 37.406
##
## Number of Fisher Scoring iterations: 2
##
## Accuracy Kappa AccuracyLower AccuracyUpper AccuracyNull
## 0.6153846 0.1558442 0.3157776 0.8614207 0.6153846
## AccuracyPValue McnemarPValue
## 0.6194222 1.0000000
##
## Call:
## glm(formula = formula, data = trainstock)
##
## Deviance Residuals:
## 2 3 4 5 6 7 8
## 0.07515 -0.30274 -0.05030 0.35520 -0.07556 0.03188 -0.55782
## 9 10 11
## 0.46519 0.07114 -0.01213
##
## Coefficients:
## Estimate Std. Error t value Pr(>|t|)
## (Intercept) -5.448e+01 4.540e+01 -1.200 0.442
## open.lag -2.306e+00 1.001e+00 -2.305 0.261
## high.lag 2.072e+00 5.905e-01 3.508 0.177
## low.lag 1.372e+00 8.465e-01 1.621 0.352
## close.lag -5.719e-01 1.766e+00 -0.324 0.801
## next_weeks_open.lag -1.750e+00 1.386e+00 -1.262 0.427
## next_weeks_close.lag 1.336e+00 4.988e-01 2.678 0.228
## percent_return_next_dividend.lag 7.281e+01 3.532e+01 2.062 0.288
## volume 7.697e-08 1.377e-07 0.559 0.675
##
## (Dispersion parameter for gaussian family taken to be 0.7654922)
##
## Null deviance: 27.38573 on 9 degrees of freedom
## Residual deviance: 0.76549 on 1 degrees of freedom
## (1 observation deleted due to missingness)
## AIC: 22.681
##
## Number of Fisher Scoring iterations: 2
##
## Accuracy Kappa AccuracyLower AccuracyUpper AccuracyNull
## 0.6153846 0.2168675 0.3157776 0.8614207 0.6153846
## AccuracyPValue McnemarPValue
## 0.6194222 1.0000000
##
## Call:
## glm(formula = formula, data = trainstock)
##
## Deviance Residuals:
## 2 3 4 5 6 7
## -0.069802 0.006460 0.011370 -0.123693 -0.065919 0.079207
## 8 9 10 11
## 0.000299 0.055894 0.076936 0.029249
##
## Coefficients:
## Estimate Std. Error t value Pr(>|t|)
## (Intercept) -8.557e+03 1.645e+03 -5.202 0.1209
## open.lag -1.329e+00 2.713e-01 -4.899 0.1282
## high.lag 2.935e+00 4.999e-01 5.872 0.1074
## low.lag -1.820e+00 2.730e-01 -6.667 0.0948 .
## close.lag 1.171e+02 2.216e+01 5.283 0.1191
## next_weeks_open.lag 5.075e+00 9.378e-01 5.411 0.1163
## next_weeks_close.lag -2.091e+00 3.515e-01 -5.949 0.1060
## percent_return_next_dividend.lag 3.112e+03 5.972e+02 5.210 0.1207
## volume 6.452e-08 6.019e-09 10.720 0.0592 .
## ---
## Signif. codes: 0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
##
## (Dispersion parameter for gaussian family taken to be 0.0408612)
##
## Null deviance: 13.622563 on 9 degrees of freedom
## Residual deviance: 0.040861 on 1 degrees of freedom
## (1 observation deleted due to missingness)
## AIC: -6.6228
##
## Number of Fisher Scoring iterations: 2
##
## Accuracy Kappa AccuracyLower AccuracyUpper AccuracyNull
## 0.46153846 -0.09638554 0.19223244 0.74865452 0.61538462
## AccuracyPValue McnemarPValue
## 0.92110952 1.00000000
##
## Call:
## glm(formula = formula, data = trainstock)
##
## Deviance Residuals:
## 2 3 4 5 6 7 8
## -0.28095 0.29300 -0.08076 0.23686 -0.20242 -0.10799 -0.07632
## 9 10 11
## -0.06589 0.33236 -0.04789
##
## Coefficients:
## Estimate Std. Error t value Pr(>|t|)
## (Intercept) 2.340e+03 7.087e+02 3.301 0.187
## open.lag 2.132e+00 4.892e-01 4.358 0.144
## high.lag -5.095e-01 4.975e-01 -1.024 0.492
## low.lag -6.745e+00 1.141e+00 -5.909 0.107
## close.lag -2.046e+01 6.971e+00 -2.935 0.209
## next_weeks_open.lag 4.719e+00 1.316e+00 3.586 0.173
## next_weeks_close.lag -9.404e-01 3.493e-01 -2.692 0.226
## percent_return_next_dividend.lag -1.706e+03 5.180e+02 -3.293 0.188
## volume 5.862e-08 3.050e-08 1.922 0.305
##
## (Dispersion parameter for gaussian family taken to be 0.4029669)
##
## Null deviance: 25.69711 on 9 degrees of freedom
## Residual deviance: 0.40297 on 1 degrees of freedom
## (1 observation deleted due to missingness)
## AIC: 16.264
##
## Number of Fisher Scoring iterations: 2
##
## Accuracy Kappa AccuracyLower AccuracyUpper AccuracyNull
## 0.6153846 0.2168675 0.3157776 0.8614207 0.6153846
## AccuracyPValue McnemarPValue
## 0.6194222 1.0000000
##
## Call:
## glm(formula = formula, data = trainstock)
##
## Deviance Residuals:
## 2 3 4 5 6 7 8 9
## -1.3106 1.9004 -0.8624 -0.0243 -0.3120 0.3152 0.3052 -0.8658
## 10 11
## -0.1419 0.9961
##
## Coefficients:
## Estimate Std. Error t value Pr(>|t|)
## (Intercept) 2.589e+02 3.308e+02 0.783 0.577
## open.lag 3.469e+00 3.377e+00 1.027 0.491
## high.lag -1.875e+00 1.898e+00 -0.988 0.504
## low.lag -3.731e+00 2.874e+00 -1.298 0.418
## close.lag -1.622e+00 5.862e+00 -0.277 0.828
## next_weeks_open.lag 2.609e+00 4.703e+00 0.555 0.678
## next_weeks_close.lag -4.883e-01 8.753e-01 -0.558 0.676
## percent_return_next_dividend.lag -2.262e+02 2.977e+02 -0.760 0.586
## volume -4.127e-08 8.741e-08 -0.472 0.719
##
## (Dispersion parameter for gaussian family taken to be 8.125535)
##
## Null deviance: 52.3465 on 9 degrees of freedom
## Residual deviance: 8.1255 on 1 degrees of freedom
## (1 observation deleted due to missingness)
## AIC: 46.303
##
## Number of Fisher Scoring iterations: 2
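Scanning the per-stock printouts above is unwieldy; a compact alternative, sketched here rather than run in the original analysis, recomputes each stock's directional test accuracy into one named vector:
# Directional accuracy (predicted vs. actual up/down weeks) per stock
linear_acc <- sapply(names(train1), function(l) {
  fit <- glm(formula, data = train1[[l]])
  pred <- predict(fit, newdata = test1[[l]])
  pred_dir <- pred >= mean(pred)
  obs_dir <- test1[[l]]$percent_change_next_weeks_price >=
    mean(test1[[l]]$percent_change_next_weeks_price)
  mean(pred_dir == obs_dir)
})
round(sort(linear_acc, decreasing = TRUE), 3)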
library(tree)
library(caret)
## Warning: package 'caret' was built under R version 3.5.2
## Loading required package: lattice
## Loading required package: ggplot2
treefxn <- function(trainstock, teststock, formula) {
  set.seed(123)
  treefit <- tree(formula, data = trainstock)
  tree.predict <- predict(treefit, newdata = teststock)
  # Binarize predictions and observations around their respective means
  tree.predict <- ifelse(tree.predict >= mean(tree.predict), 1, 0)
  tree.obs <- ifelse(teststock$percent_change_next_weeks_price >= mean(teststock$percent_change_next_weeks_price), 1, 0)
  tab <- table(tree.obs, tree.predict)
  print(summary(treefit))
  accuracy <- sum(diag(tab)) / sum(tab)  # proportion correct over all scored test weeks
  print(accuracy)
}
formula <- percent_change_next_weeks_price ~ open.lag + high.lag + low.lag + close.lag + next_weeks_open.lag + next_weeks_close.lag + percent_return_next_dividend.lag + volume
for (i in names(train1)) {
  x <- train1[[i]]
  y <- test1[[i]]
  treefxn(x, y, formula)
}
##
## Regression tree:
## tree(formula = formula, data = trainstock)
## Variables actually used in tree construction:
## character(0)
## Number of terminal nodes: 1
## Residual mean deviance: 0 = 0 / 9
## Distribution of residuals:
## Min. 1st Qu. Median Mean 3rd Qu. Max.
## 0 0 0 0 0 0
## [1] 0.5
##
## Regression tree:
## tree(formula = formula, data = trainstock)
## Variables actually used in tree construction:
## character(0)
## Number of terminal nodes: 1
## Residual mean deviance: 0 = 0 / 9
## Distribution of residuals:
## Min. 1st Qu. Median Mean 3rd Qu. Max.
## 0 0 0 0 0 0
## [1] 0.5833333
##
## Regression tree:
## tree(formula = formula, data = trainstock)
## Variables actually used in tree construction:
## character(0)
## Number of terminal nodes: 1
## Residual mean deviance: 0 = 0 / 9
## Distribution of residuals:
## Min. 1st Qu. Median Mean 3rd Qu. Max.
## 0 0 0 0 0 0
## [1] 0.5833333
##
## Regression tree:
## tree(formula = formula, data = trainstock)
## Variables actually used in tree construction:
## character(0)
## Number of terminal nodes: 1
## Residual mean deviance: 0 = 0 / 9
## Distribution of residuals:
## Min. 1st Qu. Median Mean 3rd Qu. Max.
## 0 0 0 0 0 0
## [1] 0.5833333
##
## Regression tree:
## tree(formula = formula, data = trainstock)
## Variables actually used in tree construction:
## character(0)
## Number of terminal nodes: 1
## Residual mean deviance: 0 = 0 / 9
## Distribution of residuals:
## Min. 1st Qu. Median Mean 3rd Qu. Max.
## 0 0 0 0 0 0
## [1] 0.6666667
##
## Regression tree:
## tree(formula = formula, data = trainstock)
## Variables actually used in tree construction:
## character(0)
## Number of terminal nodes: 1
## Residual mean deviance: 0 = 0 / 9
## Distribution of residuals:
## Min. 1st Qu. Median Mean 3rd Qu. Max.
## 0 0 0 0 0 0
## [1] 0.5
##
## Regression tree:
## tree(formula = formula, data = trainstock)
## Variables actually used in tree construction:
## character(0)
## Number of terminal nodes: 1
## Residual mean deviance: 0 = 0 / 9
## Distribution of residuals:
## Min. 1st Qu. Median Mean 3rd Qu. Max.
## 0 0 0 0 0 0
## [1] 0.5833333
##
## Regression tree:
## tree(formula = formula, data = trainstock)
## Variables actually used in tree construction:
## character(0)
## Number of terminal nodes: 1
## Residual mean deviance: 0 = 0 / 9
## Distribution of residuals:
## Min. 1st Qu. Median Mean 3rd Qu. Max.
## 0 0 0 0 0 0
## [1] 0.6666667
##
## Regression tree:
## tree(formula = formula, data = trainstock)
## Variables actually used in tree construction:
## character(0)
## Number of terminal nodes: 1
## Residual mean deviance: 0 = 0 / 9
## Distribution of residuals:
## Min. 1st Qu. Median Mean 3rd Qu. Max.
## 0 0 0 0 0 0
## [1] 0.6666667
##
## Regression tree:
## tree(formula = formula, data = trainstock)
## Variables actually used in tree construction:
## character(0)
## Number of terminal nodes: 1
## Residual mean deviance: 0 = 0 / 9
## Distribution of residuals:
## Min. 1st Qu. Median Mean 3rd Qu. Max.
## 0 0 0 0 0 0
## [1] 0.5833333
##
## Regression tree:
## tree(formula = formula, data = trainstock)
## Variables actually used in tree construction:
## character(0)
## Number of terminal nodes: 1
## Residual mean deviance: 0 = 0 / 9
## Distribution of residuals:
## Min. 1st Qu. Median Mean 3rd Qu. Max.
## 0 0 0 0 0 0
## [1] 0.6666667
##
## Regression tree:
## tree(formula = formula, data = trainstock)
## Variables actually used in tree construction:
## character(0)
## Number of terminal nodes: 1
## Residual mean deviance: 0 = 0 / 9
## Distribution of residuals:
## Min. 1st Qu. Median Mean 3rd Qu. Max.
## 0 0 0 0 0 0
## [1] 0.5
##
## Regression tree:
## tree(formula = formula, data = trainstock)
## Variables actually used in tree construction:
## character(0)
## Number of terminal nodes: 1
## Residual mean deviance: 0 = 0 / 9
## Distribution of residuals:
## Min. 1st Qu. Median Mean 3rd Qu. Max.
## 0 0 0 0 0 0
## [1] 0.5833333
##
## Regression tree:
## tree(formula = formula, data = trainstock)
## Variables actually used in tree construction:
## character(0)
## Number of terminal nodes: 1
## Residual mean deviance: 0 = 0 / 9
## Distribution of residuals:
## Min. 1st Qu. Median Mean 3rd Qu. Max.
## 0 0 0 0 0 0
## [1] 0.8333333
##
## Regression tree:
## tree(formula = formula, data = trainstock)
## Variables actually used in tree construction:
## character(0)
## Number of terminal nodes: 1
## Residual mean deviance: 0 = 0 / 9
## Distribution of residuals:
## Min. 1st Qu. Median Mean 3rd Qu. Max.
## 0 0 0 0 0 0
## [1] 0.5833333
##
## Regression tree:
## tree(formula = formula, data = trainstock)
## Variables actually used in tree construction:
## character(0)
## Number of terminal nodes: 1
## Residual mean deviance: 0 = 0 / 9
## Distribution of residuals:
## Min. 1st Qu. Median Mean 3rd Qu. Max.
## 0 0 0 0 0 0
## [1] 0.5
##
## Regression tree:
## tree(formula = formula, data = trainstock)
## Variables actually used in tree construction:
## character(0)
## Number of terminal nodes: 1
## Residual mean deviance: 0 = 0 / 9
## Distribution of residuals:
## Min. 1st Qu. Median Mean 3rd Qu. Max.
## 0 0 0 0 0 0
## [1] 0.5833333
##
## Regression tree:
## tree(formula = formula, data = trainstock)
## Variables actually used in tree construction:
## character(0)
## Number of terminal nodes: 1
## Residual mean deviance: 0 = 0 / 9
## Distribution of residuals:
## Min. 1st Qu. Median Mean 3rd Qu. Max.
## 0 0 0 0 0 0
## [1] 0.5
##
## Regression tree:
## tree(formula = formula, data = trainstock)
## Variables actually used in tree construction:
## character(0)
## Number of terminal nodes: 1
## Residual mean deviance: 0 = 0 / 9
## Distribution of residuals:
## Min. 1st Qu. Median Mean 3rd Qu. Max.
## 0 0 0 0 0 0
## [1] 0.5833333
##
## Regression tree:
## tree(formula = formula, data = trainstock)
## Variables actually used in tree construction:
## character(0)
## Number of terminal nodes: 1
## Residual mean deviance: 0 = 0 / 9
## Distribution of residuals:
## Min. 1st Qu. Median Mean 3rd Qu. Max.
## 0 0 0 0 0 0
## [1] 0.5
##
## Regression tree:
## tree(formula = formula, data = trainstock)
## Variables actually used in tree construction:
## character(0)
## Number of terminal nodes: 1
## Residual mean deviance: 0 = 0 / 9
## Distribution of residuals:
## Min. 1st Qu. Median Mean 3rd Qu. Max.
## 0 0 0 0 0 0
## [1] 0.5833333
##
## Regression tree:
## tree(formula = formula, data = trainstock)
## Variables actually used in tree construction:
## character(0)
## Number of terminal nodes: 1
## Residual mean deviance: 0 = 0 / 9
## Distribution of residuals:
## Min. 1st Qu. Median Mean 3rd Qu. Max.
## 0 0 0 0 0 0
## [1] 0.5
##
## Regression tree:
## tree(formula = formula, data = trainstock)
## Variables actually used in tree construction:
## character(0)
## Number of terminal nodes: 1
## Residual mean deviance: 0 = 0 / 9
## Distribution of residuals:
## Min. 1st Qu. Median Mean 3rd Qu. Max.
## 0 0 0 0 0 0
## [1] 0.6666667
##
## Regression tree:
## tree(formula = formula, data = trainstock)
## Variables actually used in tree construction:
## character(0)
## Number of terminal nodes: 1
## Residual mean deviance: 0 = 0 / 9
## Distribution of residuals:
## Min. 1st Qu. Median Mean 3rd Qu. Max.
## 0 0 0 0 0 0
## [1] 0.6666667
##
## Regression tree:
## tree(formula = formula, data = trainstock)
## Variables actually used in tree construction:
## character(0)
## Number of terminal nodes: 1
## Residual mean deviance: 0 = 0 / 9
## Distribution of residuals:
## Min. 1st Qu. Median Mean 3rd Qu. Max.
## 0 0 0 0 0 0
## [1] 0.5833333
##
## Regression tree:
## tree(formula = formula, data = trainstock)
## Variables actually used in tree construction:
## character(0)
## Number of terminal nodes: 1
## Residual mean deviance: 0 = 0 / 9
## Distribution of residuals:
## Min. 1st Qu. Median Mean 3rd Qu. Max.
## 0 0 0 0 0 0
## [1] 0.8333333
##
## Regression tree:
## tree(formula = formula, data = trainstock)
## Variables actually used in tree construction:
## character(0)
## Number of terminal nodes: 1
## Residual mean deviance: 0 = 0 / 9
## Distribution of residuals:
## Min. 1st Qu. Median Mean 3rd Qu. Max.
## 0 0 0 0 0 0
## [1] 0.75
##
## Regression tree:
## tree(formula = formula, data = trainstock)
## Variables actually used in tree construction:
## character(0)
## Number of terminal nodes: 1
## Residual mean deviance: 0 = 0 / 9
## Distribution of residuals:
## Min. 1st Qu. Median Mean 3rd Qu. Max.
## 0 0 0 0 0 0
## [1] 0.5833333
##
## Regression tree:
## tree(formula = formula, data = trainstock)
## Variables actually used in tree construction:
## character(0)
## Number of terminal nodes: 1
## Residual mean deviance: 0 = 0 / 9
## Distribution of residuals:
## Min. 1st Qu. Median Mean 3rd Qu. Max.
## 0 0 0 0 0 0
## [1] 0.5833333
##
## Regression tree:
## tree(formula = formula, data = trainstock)
## Variables actually used in tree construction:
## character(0)
## Number of terminal nodes: 1
## Residual mean deviance: 0 = 0 / 9
## Distribution of residuals:
## Min. 1st Qu. Median Mean 3rd Qu. Max.
## 0 0 0 0 0 0
## [1] 0.5833333
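The root-only, zero-deviance trees above indicate the response tree() actually fit was constant. That is the signature of model.frame()'s fallback when no usable formula reaches it: a formula is derived from the data frame itself, so its first column (quarter, constant within the quarter-1 training split) becomes the response, leaving nothing to split on. A minimal check of that reading, for one ticker:
# Response of the formula implied by a per-stock training frame
formula(as.data.frame(train1[["AA"]]))[[2]]   # quarter
var(train1[["AA"]]$quarter)                   # 0: constant response, no possible split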
library(e1071)
svmfxn <- function(trainstock, teststock, formula) {
  set.seed(123)
  # Radial-kernel eps-regression SVM with fixed hyperparameters
  svmfit <- svm(formula, data = trainstock, kernel = "radial", cost = 0.1, gamma = 0.01)
  svm.predict <- predict(svmfit, newdata = teststock)
  # Binarize predictions and observations around their respective means
  svm.predict <- ifelse(svm.predict >= mean(svm.predict), 1, 0)
  svm.obs <- ifelse(teststock$percent_change_next_weeks_price >= mean(teststock$percent_change_next_weeks_price), 1, 0)
  tab <- table(svm.obs, svm.predict)
  accuracy <- sum(diag(tab)) / sum(tab)  # proportion correct over all scored test weeks
  print(summary(svmfit))
  print(accuracy)
}
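cost and gamma above are fixed a priori. A hedged tuning sketch (not run in the original analysis; 10-fold cross-validation on roughly ten training weeks is very noisy) uses e1071's tune.svm on one ticker's training set, with the NA lag row dropped first:
# Grid-search cost/gamma for a single stock
tuned <- tune.svm(formula, data = na.omit(train1[["AA"]]),
                  gamma = 10^(-3:0), cost = 10^(-1:2))
tuned$best.parameters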
formula <- percent_change_next_weeks_price ~ open.lag + high.lag + low.lag + close.lag + next_weeks_open.lag + next_weeks_close.lag + percent_return_next_dividend.lag + volume
for (i in names(train1)) {
  x <- train1[[i]]
  y <- test1[[i]]
  svmfxn(x, y, formula)
}
##
## Call:
## svm(formula = formula, data = trainstock, kernel = "radial",
## cost = 0.1, gamma = 0.01)
##
##
## Parameters:
## SVM-Type: eps-regression
## SVM-Kernel: radial
## cost: 0.1
## gamma: 0.01
## epsilon: 0.1
##
##
## Number of Support Vectors: 10
##
##
##
##
##
## [1] 0.4166667
##
## Call:
## svm(formula = formula, data = trainstock, kernel = "radial",
## cost = 0.1, gamma = 0.01)
##
##
## Parameters:
## SVM-Type: eps-regression
## SVM-Kernel: radial
## cost: 0.1
## gamma: 0.01
## epsilon: 0.1
##
##
## Number of Support Vectors: 8
##
##
##
##
##
## [1] 0.4166667
##
## Call:
## svm(formula = formula, data = trainstock, kernel = "radial",
## cost = 0.1, gamma = 0.01)
##
##
## Parameters:
## SVM-Type: eps-regression
## SVM-Kernel: radial
## cost: 0.1
## gamma: 0.01
## epsilon: 0.1
##
##
## Number of Support Vectors: 8
##
##
##
##
##
## [1] 0.4166667
##
## Call:
## svm(formula = formula, data = trainstock, kernel = "radial",
## cost = 0.1, gamma = 0.01)
##
##
## Parameters:
## SVM-Type: eps-regression
## SVM-Kernel: radial
## cost: 0.1
## gamma: 0.01
## epsilon: 0.1
##
##
## Number of Support Vectors: 8
##
##
##
##
##
## [1] 0.5
##
## Call:
## svm(formula = formula, data = trainstock, kernel = "radial",
## cost = 0.1, gamma = 0.01)
##
##
## Parameters:
## SVM-Type: eps-regression
## SVM-Kernel: radial
## cost: 0.1
## gamma: 0.01
## epsilon: 0.1
##
##
## Number of Support Vectors: 8
##
##
##
##
##
## [1] 0.5833333
##
## Call:
## svm(formula = formula, data = trainstock, kernel = "radial",
## cost = 0.1, gamma = 0.01)
##
##
## Parameters:
## SVM-Type: eps-regression
## SVM-Kernel: radial
## cost: 0.1
## gamma: 0.01
## epsilon: 0.1
##
##
## Number of Support Vectors: 8
##
##
##
##
##
## [1] 0.5833333
##
## Call:
## svm(formula = formula, data = trainstock, kernel = "radial",
## cost = 0.1, gamma = 0.01)
##
##
## Parameters:
## SVM-Type: eps-regression
## SVM-Kernel: radial
## cost: 0.1
## gamma: 0.01
## epsilon: 0.1
##
##
## Number of Support Vectors: 10
##
##
##
##
##
## [1] 0.5833333
##
## Call:
## svm(formula = formula, data = trainstock, kernel = "radial",
## cost = 0.1, gamma = 0.01)
##
##
## Parameters:
## SVM-Type: eps-regression
## SVM-Kernel: radial
## cost: 0.1
## gamma: 0.01
## epsilon: 0.1
##
##
## Number of Support Vectors: 10
##
##
##
##
##
## [1] 0.5833333
##
## Call:
## svm(formula = formula, data = trainstock, kernel = "radial",
## cost = 0.1, gamma = 0.01)
##
##
## Parameters:
## SVM-Type: eps-regression
## SVM-Kernel: radial
## cost: 0.1
## gamma: 0.01
## epsilon: 0.1
##
##
## Number of Support Vectors: 8
##
##
##
##
##
## [1] 0.5833333
##
## Call:
## svm(formula = formula, data = trainstock, kernel = "radial",
## cost = 0.1, gamma = 0.01)
##
##
## Parameters:
## SVM-Type: eps-regression
## SVM-Kernel: radial
## cost: 0.1
## gamma: 0.01
## epsilon: 0.1
##
##
## Number of Support Vectors: 8
##
##
##
##
##
## [1] 0.5
##
## Call:
## svm(formula = formula, data = trainstock, kernel = "radial",
## cost = 0.1, gamma = 0.01)
##
##
## Parameters:
## SVM-Type: eps-regression
## SVM-Kernel: radial
## cost: 0.1
## gamma: 0.01
## epsilon: 0.1
##
##
## Number of Support Vectors: 8
##
##
##
##
##
## [1] 0.75
##
## Call:
## svm(formula = formula, data = trainstock, kernel = "radial",
## cost = 0.1, gamma = 0.01)
##
##
## Parameters:
## SVM-Type: eps-regression
## SVM-Kernel: radial
## cost: 0.1
## gamma: 0.01
## epsilon: 0.1
##
##
## Number of Support Vectors: 10
##
##
##
##
##
## [1] 0.4166667
##
## Call:
## svm(formula = formula, data = trainstock, kernel = "radial",
## cost = 0.1, gamma = 0.01)
##
##
## Parameters:
## SVM-Type: eps-regression
## SVM-Kernel: radial
## cost: 0.1
## gamma: 0.01
## epsilon: 0.1
##
##
## Number of Support Vectors: 8
##
##
##
##
##
## [1] 0.8333333
##
## Call:
## svm(formula = formula, data = trainstock, kernel = "radial",
## cost = 0.1, gamma = 0.01)
##
##
## Parameters:
## SVM-Type: eps-regression
## SVM-Kernel: radial
## cost: 0.1
## gamma: 0.01
## epsilon: 0.1
##
##
## Number of Support Vectors: 8
##
##
##
##
##
## [1] 0.6666667
##
## Call:
## svm(formula = formula, data = trainstock, kernerl = "radial",
## cost = 0.1, gamma = 0.01)
##
##
## Parameters:
## SVM-Type: eps-regression
## SVM-Kernel: radial
## cost: 0.1
## gamma: 0.01
## epsilon: 0.1
##
##
## Number of Support Vectors: 10
##
##
##
##
##
## [1] 0.6666667
##
## Call:
## svm(formula = formula, data = trainstock, kernerl = "radial",
## cost = 0.1, gamma = 0.01)
##
##
## Parameters:
## SVM-Type: eps-regression
## SVM-Kernel: radial
## cost: 0.1
## gamma: 0.01
## epsilon: 0.1
##
##
## Number of Support Vectors: 8
##
##
##
##
##
## [1] 0.5833333
##
## Call:
## svm(formula = formula, data = trainstock, kernerl = "radial",
## cost = 0.1, gamma = 0.01)
##
##
## Parameters:
## SVM-Type: eps-regression
## SVM-Kernel: radial
## cost: 0.1
## gamma: 0.01
## epsilon: 0.1
##
##
## Number of Support Vectors: 8
##
##
##
##
##
## [1] 0.6666667
##
## Call:
## svm(formula = formula, data = trainstock, kernerl = "radial",
## cost = 0.1, gamma = 0.01)
##
##
## Parameters:
## SVM-Type: eps-regression
## SVM-Kernel: radial
## cost: 0.1
## gamma: 0.01
## epsilon: 0.1
##
##
## Number of Support Vectors: 8
##
##
##
##
##
## [1] 0.5
##
## Call:
## svm(formula = formula, data = trainstock, kernerl = "radial",
## cost = 0.1, gamma = 0.01)
##
##
## Parameters:
## SVM-Type: eps-regression
## SVM-Kernel: radial
## cost: 0.1
## gamma: 0.01
## epsilon: 0.1
##
##
## Number of Support Vectors: 8
##
##
##
##
##
## [1] 0.5
##
## Call:
## svm(formula = formula, data = trainstock, kernerl = "radial",
## cost = 0.1, gamma = 0.01)
##
##
## Parameters:
## SVM-Type: eps-regression
## SVM-Kernel: radial
## cost: 0.1
## gamma: 0.01
## epsilon: 0.1
##
##
## Number of Support Vectors: 8
##
##
##
##
##
## [1] 0.6666667
##
## Call:
## svm(formula = formula, data = trainstock, kernerl = "radial",
## cost = 0.1, gamma = 0.01)
##
##
## Parameters:
## SVM-Type: eps-regression
## SVM-Kernel: radial
## cost: 0.1
## gamma: 0.01
## epsilon: 0.1
##
##
## Number of Support Vectors: 10
##
##
##
##
##
## [1] 0.75
##
## Call:
## svm(formula = formula, data = trainstock, kernerl = "radial",
## cost = 0.1, gamma = 0.01)
##
##
## Parameters:
## SVM-Type: eps-regression
## SVM-Kernel: radial
## cost: 0.1
## gamma: 0.01
## epsilon: 0.1
##
##
## Number of Support Vectors: 8
##
##
##
##
##
## [1] 0.5
##
## Call:
## svm(formula = formula, data = trainstock, kernerl = "radial",
## cost = 0.1, gamma = 0.01)
##
##
## Parameters:
## SVM-Type: eps-regression
## SVM-Kernel: radial
## cost: 0.1
## gamma: 0.01
## epsilon: 0.1
##
##
## Number of Support Vectors: 8
##
##
##
##
##
## [1] 0.6666667
##
## Call:
## svm(formula = formula, data = trainstock, kernerl = "radial",
## cost = 0.1, gamma = 0.01)
##
##
## Parameters:
## SVM-Type: eps-regression
## SVM-Kernel: radial
## cost: 0.1
## gamma: 0.01
## epsilon: 0.1
##
##
## Number of Support Vectors: 8
##
##
##
##
##
## [1] 0.5
##
## Call:
## svm(formula = formula, data = trainstock, kernerl = "radial",
## cost = 0.1, gamma = 0.01)
##
##
## Parameters:
## SVM-Type: eps-regression
## SVM-Kernel: radial
## cost: 0.1
## gamma: 0.01
## epsilon: 0.1
##
##
## Number of Support Vectors: 8
##
##
##
##
##
## [1] 0.5
##
## Call:
## svm(formula = formula, data = trainstock, kernerl = "radial",
## cost = 0.1, gamma = 0.01)
##
##
## Parameters:
## SVM-Type: eps-regression
## SVM-Kernel: radial
## cost: 0.1
## gamma: 0.01
## epsilon: 0.1
##
##
## Number of Support Vectors: 9
##
##
##
##
##
## [1] 0.8333333
##
## Call:
## svm(formula = formula, data = trainstock, kernerl = "radial",
## cost = 0.1, gamma = 0.01)
##
##
## Parameters:
## SVM-Type: eps-regression
## SVM-Kernel: radial
## cost: 0.1
## gamma: 0.01
## epsilon: 0.1
##
##
## Number of Support Vectors: 10
##
##
##
##
##
## [1] 0.8333333
##
## Call:
## svm(formula = formula, data = trainstock, kernerl = "radial",
## cost = 0.1, gamma = 0.01)
##
##
## Parameters:
## SVM-Type: eps-regression
## SVM-Kernel: radial
## cost: 0.1
## gamma: 0.01
## epsilon: 0.1
##
##
## Number of Support Vectors: 10
##
##
##
##
##
## [1] 0.9166667
##
## Call:
## svm(formula = formula, data = trainstock, kernerl = "radial",
## cost = 0.1, gamma = 0.01)
##
##
## Parameters:
## SVM-Type: eps-regression
## SVM-Kernel: radial
## cost: 0.1
## gamma: 0.01
## epsilon: 0.1
##
##
## Number of Support Vectors: 10
##
##
##
##
##
## [1] 0.75
##
## Call:
## svm(formula = formula, data = trainstock, kernerl = "radial",
## cost = 0.1, gamma = 0.01)
##
##
## Parameters:
## SVM-Type: eps-regression
## SVM-Kernel: radial
## cost: 0.1
## gamma: 0.01
## epsilon: 0.1
##
##
## Number of Support Vectors: 8
##
##
##
##
##
## [1] 0.4166667
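Printing every model makes this hard to scan; a compact alternative is to collect one row per stock. A minimal sketch, where fit_one_stock() is a hypothetical stand-in for the per-stock svm() fit and evaluation used above:

# Sketch only: collect one row per stock instead of printing every model.
# fit_one_stock() is hypothetical -- it stands in for the per-stock
# svm(..., kernel = "radial", cost = 0.1, gamma = 0.01) fit plus its test metric.
svr_results <- data.frame(stock = levels(dow_jones$stock), metric = NA_real_)
for (i in seq_len(nrow(svr_results))) {
  svr_results$metric[i] <- fit_one_stock(svr_results$stock[i])
}
svr_results[order(-svr_results$metric), ]   # strongest results first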
# CAPM and Stock Risk and Reward Calculations
dow_jones_2 = read.csv("dow_jones_index.data", header = TRUE)
# convert factors to numeric
dow_jones_2$open=as.numeric(gsub("\\$","",dow_jones_2$open))
dow_jones_2$high=as.numeric(gsub("\\$","",dow_jones_2$high))
dow_jones_2$low=as.numeric(gsub("\\$","",dow_jones_2$low))
dow_jones_2$close=as.numeric(gsub("\\$","",dow_jones_2$close))
dow_jones_2$next_weeks_open=as.numeric(gsub("\\$","",dow_jones_2$next_weeks_open))
dow_jones_2$next_weeks_close=as.numeric(gsub("\\$","",dow_jones_2$next_weeks_close))
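As an aside, the six conversions above can be done in one pass; a sketch, assuming the same column names:

# Equivalent one-pass conversion (sketch): strip "$" and coerce to numeric
# for all six price columns at once.
price_cols <- c("open", "high", "low", "close",
                "next_weeks_open", "next_weeks_close")
dow_jones_2[price_cols] <- lapply(dow_jones_2[price_cols],
                                  function(x) as.numeric(gsub("\\$", "", x)))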
# split into one data frame per stock
DowJData3 <- split(dow_jones_2, dow_jones_2$stock)
# weekly S&P 500 index data (Yahoo Finance ticker ^GSPC)
SP500Dta <- read.csv("^GSPC.csv")
# compute weekly percent change in the closing price
# (column 5, Close, of the S&P 500 file; column 7, close, of each stock)
ReturnSP500 = na.omit(Delt(SP500Dta[,5]))
ReturnAA = na.omit(Delt(DowJData3$AA[,7])); ReturnAA
## Delt.1.arithmetic
## [1,] -0.027405603
## [2,] -0.011271133
## [3,] 0.021532616
## [4,] 0.062616243
## [5,] 0.013418903
## [6,] -0.005181347
## [7,] -0.034722222
## [8,] -0.005995204
## [9,] -0.033172497
## [10,] 0.004990643
## [11,] 0.060831782
## [12,] 0.022235225
## [13,] 0.025758443
## [14,] -0.078125000
## [15,] 0.027239709
## [16,] 0.001767826
## [17,] 0.008823529
## [18,] -0.002915452
## [19,] -0.049122807
## [20,] 0.013530135
## [21,] -0.033980583
## [22,] -0.040201005
## [23,] -0.036649215
## [24,] 0.034646739
## attr(,"na.action")
## [1] 1
## attr(,"class")
## [1] "omit"
ReturnAXP = na.omit(Delt(DowJData3$AXP[,7]))
ReturnBA = na.omit(Delt(DowJData3$BA[,7]))
ReturnBAC = na.omit(Delt(DowJData3$BAC[,7]))
ReturnCAT = na.omit(Delt(DowJData3$CAT[,7]))
ReturnCSCO = na.omit(Delt(DowJData3$CSCO[,7]))
ReturnCVX = na.omit(Delt(DowJData3$CVX[,7]))
ReturnDD = na.omit(Delt(DowJData3$DD[,7]))
ReturnDIS = na.omit(Delt(DowJData3$DIS[,7]))
ReturnGE = na.omit(Delt(DowJData3$GE[,7]))
ReturnHD = na.omit(Delt(DowJData3$HD[,7]))
ReturnHPQ = na.omit(Delt(DowJData3$HPQ[,7]))
ReturnIBM = na.omit(Delt(DowJData3$IBM[,7]))
ReturnINTC = na.omit(Delt(DowJData3$INTC[,7]))
ReturnJNJ = na.omit(Delt(DowJData3$JNJ[,7]))
ReturnJPM = na.omit(Delt(DowJData3$JPM[,7]))
ReturnKO = na.omit(Delt(DowJData3$KO[,7]))
ReturnKRFT = na.omit(Delt(DowJData3$KRFT[,7]))
ReturnMCD = na.omit(Delt(DowJData3$MCD[,7]))
ReturnMMM = na.omit(Delt(DowJData3$MMM[,7]))
ReturnMRK = na.omit(Delt(DowJData3$MRK[,7]))
ReturnMSFT = na.omit(Delt(DowJData3$MSFT[,7]))
ReturnPFE = na.omit(Delt(DowJData3$PFE[,7]))
ReturnPG = na.omit(Delt(DowJData3$PG[,7]))
ReturnT = na.omit(Delt(DowJData3$T[,7]))
ReturnTRV = na.omit(Delt(DowJData3$TRV[,7]))
ReturnUTX = na.omit(Delt(DowJData3$UTX[,7]))
ReturnWMT = na.omit(Delt(DowJData3$WMT[,7]))
ReturnVZ = na.omit(Delt(DowJData3$VZ[,7]))
ReturnXOM = na.omit(Delt(DowJData3$XOM[,7]))
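The thirty assignments above are mechanical; an equivalent sketch builds the same series in one named list:

# Sketch: one weekly-return series per stock, keyed by ticker.
ReturnList <- lapply(DowJData3, function(s) na.omit(Delt(s[, 7])))
# ReturnList$AA matches ReturnAA above; the cbind() below could then be
# written as: MyData <- cbind(ReturnSP500, do.call(cbind, ReturnList))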
# combine the market and stock return series into one matrix
MyData = cbind(ReturnSP500,ReturnAA,ReturnAXP,
ReturnBA,
ReturnBAC,
ReturnCAT,
ReturnCSCO,
ReturnCVX,
ReturnDD,
ReturnDIS,
ReturnGE,
ReturnHD,
ReturnHPQ,
ReturnIBM,
ReturnINTC,
ReturnJNJ,
ReturnJPM,
ReturnKO,
ReturnKRFT,
ReturnMCD,
ReturnMMM,
ReturnMRK,
ReturnMSFT,
ReturnPFE,
ReturnPG,
ReturnT,
ReturnTRV,
ReturnUTX,
ReturnWMT,
ReturnVZ,
ReturnXOM)
colnames(MyData) = c("SP500", "AA", "AXP", "BA", "BAC", "CAT", "CSCO", "CVX", "DD", "DIS", "GE", "HD", "HPQ", "IBM", "INTC", "JNJ", "JPM", "KO", "KRFT", "MCD", "MMM", "MRK", "MSFT", "PFE", "PG", "T", "TRV", "UTX", "WMT", "VZ", "XOM")
head(MyData)
## SP500 AA AXP BA BAC
## [1,] 0.017097908 -0.027405603 0.0426059513 0.009945229 0.070175439
## [2,] -0.007647470 -0.011271133 -0.0054054054 0.022977023 -0.065573770
## [3,] -0.005462275 0.021532616 -0.0465217391 -0.034179688 -0.045614035
## [4,] 0.027053943 0.062616243 -0.0009119927 0.031055901 0.050735294
## [5,] 0.013944960 0.013418903 0.0668644455 0.010647240 0.033589923
## [6,] 0.010427706 -0.005181347 -0.0260962567 0.012475742 -0.001354096
## CAT CSCO CVX DD DIS
## [1,] 0.002987304 0.01144492 0.017984428 0.0008038585 -0.004055767
## [2,] -0.013402829 -0.02310231 0.010233761 -0.0291164659 0.011453296
## [3,] 0.031590296 0.01013514 -0.004371934 0.0401240951 -0.022395571
## [4,] 0.040865385 0.05351171 0.040055692 0.0445416584 0.047876448
## [5,] 0.039662617 -0.15192744 -0.006796416 0.0390253189 0.066322771
## [6,] 0.022406799 0.00802139 0.023535511 0.0256504214 0.003455425
## GE HD HPQ IBM INTC
## [1,] 0.021161150 0.043920884 0.0257263251 0.0139931048 0.020329138
## [2,] 0.048884166 0.017275007 0.0211891892 0.0366666667 -0.012333966
## [3,] 0.023302938 0.005204054 -0.0364175312 0.0238585209 0.030739673
## [4,] 0.017821782 0.002724796 0.0421885300 0.0300860499 0.010251631
## [5,] 0.037451362 0.018478261 0.0255112798 -0.0009146341 0.003690037
## [6,] 0.005157056 0.026680896 0.0006167763 0.0060421117 0.017463235
## JNJ JPM KO KRFT MCD
## [1,] -0.000798722 0.029101742 0.003337572 0.004809234 -0.004168347
## [2,] 0.001758593 0.008461367 -0.005702519 0.000319081 0.012827437
## [3,] -0.042291733 -0.016559947 -0.008921459 -0.026156300 -0.023063592
## [4,] 0.013831028 0.001122586 0.005626105 0.021618081 0.010507642
## [5,] -0.002301118 0.044404575 0.016144501 -0.016992626 0.028224173
## [6,] 0.006754530 0.030706463 0.015416077 0.008153947 -0.000131337
## MMM MRK MSFT PFE PG
## [1,] 0.021686188 -0.083534137 -0.0104895105 0.000000000 0.015968992
## [2,] 0.013507378 -0.009640666 -0.0098939929 0.001090513 0.005798871
## [3,] -0.020719005 -0.024483776 -0.0096359743 -0.011437908 -0.025944470
## [4,] 0.009720952 -0.005443000 0.0007207207 0.063360882 -0.009190031
## [5,] 0.039755352 0.005472788 -0.0187252431 -0.024352332 0.017607294
## [6,] 0.012636166 -0.006652555 -0.0069724771 0.019118428 -0.006642979
## T TRV UTX WMT VZ
## [1,] -0.014558059 0.024376524 0.000000000 0.013498521 -0.013080991
## [2,] -0.003517411 0.006772835 0.014162873 0.016785258 -0.014382403
## [3,] -0.029650547 0.014727273 0.015336658 0.017405347 0.019456366
## [4,] 0.017460895 0.028668697 0.013385730 -0.011816578 0.019085041
## [5,] 0.017876296 0.027521338 0.032476975 -0.006068178 0.002203250
## [6,] 0.003512469 0.032717410 -0.002230047 -0.005566529 0.006320418
## XOM
## [1,] 0.0297658420
## [2,] 0.0146454265
## [3,] 0.0001266143
## [4,] 0.0543106722
## [5,] -0.0055235351
## [6,] 0.0202849553
# compute mean and standard deviation of each return series
DataMean = apply(MyData, 2, mean)
DataSD = apply(MyData, 2, sd)
cbind(DataMean, DataSD)
## DataMean DataSD
## SP500 7.115672e-06 0.01497110
## AA -2.556261e-03 0.03450483
## AXP 3.978729e-03 0.02867944
## BA 1.470660e-03 0.02740461
## BAC -1.203075e-02 0.03342655
## CAT 3.237159e-03 0.03336199
## CSCO -1.327265e-02 0.03920114
## CVX 3.252364e-03 0.02448295
## DD 2.143464e-03 0.02664204
## DIS -1.662886e-03 0.02746833
## GE -7.380844e-04 0.02560781
## HD 1.101705e-03 0.02335630
## HPQ -9.842898e-03 0.03914964
## IBM 4.734383e-03 0.01806670
## INTC 1.558625e-03 0.03211640
## JNJ 1.811071e-03 0.02068670
## JPM -3.912704e-03 0.02246357
## KO 1.437651e-03 0.01628497
## KRFT 4.482347e-03 0.01777007
## MCD 4.173738e-03 0.01919738
## MMM 2.399982e-03 0.02069039
## MRK -2.898597e-03 0.02654569
## MSFT -6.599840e-03 0.01858821
## PFE 4.102367e-03 0.02598915
## PG -1.100626e-03 0.01777888
## T 2.412863e-03 0.01926273
## TRV 2.720362e-03 0.01934159
## UTX 2.888433e-03 0.02084871
## WMT -1.130565e-03 0.01894121
## VZ 2.326181e-04 0.01783121
## XOM 9.597025e-04 0.02533114
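The table is easier to read as a risk-reward picture; a base-R sketch (nothing here beyond plot(), text(), and abline()):

# Sketch: risk (x) vs. reward (y) for the index and all 30 stocks.
plot(DataSD, DataMean, pch = 19,
     xlab = "Risk (SD of weekly return)",
     ylab = "Reward (mean weekly return)")
text(DataSD, DataMean, labels = names(DataMean), pos = 3, cex = 0.6)
abline(h = 0, lty = 2)   # separates positive from negative mean returns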
#1
lm.AA <- lm(AA~ SP500, data = as.data.frame(MyData))
#2
lm.AXP <- lm(AXP~ SP500, data = as.data.frame(MyData))
#3
lm.BA <- lm(BA~ SP500, data = as.data.frame(MyData))
#4
lm.BAC <- lm(BAC~ SP500, data = as.data.frame(MyData))
#5
lm.CAT <- lm(CAT~ SP500, data = as.data.frame(MyData))
#6
lm.CSCO <- lm(CSCO~ SP500, data = as.data.frame(MyData))
#7
lm.CVX <- lm(CVX~ SP500, data = as.data.frame(MyData))
#8
lm.DD <- lm(DD~ SP500, data = as.data.frame(MyData))
#9
lm.DIS <- lm(DIS~ SP500, data = as.data.frame(MyData))
#10
lm.GE <- lm(GE~ SP500, data = as.data.frame(MyData))
#11
lm.HD <- lm(HD~ SP500, data = as.data.frame(MyData))
#12
lm.HPQ <- lm(HPQ~ SP500, data = as.data.frame(MyData))
#13
lm.IBM <- lm(IBM~ SP500, data = as.data.frame(MyData))
#14
lm.INTC <- lm(INTC~ SP500, data = as.data.frame(MyData))
#15
lm.JNJ <- lm(JNJ~ SP500, data = as.data.frame(MyData))
#16
lm.JPM <- lm(JPM~ SP500, data = as.data.frame(MyData))
#17
lm.KO <- lm(KO~ SP500, data = as.data.frame(MyData))
#18
lm.KRFT <- lm(KRFT~ SP500, data = as.data.frame(MyData))
#19
lm.MCD <- lm(MCD~ SP500, data = as.data.frame(MyData))
#20
lm.MMM <- lm(MMM~ SP500, data = as.data.frame(MyData))
#21
lm.MRK <- lm(MRK~ SP500, data = as.data.frame(MyData))
#22
lm.MSFT <- lm(MSFT~ SP500, data = as.data.frame(MyData))
#23
lm.PFE <- lm(PFE~ SP500, data = as.data.frame(MyData))
#24
lm.PG <- lm(PG~ SP500, data = as.data.frame(MyData))
#25
lm.T <- lm(T~ SP500, data = as.data.frame(MyData))
#26
lm.TRV <- lm(TRV~ SP500, data = as.data.frame(MyData))
#27
lm.UTX <- lm(UTX~ SP500, data = as.data.frame(MyData))
#28
lm.WMT <- lm(WMT~ SP500, data = as.data.frame(MyData))
#29
lm.VZ <- lm(VZ~ SP500, data = as.data.frame(MyData))
#30
lm.XOM <- lm(XOM~ SP500, data = as.data.frame(MyData))
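Each slope extracted below is the stock's CAPM beta, beta = Cov(R_stock, R_SP500) / Var(R_SP500): the sensitivity of the stock's weekly return to the market's. The thirty fits above can also be run in one pass; a minimal sketch:

# Sketch: all 30 CAPM betas in one loop; [[2]] drops the coefficient name.
stocks <- colnames(MyData)[-1]               # every column except SP500
betas  <- sapply(stocks, function(s)
  coef(lm(MyData[, s] ~ MyData[, "SP500"]))[[2]])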
#1
BetaDowJ <- summary(lm.AA)$coefficient[2,1]
paste("Beta of AA:" , BetaDowJ)
## [1] "Beta of AA: 1.2833481132258"
#2
BetaDowJ <- summary(lm.AXP)$coefficient[2,1]
paste("Beta of AXP:" , BetaDowJ)
## [1] "Beta of AXP: 1.00635300424091"
#3
BetaDowJ <- summary(lm.BA)$coefficient[2,1]
paste("Beta of BA:" , BetaDowJ)
## [1] "Beta of BA: 1.41543537150699"
#4
BetaDowJ <- summary(lm.BAC)$coefficient[2,1]
paste("Beta of BAC:" , BetaDowJ)
## [1] "Beta of BAC: 0.955091855655937"
#5
BetaDowJ <- summary(lm.CAT)$coefficient[2,1]
paste("Beta of CAT:" , BetaDowJ)
## [1] "Beta of CAT: 1.47045665275587"
#6
BetaDowJ <- summary(lm.CSCO)$coefficient[2,1]
paste("Beta of CSCO:" , BetaDowJ)
## [1] "Beta of CSCO: 0.740944862500916"
#7
BetaDowJ <- summary(lm.CVX)$coefficient[2,1]
paste("Beta of CVX:" , BetaDowJ)
## [1] "Beta of CVX: 0.884911936383771"
#8
BetaDowJ <- summary(lm.DD)$coefficient[2,1]
paste("Beta of DD:" , BetaDowJ)
## [1] "Beta of DD: 1.1856017925928"
#9
BetaDowJ <- summary(lm.DIS)$coefficient[2,1]
paste("Beta of DIS:" , BetaDowJ)
## [1] "Beta of DIS: 1.37172413225927"
#10
BetaDowJ <- summary(lm.GE)$coefficient[2,1]
paste("Beta of GE:" , BetaDowJ)
## [1] "Beta of GE: 1.14991258972729"
#11
BetaDowJ <- summary(lm.HD)$coefficient[2,1]
paste("Beta of HD:" , BetaDowJ)
## [1] "Beta of HD: 0.914377519178353"
#12
BetaDowJ <- summary(lm.HPQ)$coefficient[2,1]
paste("Beta of HPQ:" , BetaDowJ)
## [1] "Beta of HPQ: 1.18207574932582"
#13
BetaDowJ <- summary(lm.IBM)$coefficient[2,1]
paste("Beta of IBM:" , BetaDowJ)
## [1] "Beta of IBM: 0.791459024197683"
#14
BetaDowJ <- summary(lm.INTC)$coefficient[2,1]
paste("Beta of INTC:" , BetaDowJ)
## [1] "Beta of INTC: 1.16831599899469"
#15
BetaDowJ <- summary(lm.JNJ)$coefficient[2,1]
paste("Beta of JNJ:" , BetaDowJ)
## [1] "Beta of JNJ: 0.690732207496706"
#16
BetaDowJ <- summary(lm.JPM)$coefficient[2,1]
paste("Beta of JPM:" , BetaDowJ)
## [1] "Beta of JPM: 0.78005638075441"
#17
BetaDowJ <- summary(lm.KO)$coefficient[2,1]
paste("Beta of KO:" , BetaDowJ)
## [1] "Beta of KO: 0.646355311225748"
#18
BetaDowJ <- summary(lm.KRFT)$coefficient[2,1]
paste("Beta of KRFT:" , BetaDowJ)
## [1] "Beta of KRFT: 0.213432329668554"
#19
BetaDowJ <- summary(lm.MCD)$coefficient[2,1]
paste("Beta of MCD:" , BetaDowJ)
## [1] "Beta of MCD: 0.619919412874853"
#20
BetaDowJ <- summary(lm.MMM)$coefficient[2,1]
paste("Beta of MMM:" , BetaDowJ)
## [1] "Beta of MMM: 1.07851814959725"
#21
BetaDowJ <- summary(lm.MRK)$coefficient[2,1]
paste("Beta of MRK:" , BetaDowJ)
## [1] "Beta of MRK: 0.277562973613413"
#22
BetaDowJ <- summary(lm.MSFT)$coefficient[2,1]
paste("Beta of MSFT:" , BetaDowJ)
## [1] "Beta of MSFT: 0.616661446843324"
#23
BetaDowJ <- summary(lm.PFE)$coefficient[2,1]
paste("Beta of PFE:" , BetaDowJ)
## [1] "Beta of PFE: 0.684162973742174"
#24
BetaDowJ <- summary(lm.PG)$coefficient[2,1]
paste("Beta of PG:" , BetaDowJ)
## [1] "Beta of PG: 0.391590332490925"
#25
BetaDowJ <- summary(lm.T)$coefficient[2,1]
paste("Beta of T:" , BetaDowJ)
## [1] "Beta of T: 0.730661575201244"
#26
BetaDowJ <- summary(lm.TRV)$coefficient[2,1]
paste("Beta of TRV:" , BetaDowJ)
## [1] "Beta of TRV: 0.959463703842149"
#27
BetaDowJ <- summary(lm.UTX)$coefficient[2,1]
paste("Beta of UTX:" , BetaDowJ)
## [1] "Beta of UTX: 1.02542284538263"
#28
BetaDowJ <- summary(lm.WMT)$coefficient[2,1]
paste("Beta of WMT:" , BetaDowJ)
## [1] "Beta of WMT: 0.495437774372672"
#29
BetaDowJ <- summary(lm.VZ)$coefficient[2,1]
paste("Beta of VZ:" , BetaDowJ)
## [1] "Beta of VZ: 0.711131233223788"
#30
BetaDowJ <- summary(lm.XOM)$coefficient[2,1]
paste("Beta of XOM:" , BetaDowJ)
## [1] "Beta of XOM: 1.22737255293153"