library(ISLR2)
data(Smarket)
names(Smarket)
## [1] "Year"      "Lag1"      "Lag2"      "Lag3"      "Lag4"      "Lag5"     
## [7] "Volume"    "Today"     "Direction"
dim(Smarket)
## [1] 1250    9
library(MASS)
## 
## Attaching package: 'MASS'
## The following object is masked from 'package:ISLR2':
## 
##     Boston
data("Smarket")
train <- subset(Smarket, Year < 2005)
lda.fit <- lda(Direction ~ Lag1 + Lag2, data = train)
summary(lda.fit)
##         Length Class  Mode     
## prior   2      -none- numeric  
## counts  2      -none- numeric  
## means   4      -none- numeric  
## scaling 2      -none- numeric  
## lev     2      -none- character
## svd     1      -none- numeric  
## N       1      -none- numeric  
## call    3      -none- call     
## terms   3      terms  call     
## xlevels 0      -none- list
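# Note: summary() on an lda object only lists the components of the fit;
# printing the fitted object itself is the usual way to see the priors,
# group means, and discriminant coefficients in labeled form
lda.fit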
# Load the MASS library
library(MASS)

# Load the Smarket dataset
data("Smarket")

# Subset the data to include only observations before 2005
train <- subset(Smarket, Year < 2005)

# Fit LDA model using only the observations before 2005
lda.fit <- lda(Direction ~ Lag1 + Lag2, data = train)

# Display prior probabilities (cat() drops the Down/Up names, so the values
# print unlabeled: Down first, then Up)
prior_probabilities <- lda.fit$prior
cat("Prior Probabilities:\n", prior_probabilities, "\n\n")
## Prior Probabilities:
##  0.491984 0.508016
# Display group means (cat() flattens the 2 x 2 matrix column-wise: the
# Down/Up means of Lag1 followed by the Down/Up means of Lag2)
group_means <- lda.fit$means
cat("Group Means:\n", group_means, "\n\n")
## Group Means:
##  0.04279022 -0.03954635 0.03389409 -0.03132544
# Display coefficients of linear discriminants
coefficients <- lda.fit$scaling
cat("Coefficients of Linear Discriminants:\n", coefficients, "\n\n")
## Coefficients of Linear Discriminants:
##  -0.642019 -0.5135293
# Plot the linear discriminants (histograms of the discriminant values by class)
plot(lda.fit)

# Make predictions
predictions <- predict(lda.fit)
class_predictions <- predictions$class
posterior_probabilities <- predictions$posterior
linear_discriminants <- predictions$x

# Display predictions (cat() prints the factor's underlying integer codes:
# 1 = Down, 2 = Up)
cat("Predictions:\n", head(class_predictions), "\n\n")
## Predictions:
##  2 1 1 2 2 1
# Display posterior probabilities (cat() flattens the 6 x 2 matrix
# column-wise: six "Down" probabilities followed by their six "Up" complements)
cat("Posterior Probabilities:\n", head(posterior_probabilities), "\n\n")
## Posterior Probabilities:
##  0.4950989 0.5094612 0.5168756 0.4947529 0.4935503 0.5017058 0.5049011 0.4905388 0.4831244 0.5052471 0.5064497 0.4982942
# Display linear discriminants
cat("Linear Discriminants:\n", head(linear_discriminants), "\n\n")
## Linear Discriminants:
##  -0.145003 -0.8103423 -1.15403 -0.1289757 -0.07326231 -0.4510484
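# A sketch of the natural follow-up: the training-set confusion matrix and
# accuracy for these in-sample predictions
table(class_predictions, train$Direction)
mean(class_predictions == train$Direction)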
# Load the required libraries
library(MASS)
library(ISLR)
## 
## Attaching package: 'ISLR'
## The following objects are masked from 'package:ISLR2':
## 
##     Auto, Credit
# Load the Smarket dataset
data("Smarket")

# Split the data into training and testing sets
train <- subset(Smarket, Year < 2005)
test <- subset(Smarket, Year >= 2005)

# Fit LDA model
lda.fit <- lda(Direction ~ Lag1 + Lag2, data = train)
lda.pred <- predict(lda.fit, newdata = test)

# Fit logistic regression model
glm.fit <- glm(Direction ~ Lag1 + Lag2, data = train, family = binomial)
glm.probs <- predict(glm.fit, newdata = test, type = "response")
glm.pred <- ifelse(glm.probs > 0.5, "Up", "Down")

# Compare predictions
accuracy_lda <- mean(lda.pred$class == test$Direction)
accuracy_logistic <- mean(glm.pred == test$Direction)

cat("Accuracy of LDA:", accuracy_lda, "\n")
## Accuracy of LDA: 0.5595238
cat("Accuracy of Logistic Regression:", accuracy_logistic, "\n")
## Accuracy of Logistic Regression: 0.5595238
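# Since the two models tie on test accuracy, a sketch of a closer look:
# how often their individual predictions agree, and the LDA confusion matrix
mean(as.character(lda.pred$class) == glm.pred)
table(lda.pred$class, test$Direction)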
# Load required libraries
library(MASS)
library(ISLR)

# Load the Smarket dataset
data("Smarket")

# Subset the data to include only observations before 2005
train <- subset(Smarket, Year < 2005)

# Fit LDA model using only the observations before 2005
lda.fit <- lda(Direction ~ Lag1 + Lag2, data = train)

# Make predictions with LDA
lda.pred <- predict(lda.fit)
posterior_probabilities <- lda.pred$posterior

# Recreate predictions using a 50% threshold on posterior probabilities
threshold <- 0.5
recreated_predictions <- ifelse(posterior_probabilities[, "Up"] > threshold, "Up", "Down")

# Compare predictions with original lda.pred$class
# (identical() returns FALSE only because lda.pred$class is a factor while
# recreated_predictions is a named character vector; the predicted labels
# themselves agree, as the sketch after the output confirms)
identical(lda.pred$class, recreated_predictions)
## [1] FALSE
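# A sketch that sidesteps the type mismatch: convert the factor to character
# and drop the names that ifelse() carried over from the posterior matrix
all(as.character(lda.pred$class) == unname(recreated_predictions))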
# Load required libraries
library(MASS)
library(ISLR)

# Load the Smarket dataset
data("Smarket")

# Subset the data to include only observations before 2005
train <- subset(Smarket, Year < 2005)

# Fit LDA model using only the observations before 2005
lda.fit <- lda(Direction ~ Lag1 + Lag2, data = train)

# Make predictions with LDA
lda.pred <- predict(lda.fit)
posterior_probabilities <- lda.pred$posterior

# Recreate predictions using a 50% threshold on posterior probabilities
threshold <- 0.5
recreated_predictions <- ifelse(posterior_probabilities[, "Down"] > threshold, "Down", "Up")

# Compare predictions with original lda.pred$class
# (FALSE again reflects the factor-versus-named-character mismatch rather
# than any disagreement in the predicted labels)
identical(lda.pred$class, recreated_predictions)
## [1] FALSE
# Load required libraries
library(MASS)
library(ISLR)

# Load the Smarket dataset
data("Smarket")

# Subset the data to include only observations before 2005
train <- subset(Smarket, Year < 2005)

# Fit LDA model using only the observations before 2005
lda.fit <- lda(Direction ~ Lag1 + Lag2, data = train)

# Make predictions with LDA
lda.pred <- predict(lda.fit)
posterior_probabilities <- lda.pred$posterior

# Define the threshold for predicting a market decrease
threshold <- 0.9

# Recreate predictions using the specified threshold on posterior probabilities
recreated_predictions <- ifelse(posterior_probabilities[, "Down"] > threshold, "Down", "Up")

# Compare predictions with original lda.pred$class
# (FALSE here reflects genuinely different predictions in addition to the
# type mismatch: no day reaches a 90% "Down" posterior, so this rule labels
# every day "Up", unlike the default 50% rule)
identical(lda.pred$class, recreated_predictions)
## [1] FALSE
# Load required libraries
library(MASS)
library(ISLR)

# Load the Smarket dataset
data("Smarket")

# Subset the data to include only observations before 2005
train <- subset(Smarket, Year < 2005)

# Fit LDA model using only the observations before 2005
lda.fit <- lda(Direction ~ Lag1 + Lag2, data = train)

# Make predictions with LDA
lda.pred <- predict(lda.fit)
posterior_probabilities <- lda.pred$posterior

# Define the threshold for predicting a market decrease
threshold <- 0.9

# Check if any days meet the threshold for predicting a market decrease
max_posterior <- max(posterior_probabilities[, "Down"])
if (max_posterior < threshold) {
  cat("No days meet the threshold for predicting a market decrease.\n")
} else {
  cat("At least one day meets the threshold for predicting a market decrease.\n")
}
## No days meet the threshold for predicting a market decrease.
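# A quick sketch to quantify this: count the days exceeding the cutoff and
# inspect the largest "Down" posterior actually observed
sum(posterior_probabilities[, "Down"] > threshold)
max_posterior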
# 4.7.4 Quadratic Discriminant Analysis

# Load the MASS library
library(MASS)

# Load the Smarket dataset
data("Smarket")

# Subset the data to include only observations before 2005
train <- subset(Smarket, Year < 2005)

# Fit QDA model using only the observations before 2005
qda.fit <- qda(Direction ~ Lag1 + Lag2, data = train)

# Display the summary of the QDA model
summary(qda.fit)
##         Length Class  Mode     
## prior   2      -none- numeric  
## counts  2      -none- numeric  
## means   4      -none- numeric  
## scaling 8      -none- numeric  
## ldet    2      -none- numeric  
## lev     2      -none- character
## N       1      -none- numeric  
## call    3      -none- call     
## terms   3      terms  call     
## xlevels 0      -none- list
# Make predictions with QDA
qda.pred <- predict(qda.fit)

# Print the first few predictions
# (subset the components before calling head(): head() on the prediction
# list itself returns whole list elements, dumping all 998 fitted values)
head(qda.pred$class)
## [1] Up   Down Down Up   Up   Up  
## Levels: Down Up
head(qda.pred$posterior)
##        Down        Up
## 1 0.4923444 0.5076556
## 2 0.5060033 0.4939967
## 3 0.5145040 0.4854960
## 4 0.4950242 0.5049758
## 5 0.4918806 0.5081194
## 6 0.4996028 0.5003972
# Load the MASS library
library(MASS)

# Load the Smarket dataset
data("Smarket")

# Subset the data to include only observations in 2005
Smarket_2005 <- subset(Smarket, Year == 2005)

# Fit QDA model using the entire training dataset (observations before 2005)
qda.fit <- qda(Direction ~ Lag1 + Lag2, data = subset(Smarket, Year < 2005))

# Make predictions on the 2005 data with QDA
qda.class <- predict(qda.fit, newdata = Smarket_2005)$class

# Create a contingency table to compare predictions with actual values
prediction_table <- table(qda.class, Smarket_2005$Direction)

# Print the contingency table
print(prediction_table)
##          
## qda.class Down  Up
##      Down   30  20
##      Up     81 121
# Calculate the accuracy of QDA predictions
accuracy <- mean(qda.class == Smarket_2005$Direction)
cat("Accuracy of QDA predictions:", accuracy, "\n")
## Accuracy of QDA predictions: 0.5992063
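# As a sanity check (a sketch reusing prediction_table): correct predictions
# sit on the diagonal of the contingency table, so accuracy is (30 + 121)/252
sum(diag(prediction_table)) / sum(prediction_table)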
# 4.7.5 Naive Bayes

# Load required library
library(e1071)

# Load the Smarket dataset
data("Smarket")

# Subset the data to include only observations before 2005
train <- subset(Smarket, Year < 2005)

# Fit Naive Bayes model using only the observations before 2005
nb.fit <- naiveBayes(Direction ~ Lag1 + Lag2, data = train)

# Display the summary of the Naive Bayes model
print(nb.fit)
## 
## Naive Bayes Classifier for Discrete Predictors
## 
## Call:
## naiveBayes.default(x = X, y = Y, laplace = laplace)
## 
## A-priori probabilities:
## Y
##     Down       Up 
## 0.491984 0.508016 
## 
## Conditional probabilities:
##       Lag1
## Y             [,1]     [,2]
##   Down  0.04279022 1.227446
##   Up   -0.03954635 1.231668
## 
##       Lag2
## Y             [,1]     [,2]
##   Down  0.03389409 1.239191
##   Up   -0.03132544 1.220765
# Make predictions with Naive Bayes
nb.class <- predict(nb.fit, newdata = subset(Smarket, Year == 2005))

# Create a contingency table to compare predictions with actual values
prediction_table <- table(nb.class, subset(Smarket, Year == 2005)$Direction)

# Print the contingency table
print(prediction_table)
##         
## nb.class Down  Up
##     Down   28  20
##     Up     83 121
# Calculate the accuracy of Naive Bayes predictions
accuracy <- mean(nb.class == subset(Smarket, Year == 2005)$Direction)
cat("Accuracy of Naive Bayes predictions:", accuracy, "\n")
## Accuracy of Naive Bayes predictions: 0.5912698
# Generate estimates of the probability that each observation belongs to each class
nb.preds <- predict(nb.fit, newdata = subset(Smarket, Year == 2005), type = "raw")
print(nb.preds[1:5, ])
##           Down        Up
## [1,] 0.4873164 0.5126836
## [2,] 0.4762492 0.5237508
## [3,] 0.4653377 0.5346623
## [4,] 0.4748652 0.5251348
## [5,] 0.4901890 0.5098110
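# A minimal sketch: thresholding the raw posteriors at 50% should reproduce
# the class predictions returned above (manual.class is introduced here only
# for illustration)
manual.class <- ifelse(nb.preds[, "Up"] > 0.5, "Up", "Down")
all(manual.class == as.character(nb.class))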
# 4.7.6 K-Nearest Neighbors

# Load the class library
library(class)

# Load the Smarket dataset
data("Smarket")

# Subset the data to include only observations before 2005
train <- subset(Smarket, Year < 2005)

# Prepare predictors and response variables for training and testing sets
train.X <- cbind(train$Lag1, train$Lag2)
test.X <- cbind(Smarket[Smarket$Year >= 2005, "Lag1"], Smarket[Smarket$Year >= 2005, "Lag2"])
train.Direction <- train$Direction
Direction_2005 <- Smarket$Direction[Smarket$Year >= 2005]

# Fit KNN model with K=1
set.seed(1)
knn.pred <- knn(train.X, test.X, train.Direction, k = 1)

# Create a contingency table to compare predictions with actual values
prediction_table <- table(knn.pred, Direction_2005)

# Print the contingency table
print(prediction_table)
##         Direction_2005
## knn.pred Down Up
##     Down   43 58
##     Up     68 83
# Calculate the accuracy of KNN predictions
accuracy <- mean(knn.pred == Direction_2005)
cat("Accuracy of KNN predictions (K=1):", accuracy, "\n")
## Accuracy of KNN predictions (K=1): 0.5
# Fit KNN model with K=3
knn.pred <- knn(train.X, test.X, train.Direction, k = 3)

# Create a contingency table to compare predictions with actual values
prediction_table <- table(knn.pred, Direction_2005)

# Print the contingency table
print(prediction_table)
##         Direction_2005
## knn.pred Down Up
##     Down   48 54
##     Up     63 87
# Calculate the accuracy of KNN predictions
accuracy <- mean(knn.pred == Direction_2005)
cat("Accuracy of KNN predictions (K=3):", accuracy, "\n")
## Accuracy of KNN predictions (K=3): 0.5357143
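# A sketch extending the search: loop over a few values of K and report test
# accuracy for each (reuses train.X, test.X, train.Direction, Direction_2005)
set.seed(1)
for (k in c(1, 3, 5, 7, 9)) {
  pred <- knn(train.X, test.X, train.Direction, k = k)
  cat("K =", k, "accuracy:", mean(pred == Direction_2005), "\n")
}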
# 4.7.7 Poisson Regression

# Load the Bikeshare data
library(ISLR2)
data("Bikeshare")

# Fit a Poisson regression model
mod.pois <- glm(bikers ~ mnth + hr + workingday + temp + weathersit, data = Bikeshare, family = poisson)

# Display summary of the model
summary(mod.pois)
## 
## Call:
## glm(formula = bikers ~ mnth + hr + workingday + temp + weathersit, 
##     family = poisson, data = Bikeshare)
## 
## Coefficients:
##                            Estimate Std. Error  z value Pr(>|z|)    
## (Intercept)                2.693688   0.009720  277.124  < 2e-16 ***
## mnthFeb                    0.226046   0.006951   32.521  < 2e-16 ***
## mnthMarch                  0.376437   0.006691   56.263  < 2e-16 ***
## mnthApril                  0.691693   0.006987   98.996  < 2e-16 ***
## mnthMay                    0.910641   0.007436  122.469  < 2e-16 ***
## mnthJune                   0.893405   0.008242  108.402  < 2e-16 ***
## mnthJuly                   0.773787   0.008806   87.874  < 2e-16 ***
## mnthAug                    0.821341   0.008332   98.573  < 2e-16 ***
## mnthSept                   0.903663   0.007621  118.578  < 2e-16 ***
## mnthOct                    0.937743   0.006744  139.054  < 2e-16 ***
## mnthNov                    0.820433   0.006494  126.334  < 2e-16 ***
## mnthDec                    0.686850   0.006317  108.724  < 2e-16 ***
## hr1                       -0.471593   0.012999  -36.278  < 2e-16 ***
## hr2                       -0.808761   0.014646  -55.220  < 2e-16 ***
## hr3                       -1.443918   0.018843  -76.631  < 2e-16 ***
## hr4                       -2.076098   0.024796  -83.728  < 2e-16 ***
## hr5                       -1.060271   0.016075  -65.957  < 2e-16 ***
## hr6                        0.324498   0.010610   30.585  < 2e-16 ***
## hr7                        1.329567   0.009056  146.822  < 2e-16 ***
## hr8                        1.831313   0.008653  211.630  < 2e-16 ***
## hr9                        1.336155   0.009016  148.191  < 2e-16 ***
## hr10                       1.091238   0.009261  117.831  < 2e-16 ***
## hr11                       1.248507   0.009093  137.304  < 2e-16 ***
## hr12                       1.434028   0.008936  160.486  < 2e-16 ***
## hr13                       1.427951   0.008951  159.529  < 2e-16 ***
## hr14                       1.379296   0.008999  153.266  < 2e-16 ***
## hr15                       1.408149   0.008977  156.862  < 2e-16 ***
## hr16                       1.628688   0.008805  184.979  < 2e-16 ***
## hr17                       2.049021   0.008565  239.221  < 2e-16 ***
## hr18                       1.966668   0.008586  229.065  < 2e-16 ***
## hr19                       1.668409   0.008743  190.830  < 2e-16 ***
## hr20                       1.370588   0.008973  152.737  < 2e-16 ***
## hr21                       1.118568   0.009215  121.383  < 2e-16 ***
## hr22                       0.871879   0.009536   91.429  < 2e-16 ***
## hr23                       0.481387   0.010207   47.164  < 2e-16 ***
## workingday                 0.014665   0.001955    7.502 6.27e-14 ***
## temp                       0.785292   0.011475   68.434  < 2e-16 ***
## weathersitcloudy/misty    -0.075231   0.002179  -34.528  < 2e-16 ***
## weathersitlight rain/snow -0.575800   0.004058 -141.905  < 2e-16 ***
## weathersitheavy rain/snow -0.926287   0.166782   -5.554 2.79e-08 ***
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
## 
## (Dispersion parameter for poisson family taken to be 1)
## 
##     Null deviance: 1052921  on 8644  degrees of freedom
## Residual deviance:  228041  on 8605  degrees of freedom
## AIC: 281159
## 
## Number of Fisher Scoring iterations: 5
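# Poisson coefficients are on the log scale, so exponentiating gives the
# multiplicative effect on the expected count: light rain/snow, for example,
# multiplies the expected number of bikers by exp(-0.5758), roughly 0.56,
# holding the other predictors fixed
exp(coef(mod.pois)["weathersitlight rain/snow"])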
# Plot coefficients associated with month
# (the model uses treatment contrasts, so January is the baseline with an
# implicit coefficient of 0, prepended here; appending -sum(...) would only
# be correct under sum contrasts)
coef.mnth <- c(0, coef(mod.pois)[2:12])
plot(coef.mnth, xlab = "Month", ylab = "Coefficient", xaxt = "n", col = "blue", pch = 19, type = "o")
axis(side = 1, at = 1:12, labels = c("J", "F", "M", "A", "M", "J", "J", "A", "S", "O", "N", "D"))

# Plot coefficients associated with hour
# (likewise, hour 0 is the baseline level with coefficient 0)
coef.hours <- c(0, coef(mod.pois)[13:35])
plot(coef.hours, xlab = "Hour", ylab = "Coefficient", col = "blue", pch = 19, type = "o")
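# For reference, the ISLR2 lab instead codes mnth and hr with sum contrasts,
# where the omitted level's coefficient equals the negative sum of the
# others; under that coding the -sum(...) construction is the right way to
# recover the last level. A sketch (mnth and hr are factors, as the summary
# above confirms):
contrasts(Bikeshare$mnth) <- contr.sum(12)
contrasts(Bikeshare$hr) <- contr.sum(24)
mod.pois2 <- glm(bikers ~ mnth + hr + workingday + temp + weathersit,
                 data = Bikeshare, family = poisson)
coef.mnth2 <- c(coef(mod.pois2)[2:12], -sum(coef(mod.pois2)[2:12]))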