Lab 4: Logistic Regression, LDA, QDA, and KNN
The Stock Market Data
library(ISLR)
names(Smarket)
[1] "Year" "Lag1" "Lag2" "Lag3"
[5] "Lag4" "Lag5" "Volume" "Today"
[9] "Direction"
dim(Smarket)
[1] 1250 9
summary(Smarket)
Year Lag1
Min. :2001 Min. :-4.922000
1st Qu.:2002 1st Qu.:-0.639500
Median :2003 Median : 0.039000
Mean :2003 Mean : 0.003834
3rd Qu.:2004 3rd Qu.: 0.596750
Max. :2005 Max. : 5.733000
Lag2 Lag3
Min. :-4.922000 Min. :-4.922000
1st Qu.:-0.639500 1st Qu.:-0.640000
Median : 0.039000 Median : 0.038500
Mean : 0.003919 Mean : 0.001716
3rd Qu.: 0.596750 3rd Qu.: 0.596750
Max. : 5.733000 Max. : 5.733000
Lag4 Lag5
Min. :-4.922000 Min. :-4.92200
1st Qu.:-0.640000 1st Qu.:-0.64000
Median : 0.038500 Median : 0.03850
Mean : 0.001636 Mean : 0.00561
3rd Qu.: 0.596750 3rd Qu.: 0.59700
Max. : 5.733000 Max. : 5.73300
Volume Today Direction
Min. :0.3561 Min. :-4.922000 Down:602
1st Qu.:1.2574 1st Qu.:-0.639500 Up :648
Median :1.4229 Median : 0.038500
Mean :1.4783 Mean : 0.003138
3rd Qu.:1.6417 3rd Qu.: 0.596750
Max. :3.1525 Max. : 5.733000
pairs(Smarket)

# cor(Smarket) would produce an error because the Direction column is qualitative
# cor(Smarket)
cor(Smarket[,-9])
Year Lag1 Lag2
Year 1.00000000 0.029699649 0.030596422
Lag1 0.02969965 1.000000000 -0.026294328
Lag2 0.03059642 -0.026294328 1.000000000
Lag3 0.03319458 -0.010803402 -0.025896670
Lag4 0.03568872 -0.002985911 -0.010853533
Lag5 0.02978799 -0.005674606 -0.003557949
Volume 0.53900647 0.040909908 -0.043383215
Today 0.03009523 -0.026155045 -0.010250033
Lag3 Lag4 Lag5
Year 0.033194581 0.035688718 0.029787995
Lag1 -0.010803402 -0.002985911 -0.005674606
Lag2 -0.025896670 -0.010853533 -0.003557949
Lag3 1.000000000 -0.024051036 -0.018808338
Lag4 -0.024051036 1.000000000 -0.027083641
Lag5 -0.018808338 -0.027083641 1.000000000
Volume -0.041823686 -0.048414246 -0.022002315
Today -0.002447647 -0.006899527 -0.034860083
Volume Today
Year 0.53900647 0.030095229
Lag1 0.04090991 -0.026155045
Lag2 -0.04338321 -0.010250033
Lag3 -0.04182369 -0.002447647
Lag4 -0.04841425 -0.006899527
Lag5 -0.02200231 -0.034860083
Volume 1.00000000 0.014591823
Today 0.01459182 1.000000000
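The correlations between the lag variables and Today are all close to zero; the only substantial correlation is between Year and Volume (0.539). As a quick sketch (the variable name cors is ours), the largest off-diagonal entry can be located programmatically:
cors = cor(Smarket[, -9])
diag(cors) = 0                                      # zero out the trivial self-correlations
which(abs(cors) == max(abs(cors)), arr.ind = TRUE)  # flags the Year/Volume pair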
attach(Smarket)
plot(Volume)

Logistic Regression
glm.fit = glm(Direction ~ Lag1 + Lag2 + Lag3 + Lag4 + Lag5 + Volume, data = Smarket, family = binomial)
summary(glm.fit)
Call:
glm(formula = Direction ~ Lag1 + Lag2 + Lag3 + Lag4 + Lag5 +
Volume, family = binomial, data = Smarket)
Deviance Residuals:
Min 1Q Median 3Q Max
-1.446 -1.203 1.065 1.145 1.326
Coefficients:
Estimate Std. Error z value Pr(>|z|)
(Intercept) -0.126000 0.240736 -0.523 0.601
Lag1 -0.073074 0.050167 -1.457 0.145
Lag2 -0.042301 0.050086 -0.845 0.398
Lag3 0.011085 0.049939 0.222 0.824
Lag4 0.009359 0.049974 0.187 0.851
Lag5 0.010313 0.049511 0.208 0.835
Volume 0.135441 0.158360 0.855 0.392
(Dispersion parameter for binomial family taken to be 1)
Null deviance: 1731.2 on 1249 degrees of freedom
Residual deviance: 1727.6 on 1243 degrees of freedom
AIC: 1741.6
Number of Fisher Scoring iterations: 3
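Note that the reported AIC is just the residual deviance plus twice the number of estimated coefficients (seven here, counting the intercept):
1727.6 + 2 * 7
[1] 1741.6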
coef(glm.fit)
(Intercept) Lag1 Lag2
-0.126000257 -0.073073746 -0.042301344
Lag3 Lag4 Lag5
0.011085108 0.009358938 0.010313068
Volume
0.135440659
summary(glm.fit)$coef
Estimate Std. Error z value
(Intercept) -0.126000257 0.24073574 -0.5233966
Lag1 -0.073073746 0.05016739 -1.4565986
Lag2 -0.042301344 0.05008605 -0.8445733
Lag3 0.011085108 0.04993854 0.2219750
Lag4 0.009358938 0.04997413 0.1872757
Lag5 0.010313068 0.04951146 0.2082966
Volume 0.135440659 0.15835970 0.8552723
Pr(>|z|)
(Intercept) 0.6006983
Lag1 0.1452272
Lag2 0.3983491
Lag3 0.8243333
Lag4 0.8514445
Lag5 0.8349974
Volume 0.3924004
glm.probs = predict(glm.fit, type = "response")
glm.probs[1:10]
1 2 3 4 5
0.5070841 0.4814679 0.4811388 0.5152224 0.5107812
6 7 8 9 10
0.5069565 0.4926509 0.5092292 0.5176135 0.4888378
contrasts(Direction)
Up
Down 0
Up 1
glm.pred = rep("Down", 1250)
glm.pred[glm.probs > 0.5] = "Up"
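The two-step rep()/replacement idiom above can equivalently be written as a single vectorized call, with the same result:
glm.pred = ifelse(glm.probs > 0.5, "Up", "Down")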
table(glm.pred, Direction)
Direction
glm.pred Down Up
Down 145 141
Up 457 507
(507+145)/1250
[1] 0.5216
mean(glm.pred == Direction)
[1] 0.5216
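At first glance the model appears to get 52.2% of daily movements right, but this is the training accuracy, which is overly optimistic; the training error rate is simply its complement:
mean(glm.pred != Direction)
[1] 0.4784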
train = (Year < 2005)
Smarket.2005 = Smarket[!train,]
dim(Smarket.2005)
[1] 252 9
Direction.2005 = Direction[!train]
glm.fit = glm(Direction ~ Lag1 + Lag2 + Lag3 + Lag4 + Lag5 + Volume, data = Smarket, family = binomial, subset = train)
glm.probs = predict(glm.fit, Smarket.2005, type = "response")
glm.pred = rep("Down", 252)
glm.pred[glm.probs > 0.5] = "Up"
table(glm.pred, Direction.2005)
Direction.2005
glm.pred Down Up
Down 77 97
Up 34 44
mean(glm.pred == Direction.2005)
[1] 0.4801587
mean(glm.pred != Direction.2005)
[1] 0.5198413
glm.fit = glm(Direction ~ Lag1 + Lag2, data = Smarket, family = binomial, subset = train)
glm.probs = predict(glm.fit, Smarket.2005, type = "response")
glm.pred = rep("Down", 252)
glm.pred[glm.probs > 0.5] = "Up"
table(glm.pred, Direction.2005)
Direction.2005
glm.pred Down Up
Down 35 35
Up 76 106
mean(glm.pred == Direction.2005)
[1] 0.5595238
106/(106+76)
[1] 0.5824176
predict(glm.fit, newdata = data.frame(Lag1 = c(1.2, 1.5), Lag2 = c(1.1, -0.8)), type = "response")
1 2
0.4791462 0.4960939
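As a sanity check (a sketch, reusing the fitted coefficients), the first of these probabilities can be reproduced by hand by plugging the linear predictor into the inverse logit, p = 1 / (1 + exp(-(b0 + b1*Lag1 + b2*Lag2))):
b = coef(glm.fit)                                 # intercept and slopes from the Lag1 + Lag2 fit
1 / (1 + exp(-(b[1] + b[2] * 1.2 + b[3] * 1.1)))  # matches the 0.4791462 printed above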
Linear Discriminant Analysis
library(MASS)
lda.fit = lda(Direction ~ Lag1 + Lag2, data = Smarket, subset = train)
lda.fit
Call:
lda(Direction ~ Lag1 + Lag2, data = Smarket, subset = train)
Prior probabilities of groups:
Down Up
0.491984 0.508016
Group means:
Lag1 Lag2
Down 0.04279022 0.03389409
Up -0.03954635 -0.03132544
Coefficients of linear discriminants:
LD1
Lag1 -0.6420190
Lag2 -0.5135293
plot(lda.fit)

lda.pred = predict(lda.fit, Smarket.2005)
names(lda.pred)
[1] "class" "posterior" "x"
lda.class = lda.pred$class
table(lda.class, Direction.2005)
Direction.2005
lda.class Down Up
Down 35 35
Up 76 106
mean(lda.class == Direction.2005)
[1] 0.5595238
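The LDA confusion matrix and test accuracy are identical to those of the two-predictor logistic regression above. How closely the two sets of predictions actually agree, day by day, can be checked directly (reusing glm.pred from the logistic fit):
mean(glm.pred == lda.class)   # fraction of test days on which logistic regression and LDA agree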
sum(lda.pred$posterior[,1] >= 0.5)
[1] 70
sum(lda.pred$posterior[,1] < 0.5)
[1] 182
lda.pred$posterior[1:20, 1]
999 1000 1001 1002 1003
0.4901792 0.4792185 0.4668185 0.4740011 0.4927877
1004 1005 1006 1007 1008
0.4938562 0.4951016 0.4872861 0.4907013 0.4844026
1009 1010 1011 1012 1013
0.4906963 0.5119988 0.4895152 0.4706761 0.4744593
1014 1015 1016 1017 1018
0.4799583 0.4935775 0.5030894 0.4978806 0.4886331
lda.class[1:20]
[1] Up Up Up Up Up Up Up Up Up
[10] Up Up Down Up Up Up Up Up Down
[19] Up Up
Levels: Down Up
sum(lda.pred$posterior[,1] > 0.9)
[1] 0
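A 90% posterior threshold for predicting a decrease is never met in 2005. To see how close the classifier ever gets, inspect the largest posterior probability of Down directly:
max(lda.pred$posterior[, 1])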
Quadratic Discriminant Analysis
qda.fit = qda(Direction ~ Lag1 + Lag2, data = Smarket, subset = train)
qda.fit
Call:
qda(Direction ~ Lag1 + Lag2, data = Smarket, subset = train)
Prior probabilities of groups:
Down Up
0.491984 0.508016
Group means:
Lag1 Lag2
Down 0.04279022 0.03389409
Up -0.03954635 -0.03132544
qda.class = predict(qda.fit, Smarket.2005)$class
table(qda.class, Direction.2005)
Direction.2005
qda.class Down Up
Down 30 20
Up 81 121
mean(qda.class == Direction.2005)
[1] 0.5992063
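QDA's 60% test accuracy is noticeably better than logistic regression and LDA, suggesting that a quadratic decision boundary captures the relationship more closely. A quick cross-tabulation (a sketch, not in the original lab) shows where the two discriminant methods disagree:
table(lda.class, qda.class)   # disagreements between LDA and QDA lie off the diagonal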
K-Nearest Neighbors
library(class)
train.X = cbind(Lag1, Lag2)[train,]
test.X = cbind(Lag1, Lag2)[!train,]
train.Direction = Direction[train]
set.seed(1)
knn.pred = knn(train.X, test.X, train.Direction, k = 1)
table(knn.pred, Direction.2005)
Direction.2005
knn.pred Down Up
Down 43 58
Up 68 83
(83+43)/252
[1] 0.5
knn.pred = knn(train.X, test.X, train.Direction, k = 3)
table(knn.pred, Direction.2005)
Direction.2005
knn.pred Down Up
Down 48 54
Up 63 87
mean(knn.pred == Direction.2005)
[1] 0.5357143
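Increasing K from 1 to 3 improves the test accuracy slightly. A small sketch (not part of the original lab) scans a few more values of K:
set.seed(1)
for (k in c(1, 3, 5, 7, 9)) {
  pred = knn(train.X, test.X, train.Direction, k = k)
  cat("k =", k, "accuracy =", mean(pred == Direction.2005), "\n")
}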
An Application to Caravan Insurance Data
dim(Caravan)
[1] 5822 86
attach(Caravan)
summary(Purchase)
No Yes
5474 348
348/5822
[1] 0.05977327
standardized.X = scale(Caravan[,-86])
var(Caravan[,1])
[1] 165.0378
var(Caravan[,2])
[1] 0.1647078
var(standardized.X[,1])
[1] 1
var(standardized.X[,2])
[1] 1
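scale() centers each column and divides by its sample standard deviation, which is why every standardized column has variance one. As a sketch, the first column can be verified against the manual computation:
all.equal(as.numeric(standardized.X[, 1]),
          (Caravan[, 1] - mean(Caravan[, 1])) / sd(Caravan[, 1]))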
test = 1:1000
train.X = standardized.X[-test,]
test.X = standardized.X[test,]
train.Y = Purchase[-test]
test.Y = Purchase[test]
set.seed(1)
knn.pred = knn(train.X, test.X, train.Y, k = 1)
mean(test.Y != knn.pred)
[1] 0.118
mean(test.Y != "No")
[1] 0.059
table(knn.pred, test.Y)
test.Y
knn.pred No Yes
No 873 50
Yes 68 9
9/(68+9)
[1] 0.1168831
knn.pred = knn(train.X, test.X, train.Y, k = 3)
table(knn.pred, test.Y)
test.Y
knn.pred No Yes
No 920 54
Yes 21 5
5/26
[1] 0.1923077
knn.pred = knn(train.X, test.X, train.Y, k = 5)
table(knn.pred, test.Y)
test.Y
knn.pred No Yes
No 930 55
Yes 11 4
4/15
[1] 0.2666667
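The success rate among customers predicted to buy keeps improving as K grows. That rate is the precision of the "Yes" row, which can be read off the table programmatically rather than by hand (tab is our name for the stored table):
tab = table(knn.pred, test.Y)
tab["Yes", "Yes"] / sum(tab["Yes", ])   # fraction of predicted buyers who actually buy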
glm.fit = glm(Purchase ~ ., data = Caravan, family = binomial, subset = -test)
Warning message:
glm.fit: fitted probabilities numerically 0 or 1 occurred
glm.probs = predict(glm.fit, Caravan[test,], type = "response")
glm.pred = rep("No", 1000)
glm.pred[glm.probs > 0.5] = "Yes"
table(glm.pred, test.Y)
test.Y
glm.pred No Yes
No 934 59
Yes 7 0
glm.pred = rep("No", 1000)
glm.pred[glm.probs > 0.25] = "Yes"
table(glm.pred, test.Y)
test.Y
glm.pred No Yes
No 919 48
Yes 22 11
11/(22+11)
[1] 0.3333333
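With the 0.25 cutoff, about one in three of the customers flagged by logistic regression actually buys insurance, more than five times the roughly 6% base rate. A natural extension (a sketch, not in the original lab) is to rank the test customers by predicted probability and target only the top of the list:
top100 = order(glm.probs, decreasing = TRUE)[1:100]   # the 100 highest predicted probabilities
table(test.Y[top100])                                 # how many of those customers actually purchase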