Exploratory Analysis

library(dplyr)
setwd("/Users/jayavarshini/Desktop/RpubsProjects/")
# read.csv already returns a data frame, so a data.frame() wrapper is not needed
college.df <- read.csv("College.csv", sep = ",", header = TRUE)
head(college.df)

1.1 b

Private_univ=table(college.df$Private)
Private_univ

 No Yes 
212 565 
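For a quick relative comparison, the same counts can be expressed as proportions; a minimal sketch using base R's prop.table() on the table computed above:

# share of private vs. public universities in the data set
round(prop.table(Private_univ), 3)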

1.1 c

private_list <- filter(college.df, Private == "Yes")
private_list
public_list <- filter(college.df, Private == "No")
public_list
hist(private_list$PhD, main = "PhD holders in Private Universities", probability = TRUE, xlab = "Percent of faculty with PhDs", ylab = "Density", border = "red")
lines(density(private_list$PhD), col = "blue")

hist(public_list$PhD, main = "PhD holders in Public Universities", probability = TRUE, xlab = "Percent of faculty with PhDs", ylab = "Density", border = "blue")
# use the public-university data here (the original reused private_list by mistake)
lines(density(public_list$PhD), col = "red")

print("As we can see, private universities have many PhD faculty")
[1] "As we can see, private universities have many PhD faculty"

1.1 d

Grad_Rate <- arrange(college.df, Grad.Rate)
head(select(Grad_Rate, Name, Grad.Rate), 5)
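The same result can be written as a single dplyr pipeline; a minimal equivalent sketch:

# five universities with the lowest graduation rates, as one pipeline
college.df %>% arrange(Grad.Rate) %>% select(Name, Grad.Rate) %>% head(5)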

1.1 e

1.1 e i

summary(college.df)
                           Name     Private        Apps           Accept          Enroll       Top10perc    
 Abilene Christian University:  1   No :212   Min.   :   81   Min.   :   72   Min.   :  35   Min.   : 1.00  
 Adelphi University          :  1   Yes:565   1st Qu.:  776   1st Qu.:  604   1st Qu.: 242   1st Qu.:15.00  
 Adrian College              :  1             Median : 1558   Median : 1110   Median : 434   Median :23.00  
 Agnes Scott College         :  1             Mean   : 3002   Mean   : 2019   Mean   : 780   Mean   :27.56  
 Alaska Pacific University   :  1             3rd Qu.: 3624   3rd Qu.: 2424   3rd Qu.: 902   3rd Qu.:35.00  
 Albertson College           :  1             Max.   :48094   Max.   :26330   Max.   :6392   Max.   :96.00  
 (Other)                     :771                                                                           
   Top25perc      F.Undergrad     P.Undergrad         Outstate       Room.Board       Books       
 Min.   :  9.0   Min.   :  139   Min.   :    1.0   Min.   : 2340   Min.   :1780   Min.   :  96.0  
 1st Qu.: 41.0   1st Qu.:  992   1st Qu.:   95.0   1st Qu.: 7320   1st Qu.:3597   1st Qu.: 470.0  
 Median : 54.0   Median : 1707   Median :  353.0   Median : 9990   Median :4200   Median : 500.0  
 Mean   : 55.8   Mean   : 3700   Mean   :  855.3   Mean   :10441   Mean   :4358   Mean   : 549.4  
 3rd Qu.: 69.0   3rd Qu.: 4005   3rd Qu.:  967.0   3rd Qu.:12925   3rd Qu.:5050   3rd Qu.: 600.0  
 Max.   :100.0   Max.   :31643   Max.   :21836.0   Max.   :21700   Max.   :8124   Max.   :2340.0  
                                                                                                  
    Personal         PhD            Terminal       S.F.Ratio      perc.alumni        Expend     
 Min.   : 250   Min.   :  8.00   Min.   : 24.0   Min.   : 2.50   Min.   : 0.00   Min.   : 3186  
 1st Qu.: 850   1st Qu.: 62.00   1st Qu.: 71.0   1st Qu.:11.50   1st Qu.:13.00   1st Qu.: 6751  
 Median :1200   Median : 75.00   Median : 82.0   Median :13.60   Median :21.00   Median : 8377  
 Mean   :1341   Mean   : 72.66   Mean   : 79.7   Mean   :14.09   Mean   :22.74   Mean   : 9660  
 3rd Qu.:1700   3rd Qu.: 85.00   3rd Qu.: 92.0   3rd Qu.:16.50   3rd Qu.:31.00   3rd Qu.:10830  
 Max.   :6800   Max.   :103.00   Max.   :100.0   Max.   :39.80   Max.   :64.00   Max.   :56233  
                                                                                                
   Grad.Rate     
 Min.   : 10.00  
 1st Qu.: 53.00  
 Median : 65.00  
 Mean   : 65.46  
 3rd Qu.: 78.00  
 Max.   :118.00  
                 

1.1 e ii

pairs(college.df[, 1:10])

1.1 e iii

boxplot(private_list$perc.alumni, public_list$perc.alumni, main = "Alumni who donate to their college \n (RED: Private, BLUE: Public)", names = c("Private", "Public"), ylab = "% of alumni who donate", col = c("red", "blue"))

1.1 e iv

boxplot(private_list$PhD, public_list$PhD, main = "Faculty with PhDs \n (RED: Private, BLUE: Public)", names = c("Private", "Public"), ylab = "% of faculty with PhDs", col = c("red", "blue"))

1.1 e v

Elite <-rep("No", nrow(college.df))
Elite[college.df$Top10perc > 50] <-"Yes"
Elite<-as.factor(Elite)
college<-data.frame(college.df,Elite)
summary(Elite)
 No Yes 
699  78 
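The same flag can be built in one step with ifelse(); a minimal equivalent sketch (Elite_alt is an illustrative name, not part of the original analysis):

# one-line construction of the same Yes/No factor
Elite_alt <- factor(ifelse(college.df$Top10perc > 50, "Yes", "No"))
summary(Elite_alt)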

1.1 e vi

par(mfrow=c(2,2))
hist(college.df$Top10perc, breaks = 10, col="red",main="Top 10%",xlab="Top 10 Percent")
hist(college.df$Personal, breaks =15, col="green",main="Personal Spending",xlab="Amount")
hist(college.df$perc.alumni, breaks=20, col="orange", main="Percentage of Alumni",xlab="% of Alumni")
hist(college.df$PhD, breaks=25, col="blue", main="Faculty with PhD",xlab="PhD count")

1.1 e vii

# Exploratory analysis: compare the total expenses at private vs. public universities
library(ggplot2)
#str(college)
df_new <- college.df[, c(1, 2, 10, 11, 12, 13)]
df_new <- transform(df_new, Total_expense = Outstate + Room.Board + Personal + Books)
df_new_private <- df_new[df_new$Private == "Yes", ]
df_new_public <- df_new[df_new$Private == "No", ]
mean_tot_private <- mean(df_new_private$Total_expense)
print("Average total cost (out-of-state tuition + room & board + books + personal) for a student at a private university")
[1] "Average total cost (out-of-state tuition + room & board + books + personal) for a student at a private university"
mean_tot_private
[1] 18149.78
mean_tot_pub <- mean(df_new_public$Total_expense)
print("Average total cost for a student at a public university")
[1] "Average total cost for a student at a public university"
mean_tot_pub
[1] 12793.01
print("This shows private universities are costlier than public universities")
[1] "This shows private universities are costlier than public universities"

Linear Regression

1.2 a i

library(ISLR)
data(Auto)
model<-lm(mpg~horsepower,data=Auto)
summary(model)

Call:
lm(formula = mpg ~ horsepower, data = Auto)

Residuals:
     Min       1Q   Median       3Q      Max 
-13.5710  -3.2592  -0.3435   2.7630  16.9240 

Coefficients:
             Estimate Std. Error t value Pr(>|t|)    
(Intercept) 39.935861   0.717499   55.66   <2e-16 ***
horsepower  -0.157845   0.006446  -24.49   <2e-16 ***
---
Signif. codes:  0 ‘***’ 0.001 ‘**’ 0.01 ‘*’ 0.05 ‘.’ 0.1 ‘ ’ 1

Residual standard error: 4.906 on 390 degrees of freedom
Multiple R-squared:  0.6059,    Adjusted R-squared:  0.6049 
F-statistic: 599.7 on 1 and 390 DF,  p-value: < 2.2e-16
print("The p value is the probability that the current result is found. As we know, if the p value is less than 5% ie., 0.5 the correlation co efficient is statiscally significant. Here the p-value is < 2.2e-16")
[1] "The p value is the probability that the current result is found. As we know, if the p value is less than 5% ie., 0.5 the correlation co efficient is statiscally significant. Here the p-value is < 2.2e-16"

1.2 a ii

print("R squared value is the mesaure of the variation between the dependent variable and the independent variable in the linear regression model. The adjusted r squared value is 0.6059 showing there is a big variation")
[1] "R squared value is the mesaure of the variation between the dependent variable and the independent variable in the linear regression model. The adjusted r squared value is 0.6059 showing there is a big variation"

1.2 a iii

print("Negative. High horse power -> Less mpg")

1.2 a iv

print("Predicted mpg with horsepower=98 associated with 95% prediction interval")
[1] "Predicted mpg with horsepower=98 associated with 95% prediction interval"
df_98p=data.frame(horsepower=98)
predict(model, df_98p, interval = "prediction",level=0.95)
       fit     lwr      upr
1 24.46708 14.8094 34.12476
print("Predicted mpg with horsepower=98 associated with 95% confidence interval")
[1] "Predicted mpg with horsepower=98 associated with 95% confidence interval"
df_98c=data.frame(horsepower = 98)
predict(model, df_98c, interval = "confidence",level=0.95)
       fit      lwr      upr
1 24.46708 23.97308 24.96108
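The fitted value reported in both tables can be reproduced directly from the estimated coefficients; a minimal sketch:

# 39.935861 + (-0.157845) * 98 is approximately 24.47
coef(model)[1] + coef(model)[2] * 98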

1.2 b

plot(Auto$horsepower, Auto$mpg, main = "Scatterplot", xlab = "horsepower", ylab = "mpg", col = "yellow")
abline(model, col = "green")

1.2 c

par(mfrow = c(2, 2))
plot(model)

print("Residuals show how poorly the model represent data. Graph 1: The plot shows if the residuals have non linear patterns. The residuals are not spread out equally around the line, means there might be some non linear relationships. Graph 2: The residuals almost lie perfect in the straight line : Meaning the residuals are normally distributed Graph 3: To check the equal variance: for this model, the residuals are spread acroos one half of the graph. Graph 4: Has many leverage points. ")

Multiple Linear Regression

pairs(Auto)

1.3 b

cor(subset(Auto, select=-c(name)))
                    mpg  cylinders displacement horsepower     weight acceleration       year     origin
mpg           1.0000000 -0.7776175   -0.8051269 -0.7784268 -0.8322442    0.4233285  0.5805410  0.5652088
cylinders    -0.7776175  1.0000000    0.9508233  0.8429834  0.8975273   -0.5046834 -0.3456474 -0.5689316
displacement -0.8051269  0.9508233    1.0000000  0.8972570  0.9329944   -0.5438005 -0.3698552 -0.6145351
horsepower   -0.7784268  0.8429834    0.8972570  1.0000000  0.8645377   -0.6891955 -0.4163615 -0.4551715
weight       -0.8322442  0.8975273    0.9329944  0.8645377  1.0000000   -0.4168392 -0.3091199 -0.5850054
acceleration  0.4233285 -0.5046834   -0.5438005 -0.6891955 -0.4168392    1.0000000  0.2903161  0.2127458
year          0.5805410 -0.3456474   -0.3698552 -0.4163615 -0.3091199    0.2903161  1.0000000  0.1815277
origin        0.5652088 -0.5689316   -0.6145351 -0.4551715 -0.5850054    0.2127458  0.1815277  1.0000000
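To read the matrix more easily, the correlations with mpg can be extracted and sorted; a minimal sketch:

# correlations of every numeric variable with mpg, most negative first
sort(cor(subset(Auto, select = -c(name)))[, "mpg"])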

1.3 c

model_2 <- lm(mpg ~ . - name, data = Auto)
summary(model_2)

Call:
lm(formula = mpg ~ . - name, data = Auto)

Residuals:
    Min      1Q  Median      3Q     Max 
-9.5903 -2.1565 -0.1169  1.8690 13.0604 

Coefficients:
               Estimate Std. Error t value Pr(>|t|)    
(Intercept)  -17.218435   4.644294  -3.707  0.00024 ***
cylinders     -0.493376   0.323282  -1.526  0.12780    
displacement   0.019896   0.007515   2.647  0.00844 ** 
horsepower    -0.016951   0.013787  -1.230  0.21963    
weight        -0.006474   0.000652  -9.929  < 2e-16 ***
acceleration   0.080576   0.098845   0.815  0.41548    
year           0.750773   0.050973  14.729  < 2e-16 ***
origin         1.426141   0.278136   5.127 4.67e-07 ***
---
Signif. codes:  0 ‘***’ 0.001 ‘**’ 0.01 ‘*’ 0.05 ‘.’ 0.1 ‘ ’ 1

Residual standard error: 3.328 on 384 degrees of freedom
Multiple R-squared:  0.8215,    Adjusted R-squared:  0.8182 
F-statistic: 252.4 on 7 and 384 DF,  p-value: < 2.2e-16
print("i The p value is less than 0.5 showing there is a relationship ")
print("ii Year,Origin,Weight and Displacement(Choosing from the summary of the model")
print("iii As year increases, the mpg increases since it has a positive correlation")

1.3 d

par(mfrow=c(2,2))
plot(model_2)

print("There is a non linearlity in the model (graph1) The residuals almost lie perfect in the straight line : Meaning the residuals are normally distributed. From Graph 4: It shows it has a leverage point.")