Introduction:

Below is the house prices dataset available from Kaggle.com. It contains 1460 observations of houses in Ames, Iowa, and 81 columns describing each house and its sale price.

Link to the data set and description:

https://www.kaggle.com/c/house-prices-advanced-regression-techniques/data

library(knitr)
library(DT)
library(MASS)
library(reshape)
library(ggplot2)

traindf <- read.csv('https://raw.githubusercontent.com/raghu74us/DATA-605/master/Final%20Project/train.csv')
  
datatable(traindf, options = list( pageLength = 5, lengthMenu = c(5, 10, 40),   initComplete = JS(
    "function(settings, json) {",
    "$(this.api().table().header()).css({'background-color': '#01975b', 'color': '#fff'});",
    "}")), rownames=TRUE)

Variables

Pick one of the quantitative independent variables from the training data set (train.csv) and define that variable as X. Pick SalePrice as the dependent variable and define it as Y for the analysis that follows.

#chosen variable
X<-traindf$OverallCond
Y<-traindf$SalePrice

plot(X,Y, col="#4caf50", main="Scatterplot of Overall Condition and Sale Price", xlab = "Overall Condition", ylab="Sale Price")
abline(lm(Y~X), col="yellow", lwd=3) # regression line (y~x) 

hist(X, col="green", main="Histogram of Overall Condition", xlab = "Overall Condition")

hist(Y, col="#80cbc4", main="Histogram of Sale Price", xlab = "Sale Price")

print("Summary of X variable: Overall Condition")
## [1] "Summary of X variable: Overall Condition"
summary(X)
##    Min. 1st Qu.  Median    Mean 3rd Qu.    Max. 
##   1.000   5.000   5.000   5.575   6.000   9.000
boxplot(X)

print("Summary of X variable: Sale Price")
## [1] "Summary of X variable: Sale Price"
summary(Y)
##    Min. 1st Qu.  Median    Mean 3rd Qu.    Max. 
##   34900  130000  163000  180900  214000  755000
boxplot(Y)

Probability

Calculate, at a minimum, probabilities a through c below. Assume the lowercase “x” is the 1st quartile of the X variable and the lowercase “y” is the 2nd quartile (median) of the Y variable. Interpret the meaning of each probability.

  a. \[ P(X>x | Y>y) \]: the probability that a house's overall condition is above the first quartile (5), given that its sale price is above the median ($163,000).
XQ1<-quantile(X, probs=0.25)  #1st quartile of X variable
XQ1
## 25% 
##   5
YQ2<-quantile(Y, probs=0.50) #2nd quartile, or median, of Y variable
YQ2
##    50% 
## 163000
n<-(nrow(traindf))
n
## [1] 1460
overallcond<-as.numeric(traindf$OverallCond)
saleprice<-as.numeric(traindf$SalePrice)

nYQ2<-nrow(subset(traindf,saleprice>YQ2))
nYQ2
## [1] 728
p1<-nrow(subset(traindf, overallcond > XQ1 & saleprice>YQ2))/nYQ2
p1
## [1] 0.2431319
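Written out with the counts computed above (728 houses sell above the median price, and 0.2431 x 728 = 177 of those also have an overall condition above the first quartile), this conditional probability is:

\[ P(X>x | Y>y) = \frac{\#(X>x \& Y>y)}{\#(Y>y)} = \frac{177}{728} \approx 0.243 \]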
  b. \[ P(X>x \& Y>y) \]: the probability that a house's overall condition is above the first quartile and its sale price is above the median.
p2<-nrow(subset(traindf, overallcond > XQ1 & saleprice>YQ2))/n
p2
## [1] 0.1212329
  c. \[ P(X<x | Y>y) \]: the probability that a house's overall condition is strictly below the first quartile, given that its sale price is above the median.
p3<-nrow(subset(traindf, overallcond < XQ1 & saleprice>YQ2))/nYQ2
p3
## [1] 0.01648352

Independence

Does splitting the training data in this fashion make them independent? In other words, does P(XY) = P(X)P(Y)? Check mathematically, and then evaluate by running a Chi-Square test for association. You might have to research this.

c1<-nrow(subset(traindf, overallcond <=XQ1 & saleprice<=YQ2))/n
c2<-nrow(subset(traindf, overallcond <=XQ1 & saleprice>YQ2))/n
c4<-nrow(subset(traindf, overallcond >XQ1 & saleprice<=YQ2))/n
c5<-nrow(subset(traindf, overallcond >XQ1 & saleprice>YQ2))/n

dfcounts<-matrix(round(c(c1,c2,c4,c5),3), ncol=2, nrow=2, byrow=TRUE)
rownames(dfcounts)<-c(
"X <= 1st quartile",
"X > 1st quartile")
colnames(dfcounts)<-c("Y <= 2nd quartile","Y > 2nd quartile")

print("Quartile Matrix Percent")
## [1] "Quartile Matrix Percent"
dfcounts<-as.table(dfcounts)
addmargins(dfcounts)
##                   Y <= 2nd quartile Y > 2nd quartile   Sum
## X <= 1st quartile             0.245            0.377 0.622
## X > 1st quartile              0.256            0.121 0.377
## Sum                           0.501            0.498 0.999
pA<-c4+c5  #P(A) = P(X > x)
pB<-c2+c5  #P(B) = P(Y > y)
print (paste0("p(A)*p(B)=", round(pA*pB,3)))
## [1] "p(A)*p(B)=0.188"

\[ p(AB)=p(X>x \& Y>y) \approx 0.121 \]

\[ p(A)*p(B) \approx 0.377 \times 0.498 \approx 0.188 \]
\[ p(AB) \neq p(A)*p(B) \]

Since the joint probability does not equal the product of the marginal probabilities, splitting the data this way does not make the events independent.

chisq.test(dfcounts, correct=TRUE) 
## Warning in chisq.test(dfcounts, correct = TRUE): Chi-squared approximation
## may be incorrect
## 
##  Pearson's Chi-squared test with Yates' continuity correction
## 
## data:  dfcounts
## X-squared = 1.4013e-32, df = 1, p-value = 1
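The warning above arises because chisq.test was run on the table of rounded proportions rather than on counts, which is also why the test statistic is essentially zero. A minimal sketch of the same test on the raw counts, reusing XQ1 and YQ2 from above (output not shown):

#chi-square test of association on the 2x2 table of counts rather than proportions
counts <- table(OverallCond = traindf$OverallCond > XQ1, SalePrice = traindf$SalePrice > YQ2)
chisq.test(counts, correct=TRUE)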

Statistics

Descriptive and Inferential Statistics. Provide univariate descriptive statistics and appropriate plots for both variables. Provide a scatterplot of X and Y. Transform both variables simultaneously using Box-Cox transformations. You might have to research this.

summary(traindf)
##        Id           MSSubClass       MSZoning     LotFrontage    
##  Min.   :   1.0   Min.   : 20.0   C (all):  10   Min.   : 21.00  
##  1st Qu.: 365.8   1st Qu.: 20.0   FV     :  65   1st Qu.: 59.00  
##  Median : 730.5   Median : 50.0   RH     :  16   Median : 69.00  
##  Mean   : 730.5   Mean   : 56.9   RL     :1151   Mean   : 70.05  
##  3rd Qu.:1095.2   3rd Qu.: 70.0   RM     : 218   3rd Qu.: 80.00  
##  Max.   :1460.0   Max.   :190.0                  Max.   :313.00  
##                                                  NA's   :259     
##     LotArea        Street      Alley      LotShape  LandContour
##  Min.   :  1300   Grvl:   6   Grvl:  50   IR1:484   Bnk:  63   
##  1st Qu.:  7554   Pave:1454   Pave:  41   IR2: 41   HLS:  50   
##  Median :  9478               NA's:1369   IR3: 10   Low:  36   
##  Mean   : 10517                           Reg:925   Lvl:1311   
##  3rd Qu.: 11602                                                
##  Max.   :215245                                                
##                                                                
##   Utilities      LotConfig    LandSlope   Neighborhood   Condition1  
##  AllPub:1459   Corner : 263   Gtl:1382   NAmes  :225   Norm   :1260  
##  NoSeWa:   1   CulDSac:  94   Mod:  65   CollgCr:150   Feedr  :  81  
##                FR2    :  47   Sev:  13   OldTown:113   Artery :  48  
##                FR3    :   4              Edwards:100   RRAn   :  26  
##                Inside :1052              Somerst: 86   PosN   :  19  
##                                          Gilbert: 79   RRAe   :  11  
##                                          (Other):707   (Other):  15  
##    Condition2     BldgType      HouseStyle   OverallQual    
##  Norm   :1445   1Fam  :1220   1Story :726   Min.   : 1.000  
##  Feedr  :   6   2fmCon:  31   2Story :445   1st Qu.: 5.000  
##  Artery :   2   Duplex:  52   1.5Fin :154   Median : 6.000  
##  PosN   :   2   Twnhs :  43   SLvl   : 65   Mean   : 6.099  
##  RRNn   :   2   TwnhsE: 114   SFoyer : 37   3rd Qu.: 7.000  
##  PosA   :   1                 1.5Unf : 14   Max.   :10.000  
##  (Other):   2                 (Other): 19                   
##   OverallCond      YearBuilt     YearRemodAdd    RoofStyle   
##  Min.   :1.000   Min.   :1872   Min.   :1950   Flat   :  13  
##  1st Qu.:5.000   1st Qu.:1954   1st Qu.:1967   Gable  :1141  
##  Median :5.000   Median :1973   Median :1994   Gambrel:  11  
##  Mean   :5.575   Mean   :1971   Mean   :1985   Hip    : 286  
##  3rd Qu.:6.000   3rd Qu.:2000   3rd Qu.:2004   Mansard:   7  
##  Max.   :9.000   Max.   :2010   Max.   :2010   Shed   :   2  
##                                                              
##     RoofMatl     Exterior1st   Exterior2nd    MasVnrType    MasVnrArea    
##  CompShg:1434   VinylSd:515   VinylSd:504   BrkCmn : 15   Min.   :   0.0  
##  Tar&Grv:  11   HdBoard:222   MetalSd:214   BrkFace:445   1st Qu.:   0.0  
##  WdShngl:   6   MetalSd:220   HdBoard:207   None   :864   Median :   0.0  
##  WdShake:   5   Wd Sdng:206   Wd Sdng:197   Stone  :128   Mean   : 103.7  
##  ClyTile:   1   Plywood:108   Plywood:142   NA's   :  8   3rd Qu.: 166.0  
##  Membran:   1   CemntBd: 61   CmentBd: 60                 Max.   :1600.0  
##  (Other):   2   (Other):128   (Other):136                 NA's   :8       
##  ExterQual ExterCond  Foundation  BsmtQual   BsmtCond    BsmtExposure
##  Ex: 52    Ex:   3   BrkTil:146   Ex  :121   Fa  :  45   Av  :221    
##  Fa: 14    Fa:  28   CBlock:634   Fa  : 35   Gd  :  65   Gd  :134    
##  Gd:488    Gd: 146   PConc :647   Gd  :618   Po  :   2   Mn  :114    
##  TA:906    Po:   1   Slab  : 24   TA  :649   TA  :1311   No  :953    
##            TA:1282   Stone :  6   NA's: 37   NA's:  37   NA's: 38    
##                      Wood  :  3                                      
##                                                                      
##  BsmtFinType1   BsmtFinSF1     BsmtFinType2   BsmtFinSF2     
##  ALQ :220     Min.   :   0.0   ALQ :  19    Min.   :   0.00  
##  BLQ :148     1st Qu.:   0.0   BLQ :  33    1st Qu.:   0.00  
##  GLQ :418     Median : 383.5   GLQ :  14    Median :   0.00  
##  LwQ : 74     Mean   : 443.6   LwQ :  46    Mean   :  46.55  
##  Rec :133     3rd Qu.: 712.2   Rec :  54    3rd Qu.:   0.00  
##  Unf :430     Max.   :5644.0   Unf :1256    Max.   :1474.00  
##  NA's: 37                      NA's:  38                     
##    BsmtUnfSF       TotalBsmtSF      Heating     HeatingQC CentralAir
##  Min.   :   0.0   Min.   :   0.0   Floor:   1   Ex:741    N:  95    
##  1st Qu.: 223.0   1st Qu.: 795.8   GasA :1428   Fa: 49    Y:1365    
##  Median : 477.5   Median : 991.5   GasW :  18   Gd:241              
##  Mean   : 567.2   Mean   :1057.4   Grav :   7   Po:  1              
##  3rd Qu.: 808.0   3rd Qu.:1298.2   OthW :   2   TA:428              
##  Max.   :2336.0   Max.   :6110.0   Wall :   4                       
##                                                                     
##  Electrical     X1stFlrSF      X2ndFlrSF     LowQualFinSF    
##  FuseA:  94   Min.   : 334   Min.   :   0   Min.   :  0.000  
##  FuseF:  27   1st Qu.: 882   1st Qu.:   0   1st Qu.:  0.000  
##  FuseP:   3   Median :1087   Median :   0   Median :  0.000  
##  Mix  :   1   Mean   :1163   Mean   : 347   Mean   :  5.845  
##  SBrkr:1334   3rd Qu.:1391   3rd Qu.: 728   3rd Qu.:  0.000  
##  NA's :   1   Max.   :4692   Max.   :2065   Max.   :572.000  
##                                                              
##    GrLivArea     BsmtFullBath     BsmtHalfBath        FullBath    
##  Min.   : 334   Min.   :0.0000   Min.   :0.00000   Min.   :0.000  
##  1st Qu.:1130   1st Qu.:0.0000   1st Qu.:0.00000   1st Qu.:1.000  
##  Median :1464   Median :0.0000   Median :0.00000   Median :2.000  
##  Mean   :1515   Mean   :0.4253   Mean   :0.05753   Mean   :1.565  
##  3rd Qu.:1777   3rd Qu.:1.0000   3rd Qu.:0.00000   3rd Qu.:2.000  
##  Max.   :5642   Max.   :3.0000   Max.   :2.00000   Max.   :3.000  
##                                                                   
##     HalfBath       BedroomAbvGr    KitchenAbvGr   KitchenQual
##  Min.   :0.0000   Min.   :0.000   Min.   :0.000   Ex:100     
##  1st Qu.:0.0000   1st Qu.:2.000   1st Qu.:1.000   Fa: 39     
##  Median :0.0000   Median :3.000   Median :1.000   Gd:586     
##  Mean   :0.3829   Mean   :2.866   Mean   :1.047   TA:735     
##  3rd Qu.:1.0000   3rd Qu.:3.000   3rd Qu.:1.000              
##  Max.   :2.0000   Max.   :8.000   Max.   :3.000              
##                                                              
##   TotRmsAbvGrd    Functional    Fireplaces    FireplaceQu   GarageType 
##  Min.   : 2.000   Maj1:  14   Min.   :0.000   Ex  : 24    2Types :  6  
##  1st Qu.: 5.000   Maj2:   5   1st Qu.:0.000   Fa  : 33    Attchd :870  
##  Median : 6.000   Min1:  31   Median :1.000   Gd  :380    Basment: 19  
##  Mean   : 6.518   Min2:  34   Mean   :0.613   Po  : 20    BuiltIn: 88  
##  3rd Qu.: 7.000   Mod :  15   3rd Qu.:1.000   TA  :313    CarPort:  9  
##  Max.   :14.000   Sev :   1   Max.   :3.000   NA's:690    Detchd :387  
##                   Typ :1360                               NA's   : 81  
##   GarageYrBlt   GarageFinish   GarageCars      GarageArea     GarageQual 
##  Min.   :1900   Fin :352     Min.   :0.000   Min.   :   0.0   Ex  :   3  
##  1st Qu.:1961   RFn :422     1st Qu.:1.000   1st Qu.: 334.5   Fa  :  48  
##  Median :1980   Unf :605     Median :2.000   Median : 480.0   Gd  :  14  
##  Mean   :1979   NA's: 81     Mean   :1.767   Mean   : 473.0   Po  :   3  
##  3rd Qu.:2002                3rd Qu.:2.000   3rd Qu.: 576.0   TA  :1311  
##  Max.   :2010                Max.   :4.000   Max.   :1418.0   NA's:  81  
##  NA's   :81                                                              
##  GarageCond  PavedDrive   WoodDeckSF      OpenPorchSF     EnclosedPorch   
##  Ex  :   2   N:  90     Min.   :  0.00   Min.   :  0.00   Min.   :  0.00  
##  Fa  :  35   P:  30     1st Qu.:  0.00   1st Qu.:  0.00   1st Qu.:  0.00  
##  Gd  :   9   Y:1340     Median :  0.00   Median : 25.00   Median :  0.00  
##  Po  :   7              Mean   : 94.24   Mean   : 46.66   Mean   : 21.95  
##  TA  :1326              3rd Qu.:168.00   3rd Qu.: 68.00   3rd Qu.:  0.00  
##  NA's:  81              Max.   :857.00   Max.   :547.00   Max.   :552.00  
##                                                                           
##    X3SsnPorch      ScreenPorch        PoolArea        PoolQC    
##  Min.   :  0.00   Min.   :  0.00   Min.   :  0.000   Ex  :   2  
##  1st Qu.:  0.00   1st Qu.:  0.00   1st Qu.:  0.000   Fa  :   2  
##  Median :  0.00   Median :  0.00   Median :  0.000   Gd  :   3  
##  Mean   :  3.41   Mean   : 15.06   Mean   :  2.759   NA's:1453  
##  3rd Qu.:  0.00   3rd Qu.:  0.00   3rd Qu.:  0.000              
##  Max.   :508.00   Max.   :480.00   Max.   :738.000              
##                                                                 
##    Fence      MiscFeature    MiscVal             MoSold      
##  GdPrv:  59   Gar2:   2   Min.   :    0.00   Min.   : 1.000  
##  GdWo :  54   Othr:   2   1st Qu.:    0.00   1st Qu.: 5.000  
##  MnPrv: 157   Shed:  49   Median :    0.00   Median : 6.000  
##  MnWw :  11   TenC:   1   Mean   :   43.49   Mean   : 6.322  
##  NA's :1179   NA's:1406   3rd Qu.:    0.00   3rd Qu.: 8.000  
##                           Max.   :15500.00   Max.   :12.000  
##                                                              
##      YrSold        SaleType    SaleCondition    SalePrice     
##  Min.   :2006   WD     :1267   Abnorml: 101   Min.   : 34900  
##  1st Qu.:2007   New    : 122   AdjLand:   4   1st Qu.:129975  
##  Median :2008   COD    :  43   Alloca :  12   Median :163000  
##  Mean   :2008   ConLD  :   9   Family :  20   Mean   :180921  
##  3rd Qu.:2009   ConLI  :   5   Normal :1198   3rd Qu.:214000  
##  Max.   :2010   ConLw  :   5   Partial: 125   Max.   :755000  
##                 (Other):   9
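The prompt also asks for a Box-Cox transformation, which is not carried out above. A minimal sketch of one way to do it with MASS::boxcox (MASS is already loaded), using the regression of SalePrice on OverallCond; the choice of model and the lambda grid are assumptions, and the output is not shown:

#profile the Box-Cox log-likelihood over a grid of lambda values for SalePrice ~ OverallCond
bc <- boxcox(lm(SalePrice ~ OverallCond, data=traindf), lambda=seq(-1, 1, 0.05))
lambda_y <- bc$x[which.max(bc$y)]  #lambda with the highest profile log-likelihood
#apply the corresponding power transformation to SalePrice
SalePrice_bc <- (traindf$SalePrice^lambda_y - 1)/lambda_y
hist(SalePrice_bc, col="#80cbc4", main="Histogram of Box-Cox transformed Sale Price", xlab="Transformed Sale Price")
#repeating the same steps with OverallCond as the response gives a lambda for the X variable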

Confidence interval

t.test(traindf$OverallCond, traindf$SalePrice)
## 
##  Welch Two Sample t-test
## 
## data:  traindf$OverallCond and traindf$SalePrice
## t = -87.016, df = 1459, p-value < 2.2e-16
## alternative hypothesis: true difference in means is not equal to 0
## 95 percent confidence interval:
##  -184994.0 -176837.3
## sample estimates:
##    mean of x    mean of y 
## 5.575342e+00 1.809212e+05
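For reference, a 95 percent confidence interval for the mean sale price on its own can be obtained from a one-sample t test; a minimal sketch (output not shown):

#one-sample t test; conf.int holds the 95 percent confidence interval for the mean sale price
t.test(traindf$SalePrice)$conf.int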

Linear Algebra and Correlation.

Using at least three untransformed variables, build a correlation matrix. Invert your correlation matrix. (This is known as the precision matrix and contains variance inflation factors on the diagonal.) Multiply the correlation matrix by the precision matrix, and then multiply the precision matrix by the correlation matrix.

myvars<-data.frame(traindf$OverallCond, traindf$YearBuilt,traindf$SalePrice)
head(myvars) 
##   traindf.OverallCond traindf.YearBuilt traindf.SalePrice
## 1                   5              2003            208500
## 2                   8              1976            181500
## 3                   5              2001            223500
## 4                   5              1915            140000
## 5                   5              2000            250000
## 6                   5              1993            143000
cor(myvars)
##                     traindf.OverallCond traindf.YearBuilt
## traindf.OverallCond          1.00000000        -0.3759832
## traindf.YearBuilt           -0.37598320         1.0000000
## traindf.SalePrice           -0.07785589         0.5228973
##                     traindf.SalePrice
## traindf.OverallCond       -0.07785589
## traindf.YearBuilt          0.52289733
## traindf.SalePrice          1.00000000
cor.test(traindf$OverallCond + traindf$YearBuilt, traindf$SalePrice, conf.level = 0.99)
## 
##  Pearson's product-moment correlation
## 
## data:  traindf$OverallCond + traindf$YearBuilt and traindf$SalePrice
## t = 23.679, df = 1458, p-value < 2.2e-16
## alternative hypothesis: true correlation is not equal to 0
## 99 percent confidence interval:
##  0.4765613 0.5740143
## sample estimates:
##       cor 
## 0.5270181
t.test(traindf$OverallCond + traindf$YearBuilt, traindf$SalePrice, conf.level = 0.99)
## 
##  Welch Two Sample t-test
## 
## data:  traindf$OverallCond + traindf$YearBuilt and traindf$SalePrice
## t = -86.068, df = 1459, p-value < 2.2e-16
## alternative hypothesis: true difference in means is not equal to 0
## 99 percent confidence interval:
##  -184306.8 -173581.9
## sample estimates:
##  mean of x  mean of y 
##   1976.843 180921.196
mymx<-as.matrix(cor(myvars))
#correlation matrix
mymx
##                     traindf.OverallCond traindf.YearBuilt
## traindf.OverallCond          1.00000000        -0.3759832
## traindf.YearBuilt           -0.37598320         1.0000000
## traindf.SalePrice           -0.07785589         0.5228973
##                     traindf.SalePrice
## traindf.OverallCond       -0.07785589
## traindf.YearBuilt          0.52289733
## traindf.SalePrice          1.00000000
#inverse of correlation matrix, precision matrix
ginvmymx<-ginv(mymx)
ginvmymx
##            [,1]       [,2]       [,3]
## [1,]  1.1915682  0.5498376 -0.1947380
## [2,]  0.5498376  1.6300312 -0.8095309
## [3,] -0.1947380 -0.8095309  1.4081400
#corr mat * precision mat
mymxginv<-mymx%*%ginvmymx
round(mymxginv,2)
##                     [,1] [,2] [,3]
## traindf.OverallCond    1    0    0
## traindf.YearBuilt      0    1    0
## traindf.SalePrice      0    0    1
#precision mat x corr mat
ginvmymx<-ginvmymx%*%mymx
round(ginvmymx,2)
##      traindf.OverallCond traindf.YearBuilt traindf.SalePrice
## [1,]                   1                 0                 0
## [2,]                   0                 1                 0
## [3,]                   0                 0                 1
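As the prompt notes, the diagonal of the precision matrix contains the variance inflation factors of the three variables; they can be read off directly (the precision matrix is recomputed here because ginvmymx was overwritten above):

#variance inflation factors = diagonal of the precision matrix
round(diag(ginv(mymx)), 3)
## [1] 1.192 1.630 1.408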

With 99 percent confidence, the correlation between Overall Condition plus Year Built and Sale Price is estimated to lie between approximately 0.48 and 0.57.

Principal Components Analysis

Correlation matrix for all quantitative variables

#Correlation matrix of all quantitative variables in dataframe

cormatrix<-cor(traindf[,sapply(traindf, is.numeric)])
cordf<-as.data.frame(cormatrix)
kable(head(cordf))
Id MSSubClass LotFrontage LotArea OverallQual OverallCond YearBuilt YearRemodAdd MasVnrArea BsmtFinSF1 BsmtFinSF2 BsmtUnfSF TotalBsmtSF X1stFlrSF X2ndFlrSF LowQualFinSF GrLivArea BsmtFullBath BsmtHalfBath FullBath HalfBath BedroomAbvGr KitchenAbvGr TotRmsAbvGrd Fireplaces GarageYrBlt GarageCars GarageArea WoodDeckSF OpenPorchSF EnclosedPorch X3SsnPorch ScreenPorch PoolArea MiscVal MoSold YrSold SalePrice
Id 1.0000000 0.0111565 NA -0.0332255 -0.0283648 0.0126089 -0.0127127 -0.0219976 NA -0.0050240 -0.0059677 -0.0079397 -0.0154146 0.0104960 0.0055898 -0.0442300 0.0082728 0.0022886 -0.0201547 0.0055875 0.0067838 0.0377186 0.0029512 0.0272387 -0.0197716 NA 0.0165697 0.0176338 -0.0296432 -0.0004769 0.0028892 -0.0466348 0.0013302 0.0570439 -0.0062424 0.0211722 0.0007118 -0.0219167
MSSubClass 0.0111565 1.0000000 NA -0.1397811 0.0326277 -0.0593158 0.0278501 0.0405810 NA -0.0698357 -0.0656486 -0.1407595 -0.2385184 -0.2517584 0.3078857 0.0464738 0.0748532 0.0034910 -0.0023325 0.1316082 0.1773544 -0.0234380 0.2817210 0.0403801 -0.0455693 NA -0.0401098 -0.0986715 -0.0125794 -0.0061001 -0.0120366 -0.0438245 -0.0260302 0.0082827 -0.0076833 -0.0135846 -0.0214070 -0.0842841
LotFrontage NA NA 1 NA NA NA NA NA NA NA NA NA NA NA NA NA NA NA NA NA NA NA NA NA NA NA NA NA NA NA NA NA NA NA NA NA NA NA
LotArea -0.0332255 -0.1397811 NA 1.0000000 0.1058057 -0.0056363 0.0142277 0.0137884 NA 0.2141031 0.1111697 -0.0026184 0.2608331 0.2994746 0.0509859 0.0047790 0.2631162 0.1581545 0.0480456 0.1260306 0.0142595 0.1196899 -0.0177839 0.1900148 0.2713640 NA 0.1548707 0.1804028 0.1716977 0.0847738 -0.0183397 0.0204228 0.0431604 0.0776724 0.0380677 0.0012050 -0.0142614 0.2638434
OverallQual -0.0283648 0.0326277 NA 0.1058057 1.0000000 -0.0919323 0.5723228 0.5506839 NA 0.2396660 -0.0591187 0.3081589 0.5378085 0.4762238 0.2954929 -0.0304293 0.5930074 0.1110978 -0.0401502 0.5505997 0.2734581 0.1016764 -0.1838822 0.4274523 0.3967650 NA 0.6006707 0.5620218 0.2389234 0.3088188 -0.1139369 0.0303706 0.0648864 0.0651658 -0.0314062 0.0708152 -0.0273467 0.7909816
OverallCond 0.0126089 -0.0593158 NA -0.0056363 -0.0919323 1.0000000 -0.3759832 0.0737415 NA -0.0462309 0.0402292 -0.1368406 -0.1710975 -0.1442028 0.0289421 0.0254943 -0.0796859 -0.0549415 0.1178209 -0.1941495 -0.0607693 0.0129801 -0.0870009 -0.0575832 -0.0238200 NA -0.1857575 -0.1515214 -0.0033337 -0.0325888 0.0703562 0.0255037 0.0548105 -0.0019849 0.0687768 -0.0035108 0.0439497 -0.0778559
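LotFrontage, MasVnrArea, and GarageYrBlt contain missing values, which is why their rows and columns are NA above. A sketch that avoids this by using pairwise-complete observations (output not shown):

#compute correlations from all pairwise-complete observations so columns with NAs are retained
cormatrix2 <- cor(traindf[,sapply(traindf, is.numeric)], use="pairwise.complete.obs")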

Strongest pairwise correlations (diagonal entries and correlations with absolute value below 0.1 removed)

cordf[cordf == 1] <- NA #drop correlation of 1, diagonals
cordf[abs(cordf) < 0.1] <- NA # drop correlations of less than 0.1
cordf<-as.matrix(cordf)
#cordf
cordf2<- na.omit(melt(cordf)) 
kable(head(cordf2[order(-abs(cordf2$value)),]))
X1 X2 value
1016 GarageArea GarageCars 0.8824754
1053 GarageCars GarageArea 0.8824754
632 TotRmsAbvGrd GrLivArea 0.8254894
891 GrLivArea TotRmsAbvGrd 0.8254894
470 X1stFlrSF TotalBsmtSF 0.8195300
507 TotalBsmtSF X1stFlrSF 0.8195300

All variables with an absolute correlation to Sale Price of at least 0.1

#test of alternate corr approach
#myvars<-data.frame(traindf$OverallCond, traindf$SalePrice)
#head(myvars)

topcors <- cordf2[ which(cordf2$X2=='SalePrice'),]

topcorsdf<-topcors[order(-abs(topcors$value)),]# sort by highest correlations
#topcorsdf

cors1<-data.frame(topcorsdf$X1,topcorsdf$X2,topcorsdf$value)
kable(cors1)
topcorsdf.X1 topcorsdf.X2 topcorsdf.value
OverallQual SalePrice 0.7909816
GrLivArea SalePrice 0.7086245
GarageCars SalePrice 0.6404092
GarageArea SalePrice 0.6234314
TotalBsmtSF SalePrice 0.6135806
X1stFlrSF SalePrice 0.6058522
FullBath SalePrice 0.5606638
TotRmsAbvGrd SalePrice 0.5337232
YearBuilt SalePrice 0.5228973
YearRemodAdd SalePrice 0.5071010
Fireplaces SalePrice 0.4669288
BsmtFinSF1 SalePrice 0.3864198
WoodDeckSF SalePrice 0.3244134
X2ndFlrSF SalePrice 0.3193338
OpenPorchSF SalePrice 0.3158562
HalfBath SalePrice 0.2841077
LotArea SalePrice 0.2638434
BsmtFullBath SalePrice 0.2271222
BsmtUnfSF SalePrice 0.2144791
BedroomAbvGr SalePrice 0.1682132
KitchenAbvGr SalePrice -0.1359074
EnclosedPorch SalePrice -0.1285780
ScreenPorch SalePrice 0.1114466
par(mar=c(8,8,1,1))
barplot(topcorsdf$value, ylab="Correlation to Sale Price", ylim=c(-0.2,1), col=rainbow(20), las=2, names.arg=topcorsdf$X1) #ylim extends below zero so the negative correlations (e.g. KitchenAbvGr) are not clipped

Sampling

Calculus-Based Probability & Statistics. Many times, it makes sense to fit a closed form distribution to data. For your non-transformed independent variable, location shift (if necessary) it so that the minimum value is above zero. Then load the MASS package and run fitdistr to fit a density function of your choice. (See https://stat.ethz.ch/R-manual/R-devel/library/MASS/html/fitdistr.html ). Find the optimal value of the parameters for this distribution, and then take 1000 samples from this distribution (e.g., rexp(1000, ???) for an exponential). Plot a histogram and compare it with a histogram of your non-transformed original variable.

The minimum value of Overall Condition is 1, which is above zero, so no location shift is needed.

#check that min val is not 0
min(traindf$OverallCond)
## [1] 1
fit <- fitdistr(traindf$OverallCond, "exponential")
lambda <- fit$estimate
sampledf <- rexp(1000, lambda)
## Warning in rexp(1000, lambda): '.Random.seed' is not an integer vector but
## of type 'NULL', so ignored
lambda
##      rate 
## 0.1793612
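#check: for an exponential distribution the maximum-likelihood rate is 1/mean(x),
#and 1/5.575 is approximately 0.1794, matching the fitted rate above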
sampledf<-data.frame(as.numeric(sampledf))
colnames(sampledf)[1] <- "sample"
str(sampledf)
## 'data.frame':    1000 obs. of  1 variable:
##  $ sample: num  7.64 3.91 15.48 14.6 6.62 ...
head(sampledf)
##       sample
## 1  7.6350605
## 2  3.9140303
## 3 15.4835311
## 4 14.5987077
## 5  6.6190968
## 6  0.1875989
hist(sampledf$sample, col="green", main="Histogram of Exponential Distribution", xlab = "Overall Condition", breaks=30)
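To make the comparison requested in the prompt explicit, a minimal sketch that draws the simulated and observed histograms side by side, reusing sampledf from above:

#plot the simulated exponential draws next to the observed Overall Condition values
par(mfrow=c(1,2))
hist(sampledf$sample, col="green", breaks=30, main="Simulated exponential", xlab="Simulated value")
hist(traindf$OverallCond, col="green", breaks=10, main="Observed Overall Condition", xlab="Overall Condition")
par(mfrow=c(1,1))

The exponential draws are strongly right-skewed with a long tail, whereas the observed values are concentrated around 5 on a 1 to 9 scale, so the exponential distribution is a poor description of this variable.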

Modeling

Build some type of regression model and submit your model to the competition board. Provide your complete model summary and results with analysis. Report your Kaggle.com user name and score.

Multiple Linear Regression

fit <- lm(traindf$SalePrice ~ traindf$OverallQual + traindf$GrLivArea + traindf$GarageCars + traindf$GarageArea, data=traindf)
summary(fit)
## 
## Call:
## lm(formula = traindf$SalePrice ~ traindf$OverallQual + traindf$GrLivArea + 
##     traindf$GarageCars + traindf$GarageArea, data = traindf)
## 
## Residuals:
##     Min      1Q  Median      3Q     Max 
## -372594  -21236   -1594   18625  301129 
## 
## Coefficients:
##                       Estimate Std. Error t value Pr(>|t|)    
## (Intercept)         -98436.050   4820.467 -20.420  < 2e-16 ***
## traindf$OverallQual  26988.854   1067.393  25.285  < 2e-16 ***
## traindf$GrLivArea       49.573      2.555  19.402  < 2e-16 ***
## traindf$GarageCars   11317.522   3126.297   3.620 0.000305 ***
## traindf$GarageArea      41.478     10.627   3.903 9.93e-05 ***
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
## 
## Residual standard error: 40420 on 1455 degrees of freedom
## Multiple R-squared:  0.7418, Adjusted R-squared:  0.7411 
## F-statistic:  1045 on 4 and 1455 DF,  p-value: < 2.2e-16

\[ SalePrice=26988.854*OverallQual + 49.573*GrLivArea + 11317.522*GarageCars + 41.478*GarageArea - 98436.050 \]
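Since the residuals are noted below to be large, the standard diagnostic plots for the fitted model give a quick check; a sketch reusing fit from above (plots not shown):

#residuals vs. fitted, normal Q-Q, scale-location, and residuals vs. leverage
par(mfrow=c(2,2))
plot(fit)
par(mfrow=c(1,1))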

par(mfrow=c(2,2))
X1<-traindf$OverallQual
X2<-traindf$GrLivArea
X3<-traindf$GarageCars
X4<-traindf$GarageArea
Y<-traindf$SalePrice

plot(X1,Y, col="#f06293", main="OverallQual", ylab="Sale Price")
abline(lm(Y~X1), col="green", lwd=3) # regression line (y~x)

plot(X2,Y, col="#9c27b1", main="GrLivArea", ylab="Sale Price")
abline(lm(Y~X2), col="green", lwd=3) # regression line (y~x)

plot(X3,Y, col="#ce93d7", main="GarageCars", ylab="Sale Price")
abline(lm(Y~X3), col="green", lwd=3) # regression line (y~x)

plot(X4,Y, col="#c2185c", main="GarageArea", ylab="Sale Price")
abline(lm(Y~X4), col="green", lwd=3) # regression line (y~x)

Load the test data set and create a predicted SalePrice column by applying the fitted regression equation. Then select the required columns and export them to a CSV file for the contest entry.
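An equivalent, arguably cleaner route, sketched here but not used for the actual submission, is to refit the same four-predictor model with a plain formula so that predict() can be applied to the test set dftest loaded in the chunk below:

#refit with column names only so the model can be applied to new data
fit2 <- lm(SalePrice ~ OverallQual + GrLivArea + GarageCars + GarageArea, data=traindf)
SalePrice_pred <- predict(fit2, newdata=dftest)  #rows with missing predictors give NA, handled later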

dftest <- read.csv('https://raw.githubusercontent.com/raghu74us/DATA-605/master/Final%20Project/test.csv')

SalePrice<-((26988.854*dftest$OverallQual) + (49.573*dftest$GrLivArea) +  (11317.522*dftest$GarageCars) + (41.478*dftest$GarageArea) -98436.050)


dftest<-dftest[,c("Id","OverallQual","GrLivArea","GarageCars","GarageArea")]

kable(head(dftest))
Id OverallQual GrLivArea GarageCars GarageArea
1461 5 896 1 730
1462 6 1329 1 312
1463 5 1629 2 482
1464 6 1604 2 470
1465 8 1280 2 506
1466 6 1655 2 440
submission <- cbind(dftest$Id,SalePrice)        #combine test Ids with predicted prices
colnames(submission)[1] <- "Id"
submission[submission<0] <- median(SalePrice)   #replace any negative predictions with the median prediction
submission<-as.data.frame(submission[1:1459,])  #keep the 1459 test rows as a data frame
submission[is.na(submission)] <- 0              #NA predictions (missing predictors in the test set) are set to 0
head(submission)
##     Id SalePrice
## 1 1461  122522.1
## 2 1462  153638.2
## 3 1463  159890.1
## 4 1464  185141.9
## 5 1465  224551.1
## 6 1466  186425.8

Upload the CSV to Kaggle.

kable(head(submission))
Id SalePrice
1461 122522.1
1462 153638.2
1463 159890.1
1464 185141.9
1465 224551.1
1466 186425.8
write.csv(submission, file = 'C:/cuny/Fall_2017/DATA-605/Final Project/submission1.csv', quote=FALSE, row.names=FALSE)

#write.csv(submission, file = 'C:/submission1.csv', quote=FALSE, row.names=FALSE)
#knitr::include_graphics('C:/cuny/Fall_2017/DATA-605/Final Project/score.PNG')
knitr::include_graphics('https://raw.githubusercontent.com/raghu74us/DATA-605/master/Final%20Project/score.PNG')

Analysis:

Based on the correlation of the attributes with sale price, I chose the top four attributes with the highest correlation and used only a multiple linear regression model to predict the sale price. With this model and these parameters, I obtained a Kaggle score of 0.78. The residuals of the linear model are large. For these four attributes, the scatterplots show a roughly linear relationship with sale price. Initially I compared Overall Condition with SalePrice, but Overall Condition was dropped from the correlation table because its correlation with sale price is below 0.1 in absolute value. The probability check shows that P(A & B) is not equal to P(A)P(B), so the two events are not independent. The correlation test of OverallCond + YearBuilt versus SalePrice gave a 99 percent confidence interval of roughly 0.48 to 0.57.
In order to improve the score, I would try different regression models and also include the top ten attributes with the highest correlation.