#1) Develop the logistic regression model
#### using x1 = Wet performance rating
#### and x2 = Noise performance rating to predict
#### y = Purchase. Write out the
#### estimated logistic regression equation.


#2) Are the independent variables significant
### based on the z-test? (Use alpha = 0.05)


#3) Assess the overall model significance
### using McFadden's R-squared


#4) Use the estimated logit to compute 
### an estimate of the probability that
### a customer will probably or definitely purchase
### a particular tire again with a Wet performance rating of 8 
### and a Noise performance rating of 8.
# Install/load packages

install.packages("Hmisc")
install.packages('pscl')
library(readxl)
## Warning: package 'readxl' was built under R version 4.3.3
library(Hmisc)
## Warning: package 'Hmisc' was built under R version 4.3.3
## 
## Attaching package: 'Hmisc'
## The following objects are masked from 'package:base':
## 
##     format.pval, units
library(pscl)
## Warning: package 'pscl' was built under R version 4.3.3
## Classes and Methods for R originally developed in the
## Political Science Computational Laboratory
## Department of Political Science
## Stanford University (2002-2015),
## by and under the direction of Simon Jackman.
## hurdle and zeroinfl functions by Achim Zeileis.
library(pROC)
## Warning: package 'pROC' was built under R version 4.3.3
## Type 'citation("pROC")' for a citation.
## 
## Attaching package: 'pROC'
## The following objects are masked from 'package:stats':
## 
##     cov, smooth, var
# Load data
tired_af <- read_excel(file.choose())   # select the Excel workbook of tire ratings when prompted

tiredf <- subset(tired_af, select = -c(Tire))   # drop the Tire name column, keep the numeric variables
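# Note: file.choose() needs an interactive R session. For a reproducible,
# non-interactive run, the workbook could be read from a fixed path instead
# (sketch only; "tire_ratings.xlsx" is a placeholder file name):
# tired_af <- read_excel("tire_ratings.xlsx")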
#Scrubbing
head(tired_af)
## # A tibble: 6 × 5
##   Tire                                     Wet Noise Buy_Again Purchase
##   <chr>                                  <dbl> <dbl>     <dbl>    <dbl>
## 1 BFGoodrich g-Force Super Sport A/S       8     7.2       6.1        0
## 2 BFGoodrich g-Force Super Sport A/S H&V   8     7.2       6.6        1
## 3 BFGoodrich g-Force T/A KDWS              7.6   7.5       6.9        1
## 4 Bridgestone B381                         6.6   5.4       6.6        0
## 5 Bridgestone Insignia SE200               5.8   6.3       4          0
## 6 Bridgestone Insignia SE200-02            6.3   5.7       4.5        0
summary(tired_af)
##      Tire                Wet            Noise         Buy_Again    
##  Length:68          Min.   :4.300   Min.   :3.600   Min.   :1.400  
##  Class :character   1st Qu.:6.450   1st Qu.:6.000   1st Qu.:3.850  
##  Mode  :character   Median :7.750   Median :7.100   Median :6.150  
##                     Mean   :7.315   Mean   :6.903   Mean   :5.657  
##                     3rd Qu.:8.225   3rd Qu.:7.925   3rd Qu.:7.400  
##                     Max.   :9.200   Max.   :8.900   Max.   :8.900  
##     Purchase     
##  Min.   :0.0000  
##  1st Qu.:0.0000  
##  Median :0.0000  
##  Mean   :0.4412  
##  3rd Qu.:1.0000  
##  Max.   :1.0000
head(tiredf)
## # A tibble: 6 × 4
##     Wet Noise Buy_Again Purchase
##   <dbl> <dbl>     <dbl>    <dbl>
## 1   8     7.2       6.1        0
## 2   8     7.2       6.6        1
## 3   7.6   7.5       6.9        1
## 4   6.6   5.4       6.6        0
## 5   5.8   6.3       4          0
## 6   6.3   5.7       4.5        0
summary(tiredf)
##       Wet            Noise         Buy_Again        Purchase     
##  Min.   :4.300   Min.   :3.600   Min.   :1.400   Min.   :0.0000  
##  1st Qu.:6.450   1st Qu.:6.000   1st Qu.:3.850   1st Qu.:0.0000  
##  Median :7.750   Median :7.100   Median :6.150   Median :0.0000  
##  Mean   :7.315   Mean   :6.903   Mean   :5.657   Mean   :0.4412  
##  3rd Qu.:8.225   3rd Qu.:7.925   3rd Qu.:7.400   3rd Qu.:1.0000  
##  Max.   :9.200   Max.   :8.900   Max.   :8.900   Max.   :1.0000
# Correlation matrix (with p-values)
corr <- rcorr(as.matrix(tiredf))
corr
##            Wet Noise Buy_Again Purchase
## Wet       1.00  0.76      0.91     0.74
## Noise     0.76  1.00      0.83     0.72
## Buy_Again 0.91  0.83      1.00     0.83
## Purchase  0.74  0.72      0.83     1.00
## 
## n= 68 
## 
## 
## P
##           Wet Noise Buy_Again Purchase
## Wet            0     0         0      
## Noise      0         0         0      
## Buy_Again  0   0               0      
## Purchase   0   0     0
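# rcorr() stores the correlations in corr$r and the p-values in corr$P; the
# zeros above just mean the p-values round to 0 at the printed precision.
# For an actual plot of the matrix, one option is the corrplot package
# (a sketch; corrplot is an extra package, not loaded above):
install.packages("corrplot")
library(corrplot)
corrplot(corr$r, method = "circle", type = "upper",
         addCoef.col = "black")   # pairwise correlations with coefficients overlaid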
# Logistic regression model
model <- glm(Purchase ~ Wet + Noise, data = tiredf, family = binomial)
summary(model)
## 
## Call:
## glm(formula = Purchase ~ Wet + Noise, family = binomial, data = tiredf)
## 
## Coefficients:
##             Estimate Std. Error z value Pr(>|z|)   
## (Intercept) -39.4982    12.4779  -3.165  0.00155 **
## Wet           3.3745     1.2641   2.670  0.00760 **
## Noise         1.8163     0.8312   2.185  0.02887 * 
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
## 
## (Dispersion parameter for binomial family taken to be 1)
## 
##     Null deviance: 93.325  on 67  degrees of freedom
## Residual deviance: 27.530  on 65  degrees of freedom
## AIC: 33.53
## 
## Number of Fisher Scoring iterations: 8
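# Question 1: from the coefficient estimates above, the estimated logistic
# regression (logit) equation is
#   g(x) = -39.4982 + 3.3745*Wet + 1.8163*Noise
# and the estimated purchase probability is p_hat = 1/(1 + exp(-g(x))).
# A small sketch that prints the equation straight from the fitted model:
b <- coef(model)
cat("logit(p) =", round(b[1], 4), "+", round(b[2], 4), "* Wet +",
    round(b[3], 4), "* Noise\n")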
print(model)
## 
## Call:  glm(formula = Purchase ~ Wet + Noise, family = binomial, data = tiredf)
## 
## Coefficients:
## (Intercept)          Wet        Noise  
##     -39.498        3.374        1.816  
## 
## Degrees of Freedom: 67 Total (i.e. Null);  65 Residual
## Null Deviance:       93.32 
## Residual Deviance: 27.53     AIC: 33.53
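# Question 2: both predictors are significant at alpha = 0.05 based on the
# z-tests in the summary above (Wet: z = 2.670, p = 0.0076; Noise: z = 2.185,
# p = 0.0289). A quick programmatic check (sketch) against alpha:
coef(summary(model))[, "Pr(>|z|)"] < 0.05   # TRUE = significant at the 0.05 level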
# McFadden pseudo R-squared
pR2(model)
## fitting null model for pseudo-r2
##         llh     llhNull          G2    McFadden        r2ML        r2CU 
## -13.7649516 -46.6623284  65.7947536   0.7050093   0.6199946   0.8305269
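# Question 3: a McFadden R-squared of about 0.705 indicates a very strong fit
# for a logistic model (values in the 0.2-0.4 range are often described as an
# excellent fit). Because the deviance of a binary logistic model is -2 times
# its log-likelihood, the same value can be recovered from the glm output:
1 - model$deviance / model$null.deviance   # 1 - 27.530/93.325 ≈ 0.705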
# ROC curve (for fun)
roc_curve <- roc(tiredf$Purchase, fitted(model))
## Setting levels: control = 0, case = 1
## Setting direction: controls < cases
plot(roc_curve)

auc(roc_curve)
## Area under the curve: 0.9741
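# With an AUC of about 0.97, the model separates buyers from non-buyers very
# well. As a quick follow-up (sketch), a confusion matrix at a 0.5 probability
# cutoff:
table(Predicted = fitted(model) > 0.5, Actual = tiredf$Purchase)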
# Prediction with Wet and Noise ratings of 8
new_data1 <- data.frame(Wet = 8, Noise = 8)   # Buy_Again is not a predictor in the model, so it is omitted
prob1 <- predict(model, newdata = new_data1, type = "response")   # probability the customer will purchase the tire again
prob1
##         1 
## 0.8836964
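# Question 4: with Wet = 8 and Noise = 8, the estimated probability that a
# customer will probably or definitely purchase the tire again is about 0.884.
# The same number can be reproduced by hand from the estimated logit:
# g = -39.4982 + 3.3745*8 + 1.8163*8 ≈ 2.028, so p = 1/(1 + exp(-2.028)) ≈ 0.884.
plogis(-39.4982 + 3.3745 * 8 + 1.8163 * 8)   # logistic CDF applied to the estimated logit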
# Prediction with Wet and Noise ratings of 7
new_data2 <- data.frame(Wet = 7, Noise = 7)   # Buy_Again is again omitted, since it is not in the model
prob2 <- predict(model, newdata = new_data2, type = "response")   # probability the customer will purchase the tire again
prob2
##          1 
## 0.04058753
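# For comparison, lowering both ratings from 8 to 7 drops the estimated
# purchase probability from about 0.88 to about 0.04, which shows how steeply
# the fitted logistic curve rises over this part of the rating scale.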