Libraries

library(tidyverse)  # data manipulation and ggplot2
library(datarium)   # marketing dataset
library(caret)      # createDataPartition() for the train-test split
library(psych)      # pairs.panels() scatterplot matrix
library(gridExtra)  # grid.arrange() for combining plots
library(Metrics)    # rmse()

Load data

data <- marketing   # advertising budgets and sales: 200 observations, 4 variables
head(data)
str(data)
'data.frame':   200 obs. of  4 variables:
 $ youtube  : num  276.1 53.4 20.6 181.8 217 ...
 $ facebook : num  45.4 47.2 55.1 49.6 13 ...
 $ newspaper: num  83 54.1 83.2 70.2 70.1 ...
 $ sales    : num  26.5 12.5 11.2 22.2 15.5 ...

Data Exploration and Visualization

ggplot(data, aes(x=youtube, y=sales)) + geom_point() 

ggplot(data, aes(x=facebook, y=sales)) + geom_point()  

ggplot(data, aes(x=newspaper, y=sales)) + geom_point() 

pairs.panels(data, method = "pearson", hist.col = "#00AFBB")
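
The pairs.panels() plot shows histograms, scatter plots, and Pearson correlations in one panel. As a purely numeric complement (not part of the original notebook), the correlation matrix can also be printed directly with base R:

# Pairwise Pearson correlations, rounded for readability
round(cor(data), 2)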

Data Preparation

Train-test split

set.seed(3456)
trainIndex <- createDataPartition(data$sales, p = 0.8, 
                                  list = FALSE, 
                                  times = 1)
head(trainIndex)
     Resample1
[1,]         1
[2,]         2
[3,]         3
[4,]         4
[5,]         5
[6,]         6
train <- data[ trainIndex,]
test  <- data[-trainIndex,]
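
createDataPartition() samples within quantile groups of the outcome, so the distribution of sales should look similar in the two sets. A quick sanity check (a sketch, not part of the original notebook):

# Sizes of the two sets (roughly 80/20) and the outcome distribution in each
c(train = nrow(train), test = nrow(test))
summary(train$sales)
summary(test$sales)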

Modelling

Simple Linear Regression

linreg1 <- lm(sales ~ youtube, data = train)
summary(linreg1)

Call:
lm(formula = sales ~ youtube, data = train)

Residuals:
    Min      1Q  Median      3Q     Max 
-9.7375 -2.4757  0.0065  2.6190  8.8483 

Coefficients:
            Estimate Std. Error t value Pr(>|t|)    
(Intercept) 8.583693   0.633160   13.56   <2e-16 ***
youtube     0.046120   0.003054   15.10   <2e-16 ***
---
Signif. codes:  0 ‘***’ 0.001 ‘**’ 0.01 ‘*’ 0.05 ‘.’ 0.1 ‘ ’ 1

Residual standard error: 4.025 on 160 degrees of freedom
Multiple R-squared:  0.5878,    Adjusted R-squared:  0.5852 
F-statistic: 228.1 on 1 and 160 DF,  p-value: < 2.2e-16
linreg2 <- lm(sales ~ facebook, data = train)
summary(linreg2)

Call:
lm(formula = sales ~ facebook, data = train)

Residuals:
    Min      1Q  Median      3Q     Max 
-18.636  -2.568   1.027   3.461   9.713 

Coefficients:
            Estimate Std. Error t value Pr(>|t|)    
(Intercept) 11.48285    0.77301   14.86  < 2e-16 ***
facebook     0.19094    0.02317    8.24 5.84e-14 ***
---
Signif. codes:  0 ‘***’ 0.001 ‘**’ 0.01 ‘*’ 0.05 ‘.’ 0.1 ‘ ’ 1

Residual standard error: 5.252 on 160 degrees of freedom
Multiple R-squared:  0.2979,    Adjusted R-squared:  0.2935 
F-statistic:  67.9 on 1 and 160 DF,  p-value: 5.844e-14
linreg3 <- lm(sales ~ newspaper, data = train)
summary(linreg3)

Call:
lm(formula = sales ~ newspaper, data = train)

Residuals:
     Min       1Q   Median       3Q      Max 
-13.6974  -4.1096  -0.9246   4.1947  15.0749 

Coefficients:
            Estimate Std. Error t value Pr(>|t|)    
(Intercept) 15.11819    0.82283  18.373  < 2e-16 ***
newspaper    0.04782    0.01821   2.626  0.00948 ** 
---
Signif. codes:  0 ‘***’ 0.001 ‘**’ 0.01 ‘*’ 0.05 ‘.’ 0.1 ‘ ’ 1

Residual standard error: 6.137 on 160 degrees of freedom
Multiple R-squared:  0.04131,   Adjusted R-squared:  0.03532 
F-statistic: 6.895 on 1 and 160 DF,  p-value: 0.009484
p1 <- ggplot(data, aes(x = youtube, y = sales)) +
        geom_point() +
        geom_smooth(method = 'lm', formula = y ~ x)
p2 <- ggplot(data, aes(x = facebook, y = sales)) +
        geom_point() +
        geom_smooth(method = 'lm', formula = y ~ x)
p3 <- ggplot(data, aes(x = newspaper, y = sales)) +
        geom_point() +
        geom_smooth(method = 'lm', formula = y ~ x)
grid.arrange(p1, p2, p3, nrow = 2)

Multiple Linear Regression

multireg1 <- lm(sales ~ ., data = train)
summary(multireg1)

Call:
lm(formula = sales ~ ., data = train)

Residuals:
     Min       1Q   Median       3Q      Max 
-10.5411  -1.0937   0.4033   1.4690   3.7120 

Coefficients:
             Estimate Std. Error t value Pr(>|t|)    
(Intercept)  3.210320   0.441087   7.278 1.49e-11 ***
youtube      0.046288   0.001586  29.192  < 2e-16 ***
facebook     0.194756   0.010061  19.358  < 2e-16 ***
newspaper   -0.004111   0.006766  -0.608    0.544    
---
Signif. codes:  0 ‘***’ 0.001 ‘**’ 0.01 ‘*’ 0.05 ‘.’ 0.1 ‘ ’ 1

Residual standard error: 2.09 on 158 degrees of freedom
Multiple R-squared:  0.8902,    Adjusted R-squared:  0.8881 
F-statistic: 427.1 on 3 and 158 DF,  p-value: < 2.2e-16
multireg2 <- lm(sales ~ youtube + facebook, data = train)
summary(multireg2)

Call:
lm(formula = sales ~ youtube + facebook, data = train)

Residuals:
     Min       1Q   Median       3Q      Max 
-10.3862  -1.0921   0.3928   1.4976   3.7229 

Coefficients:
            Estimate Std. Error t value Pr(>|t|)    
(Intercept) 3.128690   0.419291   7.462 5.25e-12 ***
youtube     0.046289   0.001583  29.250  < 2e-16 ***
facebook    0.192311   0.009202  20.898  < 2e-16 ***
---
Signif. codes:  0 ‘***’ 0.001 ‘**’ 0.01 ‘*’ 0.05 ‘.’ 0.1 ‘ ’ 1

Residual standard error: 2.086 on 159 degrees of freedom
Multiple R-squared:   0.89, Adjusted R-squared:  0.8886 
F-statistic: 643.1 on 2 and 159 DF,  p-value: < 2.2e-16
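
In multireg1 the newspaper coefficient is small and not statistically significant (p ≈ 0.54), which is why multireg2 drops it. The two nested models can also be compared with a partial F-test; a minimal sketch (not in the original notebook), noting that for a single added term this F-test is equivalent to the t-test on newspaper:

# Partial F-test: does adding newspaper to youtube + facebook improve the fit?
anova(multireg2, multireg1)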

Prediction and Evaluation

Predict on the test set

x_test <- test[, -4]   # predictors only (drop the sales column)
y_test <- test$sales   # actual sales in the test set

predict.linreg1   <- predict(linreg1, x_test)
predict.linreg2   <- predict(linreg2, x_test)
predict.linreg3   <- predict(linreg3, x_test)
predict.multireg1 <- predict(multireg1, x_test)
predict.multireg2 <- predict(multireg2, x_test)
data_pred <- data.frame(actual = y_test, 
                        linreg1 = predict.linreg1,
                        linreg2 = predict.linreg2,
                        linreg3 = predict.linreg3,
                        multireg1 = predict.multireg1,
                        multireg2 = predict.multireg2)
head(data_pred)
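
To see how closely the predictions track the actual test-set sales, one model's predictions can be plotted against the actual values; a small sketch for multireg2 (not part of the original notebook):

ggplot(data_pred, aes(x = actual, y = multireg2)) +
  geom_point() +
  geom_abline(slope = 1, intercept = 0, linetype = "dashed") +  # perfect-prediction line
  labs(x = "actual sales", y = "predicted sales (multireg2)")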

Evaluation

Root Mean Square Error (RMSE)

\[ \mathrm{RMSE}=\sqrt{\frac{\sum_{i=1}^n\left(y_i-\hat{y}_i\right)^2}{n}} \]

# Root Mean Square Error: apply rmse() to each model's predictions
# (every column of data_pred except 'actual'), then sort from best to worst
print(apply(X = data_pred[,-1], MARGIN = 2, FUN = rmse, actual = data_pred$actual) %>% sort())
multireg2 multireg1   linreg1   linreg2   linreg3 
 1.757540  1.778099  3.412434  4.603717  6.013120 
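
As a cross-check on Metrics::rmse(), the RMSE of the best model can be computed directly from the formula above; this sketch should reproduce the multireg2 value in the sorted output:

# RMSE of multireg2 from the definition: square root of the mean squared error
sqrt(mean((data_pred$actual - data_pred$multireg2)^2))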