x <- c(16, 17, 20, 19, 26, 15, 18, 12, 15, 22)
y <- c(13.9, 13.4, 14.9, 12.7, 15.3, 11.9, 14.0, 12.8, 14.1, 14.0)
lmObj = lm(y~x)
summary(lmObj)
##
## Call:
## lm(formula = y ~ x)
##
## Residuals:
## Min 1Q Median 3Q Max
## -1.2812 -0.3255 0.1771 0.4844 0.9187
##
## Coefficients:
## Estimate Std. Error t value Pr(>|t|)
## (Intercept) 10.58750 1.23763 8.555 2.69e-05 ***
## x 0.17292 0.06728 2.570 0.0331 *
## ---
## Signif. codes: 0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
##
## Residual standard error: 0.8073 on 8 degrees of freedom
## Multiple R-squared: 0.4523, Adjusted R-squared: 0.3838
## F-statistic: 6.606 on 1 and 8 DF, p-value: 0.03312
cat("Increase in sales volume when advertisement is increased by Rs 10000 is ",0.17292*10," in ten thousand units.")
## Increase in sales volume when advertisement is increased by Rs 10000 is 1.7292 in ten thousand units.
cat("\n The intercept ",10.58750," will be the sales without any advertisement.")
##
## The intercept 10.5875 will be the sales without any advertisement.
plot(x,y)
abline(lmObj,col="red")
14. The following data give the annual incomes (in thousands of dollars) and the amounts (in thousands of dollars) of life insurance policies for eight persons.
Annual income (x):   42  58  27  36  70  24  53  37
Life insurance (y): 150 175  25  75 250  50 250 100
(a) Calculate the least-squares regression line for these data.
(b) Plot the points and the least-squares regression line on the same graph.
(c) Calculate the 95% confidence intervals for B0 and B1, respectively.
(d) Test the hypothesis H0 : B0 = 0 vs. Ha : B0 != 0 using the 0.05 level of significance.
(e) Test the hypothesis H0 : B1 = 0 vs. Ha : B1 != 0 using the 0.05 level of significance.
(f) Obtain a 90% prediction interval at x = 59 and interpret the result.
x <- c(42, 58, 27, 36, 70, 24, 53, 37)
y <- c(150, 175, 25, 75, 250, 50, 250, 100)
lmObj = lm(y~x)
summary(lmObj)
##
## Call:
## lm(formula = y ~ x)
##
## Residuals:
## Min 1Q Median 3Q Max
## -33.06 -23.38 -10.39 15.57 67.13
##
## Coefficients:
## Estimate Std. Error t value Pr(>|t|)
## (Intercept) -84.1674 39.5586 -2.128 0.07746 .
## x 5.0384 0.8631 5.838 0.00111 **
## ---
## Signif. codes: 0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
##
## Residual standard error: 36.16 on 6 degrees of freedom
## Multiple R-squared: 0.8503, Adjusted R-squared: 0.8254
## F-statistic: 34.08 on 1 and 6 DF, p-value: 0.001113
confint(lmObj)
## 2.5 % 97.5 %
## (Intercept) -180.963802 12.628926
## x 2.926612 7.150272
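As a check on part (c), the interval for the slope can also be built by hand from the estimate, its standard error, and the t quantile on n - 2 = 6 degrees of freedom; a sketch:
# Sketch: 95% CI for the slope, b1 +/- t(0.975, n-2) * SE(b1)
est <- coef(summary(lmObj))["x", "Estimate"]
se  <- coef(summary(lmObj))["x", "Std. Error"]
est + c(-1, 1) * qt(0.975, df = 6) * se  # should agree with confint(lmObj)["x", ]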
plot(x,y)
abline(lmObj,col="red")
print("We fail to reject the null hypothesis that the intercept is equal to zero")
## [1] "We fail to reject the null hypothesis that the intercept is equal to zero"
print("We reject the null hypothesis that the slope is equal to zero ")
## [1] "We reject the null hypothesis that the slope is equal to zero "
predict(lmObj,data.frame(x=59),interval = "prediction",level = 0.9)
## fit lwr upr
## 1 213.1007 134.0908 292.1105
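For part (f), the same interval can be reproduced from the prediction-interval formula; a minimal sketch, assuming the x and y vectors from this problem are still in the workspace:
# Sketch: 90% prediction interval at x0 = 59,
# yhat +/- t(0.95, n-2) * s * sqrt(1 + 1/n + (x0 - xbar)^2 / Sxx)
x0   <- 59
n    <- length(x)
s    <- summary(lmObj)$sigma                        # residual standard error
yhat <- unname(predict(lmObj, data.frame(x = x0)))  # point prediction
half <- qt(0.95, df = n - 2) * s * sqrt(1 + 1/n + (x0 - mean(x))^2 / sum((x - mean(x))^2))
c(fit = yhat, lwr = yhat - half, upr = yhat + half)  # should agree with the predict() output above
Interpretation: with 90% confidence, a person with an annual income of 59 thousand dollars is predicted to hold between roughly 134 and 292 thousand dollars of life insurance.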
x <- c(63, 70, 74, 82, 60, 44, 80, 71, 71, 41)
y <- c(151, 149, 164, 157, 144, 130, 157, 160, 121, 125)
lmObj = lm(y~x)
summary(lmObj)
##
## Call:
## lm(formula = y ~ x)
##
## Residuals:
## Min 1Q Median 3Q Max
## -28.8606 -0.8763 0.4071 5.9691 11.8835
##
## Coefficients:
## Estimate Std. Error t value Pr(>|t|)
## (Intercept) 96.4714 19.1861 5.028 0.00102 **
## x 0.7520 0.2867 2.622 0.03054 *
## ---
## Signif. codes: 0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
##
## Residual standard error: 11.94 on 8 degrees of freedom
## Multiple R-squared: 0.4622, Adjusted R-squared: 0.395
## F-statistic: 6.877 on 1 and 8 DF, p-value: 0.03054
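With a single predictor, the overall F statistic is just the square of the slope's t statistic, which gives a quick consistency check on this output; a sketch:
# Sketch: for one predictor, t^2 for the slope equals the overall F statistic
tval <- coef(summary(lmObj))["x", "t value"]
tval^2  # roughly 6.877, the F-statistic reported above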
plot(x,y)
abline(lmObj,col="red")
confint(lmObj)
## 2.5 % 97.5 %
## (Intercept) 52.22817238 140.714632
## x 0.09071445 1.413206
print("We reject the null hypothesis that the intercept is equal to zero")
## [1] "We reject the null hypothesis that the intercept is equal to zero"
print("We reject the null hypothesis that the slope is equal to zero ")
## [1] "We reject the null hypothesis that the slope is equal to zero "
predict(lmObj,data.frame(x=85),interval = "prediction")
## fit lwr upr
## 1 160.388 128.7849 191.9912
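The interval above is a 95% prediction interval for a single new observation at x = 85; for contrast, the interval for the mean response at the same point is narrower. A sketch:
# Sketch: 95% interval for the mean response at x = 85 (narrower than the prediction interval)
predict(lmObj, data.frame(x = 85), interval = "confidence")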
x <- c(73, 83, 77, 80, 85, 71, 80)
y <- c(186, 234, 208, 237, 265, 190, 220)
lmObj = lm(y~x)
summary(lmObj)
##
## Call:
## lm(formula = y ~ x)
##
## Residuals:
## 1 2 3 4 5 6 7
## -5.727 -9.809 -4.560 8.816 10.774 8.690 -8.184
##
## Coefficients:
## Estimate Std. Error t value Pr(>|t|)
## (Intercept) -188.4761 62.0831 -3.036 0.02889 *
## x 5.2083 0.7902 6.591 0.00121 **
## ---
## Signif. codes: 0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
##
## Residual standard error: 9.86 on 5 degrees of freedom
## Multiple R-squared: 0.8968, Adjusted R-squared: 0.8762
## F-statistic: 43.45 on 1 and 5 DF, p-value: 0.001207
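For a single predictor, the Multiple R-squared reported above is the squared sample correlation between x and y; a one-line sketch of that check:
# Sketch: squared correlation reproduces Multiple R-squared in simple regression
cor(x, y)^2  # roughly 0.8968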
plot(x,y)
abline(lmObj,col="red")
confint(lmObj)
## 2.5 % 97.5 %
## (Intercept) -348.065841 -28.886453
## x 3.177085 7.239429
print("We reject the null hypothesis that the intercept is equal to zero")
## [1] "We reject the null hypothesis that the intercept is equal to zero"
print("We reject the null hypothesis that the slope is equal to zero ")
## [1] "We reject the null hypothesis that the slope is equal to zero "
predict(lmObj,data.frame(x=90),interval = "prediction",level = 0.99)
## fit lwr upr
## 1 280.267 224.0032 336.5308
Price Y (in thousands of dollars):               100, 80, 104, 94, 130
Age X1 (in years):                                 1,  5,   5, 10,  20
Square footage X2 (in thousands of square feet):   1,  1,   2,  2,   3
x1 <- c(1,5,5,10,20)
x2 <- c(1,1,2,2,3)
y <- c(100,80,104,94,130)
lmObj = lm(y~x1+x2)
summary(lmObj)
##
## Call:
## lm(formula = y ~ x1 + x2)
##
## Residuals:
## 1 2 3 4 5
## 12.818 -5.665 -3.101 -11.204 7.153
##
## Coefficients:
## Estimate Std. Error t value Pr(>|t|)
## (Intercept) 66.1252 21.0132 3.147 0.0879 .
## x1 -0.3794 2.2212 -0.171 0.8801
## x2 21.4365 19.4544 1.102 0.3854
## ---
## Signif. codes: 0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
##
## Residual standard error: 13.83 on 2 degrees of freedom
## Multiple R-squared: 0.7142, Adjusted R-squared: 0.4285
## F-statistic: 2.499 on 2 and 2 DF, p-value: 0.2858
anova(lmObj)
## Analysis of Variance Table
##
## Response: y
## Df Sum Sq Mean Sq F value Pr(>F)
## x1 1 724.17 724.17 3.7845 0.1911
## x2 1 232.33 232.33 1.2141 0.3854
## Residuals 2 382.70 191.35
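The overall F statistic from summary() (2.499 on 2 and 2 df) can be recovered from this sequential ANOVA table by pooling the two regression sums of squares; a sketch:
# Sketch: overall F = ((SS_x1 + SS_x2) / 2) / MSE from the sequential sums of squares
ss  <- anova(lmObj)[["Sum Sq"]]   # SS for x1, x2, and residuals
msr <- (ss[1] + ss[2]) / 2        # regression mean square on 2 df
mse <- ss[3] / 2                  # residual mean square on 2 df
msr / mse                         # roughly 2.499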
print("We fail to reject the null hypothesis.")
## [1] "We fail to reject the null hypothesis."
Units sold (y):           65,  59,  80,  90, 100, 105
Interest rate (x1):      9.0, 9.3, 8.9, 9.1, 9.0, 8.7
Unemployment rate (x2): 10.0, 8.0, 8.2, 7.7, 7.1, 7.2
x1 <- c(9.0,9.3,8.9,9.1,9.0,8.7)
x2 <- c(10.0,8.0,8.2,7.7,7.1,7.2)
y <- c(65,59,80,90,100,105)
lmObj = lm(y~x1+x2)
summary(lmObj)
##
## Call:
## lm(formula = y ~ x1 + x2)
##
## Residuals:
## 1 2 3 4 5 6
## 3.522 -8.477 -6.681 8.510 6.540 -3.414
##
## Coefficients:
## Estimate Std. Error t value Pr(>|t|)
## (Intercept) 653.469 185.861 3.516 0.0390 *
## x1 -53.523 20.988 -2.550 0.0839 .
## x2 -11.028 3.976 -2.774 0.0694 .
## ---
## Signif. codes: 0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
##
## Residual standard error: 9.233 on 3 degrees of freedom
## Multiple R-squared: 0.8522, Adjusted R-squared: 0.7537
## F-statistic: 8.652 on 2 and 3 DF, p-value: 0.0568
anova(lmObj)
## Analysis of Variance Table
##
## Response: y
## Df Sum Sq Mean Sq F value Pr(>F)
## x1 1 819.20 819.20 9.6094 0.05330 .
## x2 1 655.88 655.88 7.6937 0.06935 .
## Residuals 3 255.75 85.25
## ---
## Signif. codes: 0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
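The borderline overall p-value of 0.0568 reported by summary() can be reproduced directly from the F distribution with 2 and 3 degrees of freedom; a sketch:
# Sketch: p-value of the overall F-test, F = 8.652 on 2 and 3 df
pf(8.652, df1 = 2, df2 = 3, lower.tail = FALSE)  # roughly 0.0568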
print("We fail to reject the null hypothesis.")
## [1] "We fail to reject the null hypothesis."