setwd("C:/Users/User/Desktop/LearnR/CA/CAdata")

x = c(18,23,25,35,65,54,34,56,72,19,23,42,18,39,37)
y = c(202,186,187,180,156,169,174,172,153,199,193,174,198,183,178)
plot(x, y, xlim=c(0,80), ylim=c(150,215))  # make a plot
abline(lm(y ~ x)) # plot the regression line

lm(y ~ x) # the basic values of the regression analysis
## 
## Call:
## lm(formula = y ~ x)
## 
## Coefficients:
## (Intercept)            x  
##    210.0485      -0.7977
summary(lm(y ~ x))  #alternative code
## 
## Call:
## lm(formula = y ~ x)
## 
## Residuals:
##     Min      1Q  Median      3Q     Max 
## -8.9258 -2.5383  0.3879  3.1867  6.6242 
## 
## Coefficients:
##              Estimate Std. Error t value Pr(>|t|)    
## (Intercept) 210.04846    2.86694   73.27  < 2e-16 ***
## x            -0.79773    0.06996  -11.40 3.85e-08 ***
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
## 
## Residual standard error: 4.578 on 13 degrees of freedom
## Multiple R-squared:  0.9091, Adjusted R-squared:  0.9021 
## F-statistic:   130 on 1 and 13 DF,  p-value: 3.848e-08
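
A small usage sketch for pulling values out of the fitted model; fit0 is just a temporary name introduced here, everything else is base R:

fit0 = lm(y ~ x)      # refit the same simple regression and keep the object
coef(fit0)            # intercept and slope
confint(fit0)         # 95% confidence intervals for the coefficients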

Both library() and require() can load a package. If the package is not installed, library() throws an error and stops the subsequent code from running, whereas require() only issues a warning and lets execution continue.

library(ggplot2)
# require(ggplot2)
# print(qplot(x, y, xlim=c(0,80), ylim=c(150,220)))
# abline(lm(y ~ x))
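
A minimal sketch of the difference; "notInstalledPkg" is a hypothetical package name assumed not to be installed:

# library("notInstalledPkg")   # stops with an error: there is no package called 'notInstalledPkg'
if (!require("notInstalledPkg", quietly = TRUE)) {  # require() only warns and returns FALSE
  message("package not available, continuing with the rest of the script")
}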

Multiple Regression with R

y = c(202,186,187,180,156,169,174,172,153,199,193,174,198,183,178)

x1 = c(18,23,25,35,65,54,34,56,72,19,23,42,18,39,37)

x2 = c(56,45,67,89,65,76,55,66,77,63,53,49,76,62,53)

fit= lm(y~x1+x2)

summary(fit)
## 
## Call:
## lm(formula = y ~ x1 + x2)
## 
## Residuals:
##    Min     1Q Median     3Q    Max 
## -8.164 -2.733 -0.187  2.885  6.784 
## 
## Coefficients:
##              Estimate Std. Error t value Pr(>|t|)    
## (Intercept) 204.60635    6.55970  31.191 7.42e-13 ***
## x1           -0.81965    0.07426 -11.038 1.22e-07 ***
## x2            0.09865    0.10680   0.924    0.374    
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
## 
## Residual standard error: 4.604 on 12 degrees of freedom
## Multiple R-squared:  0.9151, Adjusted R-squared:  0.901 
## F-statistic:  64.7 on 2 and 12 DF,  p-value: 3.737e-07

R-squared is also known as the coefficient of determination.
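
As a quick check of what this number means, R-squared can be recomputed from the fitted model above (a small sketch using the existing fit object):

rss = sum(resid(fit)^2)        # residual sum of squares
tss = sum((y - mean(y))^2)     # total sum of squares
1 - rss/tss                    # reproduces the Multiple R-squared of 0.9151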

Multiple Regression with Categorical Predictors

Treating a variable as a categorical variable

y = c(202,186,187,180,156,169,174,172,153,199,193,174,198,183,178)
x1 = c(18,23,25,35,65,54,34,56,72,19,23,42,18,39,37)
x2 = c(56,45,67,89,65,76,55,66,77,63,53,49,76,62,53)
x3 = c(1,1,1,1,1,2,2,2,0,0,0,0,0,1,1)
fit= lm(y~x1+x2+factor(x3))  # use factor() to declare x3 as a categorical variable
summary(fit)
## 
## Call:
## lm(formula = y ~ x1 + x2 + factor(x3))
## 
## Residuals:
##     Min      1Q  Median      3Q     Max 
## -8.1066 -2.5487 -0.8259  1.9912  7.3362 
## 
## Coefficients:
##              Estimate Std. Error t value Pr(>|t|)    
## (Intercept) 205.80831    7.33581  28.055 7.69e-11 ***
## x1           -0.81849    0.08383  -9.764 1.98e-06 ***
## x2            0.09552    0.11501   0.831    0.426    
## factor(x3)1  -1.76090    2.90297  -0.607    0.558    
## factor(x3)2  -1.12668    3.76855  -0.299    0.771    
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
## 
## Residual standard error: 4.953 on 10 degrees of freedom
## Multiple R-squared:  0.9181, Adjusted R-squared:  0.8854 
## F-statistic: 28.04 on 4 and 10 DF,  p-value: 2.054e-05
y = c(202,186,187,180,156,169,174,172,153,199,193,174,198,183,178)
x1 = c(18,23,25,35,65,54,34,56,72,19,23,42,18,39,37)
x2 = c(56,45,67,89,65,76,55,66,77,63,53,49,76,62,53)
x3 = c(1,1,1,1,1,2,2,2,0,0,0,0,0,1,1)
x3 = factor(x3)
newx3 = relevel(x3, ref = "2")
a = lm(y~x1+x2+newx3)
summary(a)
## 
## Call:
## lm(formula = y ~ x1 + x2 + newx3)
## 
## Residuals:
##     Min      1Q  Median      3Q     Max 
## -8.1066 -2.5487 -0.8259  1.9912  7.3362 
## 
## Coefficients:
##              Estimate Std. Error t value Pr(>|t|)    
## (Intercept) 204.68163    7.92081  25.841 1.73e-10 ***
## x1           -0.81849    0.08383  -9.764 1.98e-06 ***
## x2            0.09552    0.11501   0.831    0.426    
## newx30        1.12668    3.76855   0.299    0.771    
## newx31       -0.63422    3.58180  -0.177    0.863    
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
## 
## Residual standard error: 4.953 on 10 degrees of freedom
## Multiple R-squared:  0.9181, Adjusted R-squared:  0.8854 
## F-statistic: 28.04 on 4 and 10 DF,  p-value: 2.054e-05

Not treating the variable as a categorical variable

y = c(202,186,187,180,156,169,174,172,153,199,193,174,198,183,178)
x1 = c(18,23,25,35,65,54,34,56,72,19,23,42,18,39,37)
x2 = c(56,45,67,89,65,76,55,66,77,63,53,49,76,62,53)
x3 = c(1,1,1,1,1,2,2,2,0,0,0,0,0,1,1)
fit= lm(y~x1+x2+x3)  # x3 is kept as a numeric variable (no factor() conversion)
summary(fit)
## 
## Call:
## lm(formula = y ~ x1 + x2 + x3)
## 
## Residuals:
##    Min     1Q Median     3Q    Max 
## -7.301 -2.691 -1.098  2.572  7.484 
## 
## Coefficients:
##              Estimate Std. Error t value Pr(>|t|)    
## (Intercept) 205.05293    6.87760  29.815 7.13e-12 ***
## x1           -0.81148    0.07932 -10.230 5.89e-07 ***
## x2            0.09703    0.11072   0.876     0.40    
## x3           -0.74955    1.77043  -0.423     0.68    
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
## 
## Residual standard error: 4.77 on 11 degrees of freedom
## Multiple R-squared:  0.9165, Adjusted R-squared:  0.8937 
## F-statistic: 40.24 on 3 and 11 DF,  p-value: 3.193e-06
y = c(202,186,187,180,156,169,174,172,153,199,193,174,198,183,178)
x1 = c(18,23,25,35,65,54,34,56,72,19,23,42,18,39,37)
x2 = c(56,45,67,89,65,76,55,66,77,63,53,49,76,62,53)
x3 = c("B","B","B","B","B","C","C","C","A","A","A","A","A","B","B")
fit= lm(y~x1+x2+x3)
summary(fit)
## 
## Call:
## lm(formula = y ~ x1 + x2 + x3)
## 
## Residuals:
##     Min      1Q  Median      3Q     Max 
## -8.1066 -2.5487 -0.8259  1.9912  7.3362 
## 
## Coefficients:
##              Estimate Std. Error t value Pr(>|t|)    
## (Intercept) 205.80831    7.33581  28.055 7.69e-11 ***
## x1           -0.81849    0.08383  -9.764 1.98e-06 ***
## x2            0.09552    0.11501   0.831    0.426    
## x3B          -1.76090    2.90297  -0.607    0.558    
## x3C          -1.12668    3.76855  -0.299    0.771    
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
## 
## Residual standard error: 4.953 on 10 degrees of freedom
## Multiple R-squared:  0.9181, Adjusted R-squared:  0.8854 
## F-statistic: 28.04 on 4 and 10 DF,  p-value: 2.054e-05
y = c(202,186,187,180,156,169,174,172,153,199,193,174,198,183,178)
x1 = c(18,23,25,35,65,54,34,56,72,19,23,42,18,39,37)
x2 = c(56,45,67,89,65,76,55,66,77,63,53,49,76,62,53)
x3 = c("F","F","F","F","F","F","F","M","M","M","M","M","M","M","M")
fit= lm(y~x1+x2+x3)
summary(fit)
## 
## Call:
## lm(formula = y ~ x1 + x2 + x3)
## 
## Residuals:
##    Min     1Q Median     3Q    Max 
## -5.867 -2.234 -1.738  2.043  8.707 
## 
## Coefficients:
##              Estimate Std. Error t value Pr(>|t|)    
## (Intercept) 201.47962    6.23852  32.296 2.99e-12 ***
## x1           -0.83153    0.06823 -12.187 9.92e-08 ***
## x2            0.12109    0.09845   1.230   0.2444    
## x3M           4.02377    2.20015   1.829   0.0946 .  
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
## 
## Residual standard error: 4.211 on 11 degrees of freedom
## Multiple R-squared:  0.9349, Adjusted R-squared:  0.9172 
## F-statistic: 52.67 on 3 and 11 DF,  p-value: 8.171e-07
aaa=read.csv("C:/Users/User/Desktop/LearnR/CA/CAdata/anova_data1.csv", header=T)
aaa
##    Dosage Score
## 1       a    30
## 2       a    38
## 3       a    35
## 4       a    41
## 5       a    27
## 6       a    24
## 7       b    32
## 8       b    26
## 9       b    31
## 10      b    29
## 11      b    27
## 12      b    35
## 13      b    21
## 14      b    25
## 15      c    17
## 16      c    21
## 17      c    20
## 18      c    19
attach(aaa)
fit= lm(Score~Dosage)
summary(fit)
## 
## Call:
## lm(formula = Score ~ Dosage)
## 
## Residuals:
##    Min     1Q Median     3Q    Max 
## -8.500 -2.438  0.250  2.688  8.500 
## 
## Coefficients:
##             Estimate Std. Error t value Pr(>|t|)    
## (Intercept)   32.500      2.010  16.166 6.72e-11 ***
## Dosageb       -4.250      2.659  -1.598 0.130880    
## Dosagec      -13.250      3.179  -4.168 0.000824 ***
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
## 
## Residual standard error: 4.924 on 15 degrees of freedom
## Multiple R-squared:  0.5396, Adjusted R-squared:  0.4782 
## F-statistic: 8.789 on 2 and 15 DF,  p-value: 0.002977

Multi-collinearity

install.packages("car")

# Evaluate Collinearity

library(car)
## Loading required package: carData
y = c(202,186,187,180,156,169,174,172,153,199,193,174,198,183,178)
x1 = c(18,23,25,35,65,54,34,56,72,19,23,42,18,39,37)
x2 = c(56,65,67,69,125,76,65,86,97,53,63,69,56,72,63)
fit= lm(y~x1+x2)
cor(x1, x2)
## [1] 0.8668049
vif(fit) #variance inflation factors
##       x1       x2 
## 4.021729 4.021729
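
A minimal sketch of what vif() reports: regress each predictor on the remaining predictors and take 1/(1 - R-squared).

r2 = summary(lm(x1 ~ x2))$r.squared   # R-squared from regressing x1 on x2
1/(1 - r2)                            # reproduces the VIF of about 4.02 shown above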
y = c(202,186,187,180,156,169,174,172,153,199,193,174,198,183,178)
x1 = c(18,23,25,35,65,54,34,56,72,19,23,42,18,39,37)
x3 = c(16,25,27,39,65,46,35,56,87,13,23,49,16,32,23)
fit= lm(y~x1+x3)
cor(x1, x3)
## [1] 0.9503016
vif(fit)
##       x1       x3 
## 10.31706 10.31706

Residual analysis (regression diagnostics)

library(car) # package:car
y = c(202,186,187,180,156,169,174,172,153,199,193,174,198,183,178)
x1 = c(18,23,25,35,65,54,34,56,72,19,23,42,18,39,37)
x2 = c(56,45,67,89,65,76,55,66,77,63,53,49,76,62,53)
fit= lm(y~x1+x2)
summary(fit)
## 
## Call:
## lm(formula = y ~ x1 + x2)
## 
## Residuals:
##    Min     1Q Median     3Q    Max 
## -8.164 -2.733 -0.187  2.885  6.784 
## 
## Coefficients:
##              Estimate Std. Error t value Pr(>|t|)    
## (Intercept) 204.60635    6.55970  31.191 7.42e-13 ***
## x1           -0.81965    0.07426 -11.038 1.22e-07 ***
## x2            0.09865    0.10680   0.924    0.374    
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
## 
## Residual standard error: 4.604 on 12 degrees of freedom
## Multiple R-squared:  0.9151, Adjusted R-squared:  0.901 
## F-statistic:  64.7 on 2 and 12 DF,  p-value: 3.737e-07
outlierTest(fit) # Bonferroni p-value for the most extreme observation
## No Studentized residuals with Bonferroni p < 0.05
## Largest |rstudent|:
##    rstudent unadjusted p-value Bonferroni p
## 7 -2.127814           0.056795      0.85192
qqPlot(fit, main="QQ Plot") # QQ plot of the studentized residuals

## [1] 7 8
# plot the residual diagnostic plots
par(mfrow=c(2,2))
plot(fit)

resid(fit)
##          1          2          3          4          5          6          7 
##  6.6232481 -4.1933786 -3.7242792 -4.6979492 -1.7408342  1.1578695 -8.1636447 
##          8          9         10         11         12         13         14 
##  6.7836355 -0.1870073  3.7523811  2.0174547 -1.0145391  0.6503314  4.2441036 
##         15 
## -1.5073916
residualPlots(fit) # package: car, Tukey test for nonadditivity

##            Test stat Pr(>|Test stat|)  
## x1            0.8571          0.40966  
## x2           -1.8647          0.08911 .
## Tukey test    0.8621          0.38862  
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1

In the residual plots, standardized residuals should ideally fall within about ±3.
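
A small sketch for checking that rule of thumb on the fitted model:

rstandard(fit)                    # standardized residuals
which(abs(rstandard(fit)) > 3)    # observations outside the +/-3 band, if any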

Normality

Method 1

Shapiro-Wilk test

#Sample size < 50
a<-rnorm(50, mean = 60, sd = 10)
shapiro.test(a)  # run the Shapiro-Wilk normality test on the sample
(a)  # print the simulated sample
##  [1] 83.45684 70.56542 48.60176 59.07326 55.00837 70.08762 75.87689 65.98744
##  [9] 81.97664 49.31876 61.12105 59.37404 64.45662 51.90351 45.18661 55.15390
## [17] 46.67818 66.41711 53.97992 56.27781 48.45064 54.25767 64.22767 53.11206
## [25] 54.40701 57.65060 55.52188 66.06859 59.82486 48.89068 69.62759 56.01608
## [33] 55.22916 51.12721 70.11492 57.81674 46.98425 40.62899 51.51887 62.26380
## [41] 82.72806 50.49387 54.92946 66.97047 65.09214 53.91634 61.82253 58.62062
## [49] 72.02232 58.37658

Method 2

Kolmogorov-Smirnov test

Checks whether two distributions (samples) are the same.

a<-rnorm(100, mean = 60, sd = 10)
b<-rnorm(100, mean = 60, sd = 8)
ks.test(a, b) # Do x and y come from the same distribution?
## 
##  Two-sample Kolmogorov-Smirnov test
## 
## data:  a and b
## D = 0.14, p-value = 0.281
## alternative hypothesis: two-sided

Method 3

install.packages("nortest")

library(nortest)
lillie.test(a) #Lilliefors (Kolmogorov-Smirnov) test for normality
## 
##  Lilliefors (Kolmogorov-Smirnov) normality test
## 
## data:  a
## D = 0.072953, p-value = 0.2124
#Sample size > 50
pearson.test(a) #Pearson chi-square test for normality
## 
##  Pearson chi-square normality test
## 
## data:  a
## P = 7.9, p-value = 0.6386
sf.test(a) #Shapiro-Francia test for normality
## 
##  Shapiro-Francia normality test
## 
## data:  a
## W = 0.9756, p-value = 0.05747

Non-independence of Errors

# Is the error in one period correlated with the error in the next period? The result here shows no correlation.

# Test for Autocorrelated Errors
y = c(202,186,187,180,156,169,174,172,153,199,193,174,198,183,178)
x1 = c(88,23,35,25,65,94,24,36,72,59,23,82,28,99,37)
fit= lm(y~x1)
# durbinWatsonTest(fit)
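
As a rough illustration of the same idea (not the formal Durbin-Watson test), the lag-1 correlation of the residuals can be inspected directly:

e = resid(fit)
cor(e[-length(e)], e[-1])   # a value near 0 suggests adjacent errors are not correlated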

Equal variance test

Use an F-test to check the homogeneity-of-variance assumption: take the absolute values of the residuals, split them into two groups, and then test whether the two groups have equal variances.

y = c(202,186,187,180,156,169,174,172,153,199,193,174,198,183,178,177)
x1 = c(18,23,25,35,65,54,34,56,72,19,23,42,18,39,37,39)
fit= lm(y~x1)
res=abs(resid(fit)) # abs() takes the absolute values of the residuals
a=res[1:8]   # split the residuals in half
b=res[9:16]
var.test(a,b)
## 
##  F test to compare two variances
## 
## data:  a and b
## F = 4.2844, num df = 7, denom df = 7, p-value = 0.07397
## alternative hypothesis: true ratio of variances is not equal to 1
## 95 percent confidence interval:
##   0.8577478 21.4000516
## sample estimates:
## ratio of variances 
##           4.284372

Identifying influential cases [excluding extreme values]

Cook's D: flag observations with D > 4/n, where n is the number of observations; computed with cooks.distance(fit)

|DFFITS| > 1 for small/medium data, or > 2*sqrt(p/n) for large data, where p is the number of predictors including the intercept; computed with dffits(fit)

|DFBETAS| > 1 for small/medium data, or > 2/sqrt(n) for large data; computed with dfbetas(fit)

aaa=read.csv("C:/Users/User/Desktop/LearnR/CA/CAdata/lowbwt.csv", header=T)
attach(aaa)
fit= lm(BWT~AGE+LWT+SMOKE)
summary(fit)
## 
## Call:
## lm(formula = BWT ~ AGE + LWT + SMOKE)
## 
## Residuals:
##      Min       1Q   Median       3Q      Max 
## -2069.89  -433.18    13.67   516.45  1813.75 
## 
## Coefficients:
##             Estimate Std. Error t value Pr(>|t|)    
## (Intercept) 2362.720    300.687   7.858 3.11e-13 ***
## AGE            7.093      9.925   0.715   0.4757    
## LWT            4.019      1.720   2.337   0.0205 *  
## SMOKE       -267.213    105.802  -2.526   0.0124 *  
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
## 
## Residual standard error: 708.8 on 185 degrees of freedom
## Multiple R-squared:  0.06988,    Adjusted R-squared:  0.05479 
## F-statistic: 4.633 on 3 and 185 DF,  p-value: 0.003781
cooks.distance(fit)   # larger values indicate greater influence (worse)
##            1            2            3            4            5            6 
## 8.091231e-03 6.419186e-03 9.476283e-05 5.937741e-05 2.885089e-05 7.588305e-04 
##            7            8            9           10           11           12 
## 6.337463e-04 6.727997e-04 1.937808e-04 4.241809e-05 2.324249e-04 1.088990e-03 
##           13           14           15           16           17           18 
## 1.871643e-04 7.630093e-04 2.290077e-04 2.290077e-04 9.700582e-05 9.509760e-06 
##           19           20           21           22           23           24 
## 1.851577e-04 1.998491e-05 7.510465e-04 3.346710e-04 1.161106e-02 2.355037e-04 
##           25           26           27           28           29           30 
## 1.082100e-04 1.205737e-03 4.176751e-04 5.073280e-04 1.494260e-05 2.712675e-06 
##           31           32           33           34           35           36 
## 2.712675e-06 1.219782e-03 3.278110e-04 2.142058e-04 2.062418e-05 1.371800e-04 
##           37           38           39           40           41           42 
## 3.351273e-04 3.746905e-04 1.070923e-03 1.431487e-03 5.396954e-05 7.234268e-04 
##           43           44           45           46           47           48 
## 8.258035e-07 6.929410e-05 3.101368e-03 3.101368e-03 1.939120e-05 2.472959e-05 
##           49           50           51           52           53           54 
## 4.735950e-05 3.114932e-03 4.771181e-05 1.801726e-05 9.001905e-04 3.614989e-03 
##           55           56           57           58           59           60 
## 3.120697e-04 6.899062e-04 2.159992e-03 1.493096e-06 5.977599e-04 5.964558e-04 
##           61           62           63           64           65           66 
## 5.964558e-04 2.503049e-04 3.858828e-04 7.897966e-05 1.558962e-03 8.928576e-05 
##           67           68           69           70           71           72 
## 4.092882e-04 1.029237e-06 3.700017e-04 1.893643e-04 2.920325e-03 2.294233e-03 
##           73           74           75           76           77           78 
## 2.773707e-03 2.066998e-03 4.821409e-03 3.474504e-06 4.727477e-04 4.981517e-03 
##           79           80           81           82           83           84 
## 4.063533e-03 4.495877e-04 7.930186e-04 5.913623e-04 2.327173e-03 1.238137e-03 
##           85           86           87           88           89           90 
## 1.259416e-03 8.012113e-03 3.297179e-03 1.278362e-03 1.879113e-03 1.638709e-03 
##           91           92           93           94           95           96 
## 1.326998e-03 1.700527e-03 1.099909e-02 1.085207e-02 9.889287e-03 2.183452e-03 
##           97           98           99          100          101          102 
## 1.809873e-03 6.976488e-03 6.976488e-03 2.856290e-03 3.313486e-03 1.249258e-02 
##          103          104          105          106          107          108 
## 3.693451e-03 3.665054e-03 3.510126e-03 3.639327e-03 6.449399e-03 3.739138e-03 
##          109          110          111          112          113          114 
## 1.244639e-02 8.349879e-03 5.151642e-03 6.148936e-03 1.200573e-02 1.109605e-02 
##          115          116          117          118          119          120 
## 1.476529e-02 4.580693e-03 1.229218e-02 5.143057e-03 4.728238e-03 1.616729e-02 
##          121          122          123          124          125          126 
## 6.220598e-03 5.376079e-03 6.450495e-03 5.307676e-03 5.547980e-03 1.366785e-02 
##          127          128          129          130          131          132 
## 1.603014e-02 1.983679e-02 1.312272e-02 2.080437e-01 4.293705e-02 3.263085e-02 
##          133          134          135          136          137          138 
## 1.051408e-01 1.856065e-02 2.317522e-02 1.573253e-02 1.386106e-02 8.108206e-03 
##          139          140          141          142          143          144 
## 7.900945e-03 1.506383e-02 1.619219e-02 6.036581e-03 6.868416e-03 1.216299e-02 
##          145          146          147          148          149          150 
## 6.103057e-03 8.091529e-03 4.157642e-02 8.254824e-03 6.357383e-03 5.051545e-03 
##          151          152          153          154          155          156 
## 7.480706e-03 5.567301e-03 3.176305e-03 3.763557e-03 4.452047e-03 4.023523e-03 
##          157          158          159          160          161          162 
## 2.770079e-03 2.420780e-03 4.530571e-03 1.726846e-03 2.053625e-03 3.632937e-03 
##          163          164          165          166          167          168 
## 3.255539e-03 5.757275e-03 1.351428e-03 1.432390e-03 2.800003e-03 3.483477e-03 
##          169          170          171          172          173          174 
## 2.419495e-03 3.745330e-03 7.471918e-03 9.385954e-04 8.327085e-04 3.545521e-03 
##          175          176          177          178          179          180 
## 1.787122e-03 2.663582e-03 9.309099e-04 8.489235e-04 6.079712e-04 2.383996e-03 
##          181          182          183          184          185          186 
## 3.321447e-03 1.612051e-03 6.350367e-03 2.941222e-04 6.258374e-04 2.022937e-03 
##          187          188          189 
## 2.115936e-04 2.966937e-03 5.424351e-04
dffits(fit)  # examine each influence measure separately
##            1            2            3            4            5            6 
## -0.179914000 -0.160202747 -0.019417664 -0.015370239 -0.010713697 -0.054989664 
##            7            8            9           10           11           12 
## -0.050246992 -0.051755603 -0.027768367 -0.012990921 -0.030412579 -0.065869694 
##           13           14           15           16           17           18 
## -0.027290931 -0.055115545  0.030187525  0.030187525 -0.019645779  0.006150929 
##           19           20           21           22           23           24 
## -0.027145661  0.008916797 -0.054679642 -0.036493530 -0.215360607 -0.030614644 
##           25           26           27           28           29           30 
## -0.020750888 -0.069307531  0.040772503 -0.044941482 -0.007710262 -0.003285131 
##           31           32           33           34           35           36 
## -0.003285131  0.069701168  0.036115927 -0.029197827 -0.009058400  0.023363020 
##           37           38           39           40           41           42 
##  0.036521261  0.038618096 -0.065284206  0.075494924  0.014653267 -0.053659000 
##           43           44           45           46           47           48 
##  0.001812558 -0.016604339  0.111226479  0.111226479 -0.008783332  0.009919062 
##           49           50           51           52           53           54 
##  0.013727069  0.111472856  0.013778092  0.008466560  0.059885828  0.120063580 
##           55           56           57           58           59           60 
##  0.035244281  0.052408927  0.092835312  0.002437234  0.048787475  0.048731203 
##           61           62           63           64           65           66 
##  0.048731203  0.031565322  0.039195560  0.017727166  0.078839232  0.018847926 
##           67           68           69           70           71           72 
##  0.040369311  0.002023536  0.038379365  0.027451448  0.108011165  0.095622841 
##           73           74           75           76           77           78 
##  0.105261089  0.090782325  0.138793575 -0.003717917  0.043390976  0.141030925 
##           79           80           81           82           83           84 
##  0.127508635  0.042298938  0.056221278  0.048515006  0.096331302  0.070269096 
##           85           86           87           88           89           90 
##  0.070890078  0.179289320  0.114798650  0.071427017  0.086500650  0.080897366 
##           91           92           93           94           95           96 
##  0.072776860  0.082401829  0.209440825  0.208894189  0.199225990  0.093350667 
##           97           98           99          100          101          102 
##  0.084954884  0.167231926  0.167231926  0.106791230  0.115170118  0.223709886 
##          103          104          105          106          107          108 
##  0.121637669  0.121176568  0.118568284  0.120382796  0.160710580  0.122231338 
##          109          110          111          112          113          114 
##  0.224154892  0.182759960  0.143380065  0.157119821  0.219910617  0.210941923 
##          115          116          117          118          119          120 
##  0.243772117  0.135518767  0.222219768  0.143656863  0.137845326  0.255446161 
##          121          122          123          124          125          126 
##  0.157951088  0.146845777  0.161221587  0.146231560  0.149535064  0.234759725 
##          127          128          129          130          131          132 
##  0.253635921  0.284569959  0.231710634  0.928254331 -0.423350535 -0.369040853 
##          133          134          135          136          137          138 
## -0.661196522 -0.275773528 -0.307010801 -0.253604805 -0.237123720 -0.181397529 
##          139          140          141          142          143          144 
## -0.179032282 -0.246506827 -0.255032054 -0.155403508 -0.166420854 -0.221449194 
##          145          146          147          148          149          150 
## -0.156261252 -0.180243270 -0.411162345 -0.182127918 -0.159832965 -0.142467791 
##          151          152          153          154          155          156 
## -0.173190803 -0.149382299 -0.112631944 -0.122661730 -0.133796506 -0.126781610 
##          157          158          159          160          161          162 
## -0.105180936 -0.098321535 -0.134836735 -0.082941286 -0.090481954 -0.120564787 
##          163          164          165          166          167          168 
## -0.114113318 -0.151880133 -0.073379463 -0.075564538 -0.105760584 -0.117961703 
##          169          170          171          172          173          174 
## -0.098186236 -0.122276949 -0.172811222 -0.061147348 -0.057586852 -0.118964710 
##          175          176          177          178          179          180 
## -0.084492600 -0.103065407 -0.060900524 -0.058140702 -0.049201278 -0.097536734 
##          181          182          183          184          185          186 
## -0.115281769 -0.080182797 -0.159243637 -0.034210600 -0.049910239 -0.089778361 
##          187          188          189 
## -0.029016966 -0.108830921 -0.046473168
dfbetas(fit) 
##       (Intercept)           AGE           LWT         SMOKE
## 1    0.0160060747  0.0864256263 -1.387811e-01  0.0575249813
## 2    0.0979099409 -0.1198860010 -3.351505e-02  0.0495883262
## 3   -0.0088939901  0.0045793665  7.150718e-03 -0.0127458496
## 4   -0.0057128399  0.0022398909  5.317694e-03 -0.0106035667
## 5   -0.0057755905  0.0045911641  2.972994e-03 -0.0065748705
## 6   -0.0289439230  0.0171016737  5.888033e-03  0.0331851589
## 7   -0.0265161183  0.0072827427  1.403639e-02  0.0304493308
## 8   -0.0445905152  0.0290981334  1.928274e-02  0.0239340188
## 9    0.0088831303 -0.0165788239  5.257476e-03 -0.0177625540
## 10   0.0004875270 -0.0048779425  4.386967e-03 -0.0089766643
## 11  -0.0255492804  0.0104690069  1.714251e-02  0.0143743286
## 12  -0.0167292421  0.0369171503 -3.017182e-02  0.0309215784
## 13  -0.0186179329  0.0009531490  1.798879e-02  0.0133951440
## 14   0.0010956054 -0.0376791397  2.737873e-02  0.0209661978
## 15   0.0177918118 -0.0118100252 -1.156816e-02  0.0176815469
## 16   0.0177918118 -0.0118100252 -1.156816e-02  0.0176815469
## 17  -0.0177582499  0.0127318135  7.211076e-03  0.0080459626
## 18  -0.0001686781  0.0016540231 -1.483045e-03  0.0045247264
## 19  -0.0172355993  0.0110739285  4.576002e-03  0.0156171117
## 20  -0.0021256823  0.0047548912 -2.071636e-03  0.0059151563
## 21   0.0144801977 -0.0432480079  1.580320e-02  0.0190222467
## 22   0.0003384117 -0.0258629791  2.033155e-02  0.0123618840
## 23   0.1756475551 -0.1292281727 -1.255099e-01  0.0403646231
## 24   0.0011151414 -0.0177285051  9.762435e-03  0.0146578431
## 25  -0.0046574631 -0.0055279535  6.274295e-03  0.0120674856
## 26   0.0377182840 -0.0246902455 -3.884585e-02  0.0268530315
## 27   0.0178736623 -0.0231085676 -7.747184e-06  0.0249129333
## 28   0.0207422626 -0.0250713246 -1.165012e-02  0.0192785071
## 29   0.0043723517 -0.0012903729 -4.507792e-03 -0.0048407799
## 30  -0.0026129094  0.0020548841  6.837295e-04  0.0015788692
## 31  -0.0026129094  0.0020548841  6.837295e-04  0.0015788692
## 32   0.0217324685  0.0144418196 -4.394638e-02  0.0404849357
## 33  -0.0168939466  0.0298157634 -8.228504e-03  0.0166775013
## 34   0.0077669138 -0.0030741615 -1.445491e-02  0.0146485749
## 35  -0.0014067662 -0.0022856702  1.695102e-03  0.0053873528
## 36  -0.0116711904  0.0129447453  2.220269e-03  0.0153979358
## 37   0.0057036404 -0.0163813380  9.300644e-03  0.0245926634
## 38  -0.0089292193  0.0173237487 -5.965309e-03  0.0272941533
## 39   0.0525838647 -0.0196805625 -4.991404e-02 -0.0268289634
## 40  -0.0239501201  0.0578672552 -2.777601e-02  0.0365019693
## 41  -0.0052613276 -0.0040842277  1.124646e-02  0.0074485959
## 42   0.0073864435  0.0248051663 -4.328112e-02  0.0157579546
## 43   0.0004973624 -0.0001082106 -2.149949e-05 -0.0011358037
## 44   0.0001185447  0.0061733038 -1.016363e-02  0.0075432669
## 45   0.0715038163 -0.0374153828 -5.686971e-02  0.0599818535
## 46   0.0715038163 -0.0374153828 -5.686971e-02  0.0599818535
## 47   0.0033548757 -0.0069252274  1.085666e-03  0.0031103982
## 48   0.0051689352 -0.0055181953  1.214002e-03 -0.0053409362
## 49   0.0052476681  0.0020122368 -5.395271e-03 -0.0080082721
## 50   0.0519217371  0.0038967108 -7.338145e-02  0.0618228430
## 51   0.0069466827 -0.0021440944 -3.215554e-03 -0.0084303469
## 52   0.0025866158 -0.0004261537 -5.390524e-04 -0.0053111739
## 53   0.0029177732 -0.0071821055  3.520523e-03  0.0460998131
## 54  -0.0093713241  0.0752604715 -6.521492e-02  0.0606911701
## 55   0.0257007120 -0.0169192677 -8.447383e-03 -0.0190302884
## 56   0.0435025275 -0.0350101749 -1.185301e-02 -0.0235906099
## 57   0.0327529642 -0.0143411368 -2.893890e-02  0.0649913075
## 58  -0.0012820902  0.0014866892  6.705890e-04 -0.0009529334
## 59   0.0375865185 -0.0145838943 -2.402059e-02 -0.0254020446
## 60   0.0360800408 -0.0319907248 -4.828496e-03 -0.0236250679
## 61   0.0360800408 -0.0319907248 -4.828496e-03 -0.0236250679
## 62   0.0135795739 -0.0002640427 -9.001036e-03 -0.0192781898
## 63   0.0169459247  0.0062508105 -1.907464e-02 -0.0218878001
## 64  -0.0053246289  0.0094247645  1.671536e-03 -0.0086348518
## 65  -0.0220270212  0.0259133895  2.658806e-03  0.0589836984
## 66  -0.0006836441 -0.0083602448  1.309845e-02 -0.0073423613
## 67   0.0154326277  0.0059176953 -1.586671e-02 -0.0235511618
## 68  -0.0014561022  0.0001160779  1.825160e-03  0.0006708564
## 69   0.0126774086 -0.0183270936  1.158027e-02 -0.0206994972
## 70  -0.0016043741 -0.0073854739  1.614913e-02 -0.0131774134
## 71   0.0285192423 -0.0055132458 -3.240144e-02  0.0780376750
## 72  -0.0609297136  0.0602196195  1.919144e-02  0.0563418008
## 73   0.0165663084  0.0056259942 -2.794000e-02  0.0780293419
## 74   0.0739920802 -0.0616391966 -1.755772e-02 -0.0410433786
## 75   0.0467891156 -0.0901222269  3.058250e-02  0.0787079853
## 76   0.0010178407  0.0015812015 -3.358243e-03  0.0006980907
## 77  -0.0031561820  0.0080581941  8.084954e-03 -0.0252132777
## 78  -0.0741664123  0.1026347534 -6.935697e-03  0.0790955007
## 79   0.0381457288 -0.0393901206 -1.027058e-02  0.0912072003
## 80  -0.0155510262 -0.0077587848  3.551802e-02 -0.0130771807
## 81   0.0195205611 -0.0117979319  2.249993e-03 -0.0347690154
## 82  -0.0330916741  0.0298748841  2.127911e-02 -0.0145751891
## 83  -0.0048869479  0.0665666793 -4.419812e-02 -0.0373398030
## 84   0.0382897139 -0.0311187946 -5.986000e-04 -0.0405912493
## 85   0.0268237828 -0.0018975383 -1.347660e-02 -0.0440808019
## 86   0.0822832433 -0.1004646215 -6.148143e-03  0.1092602638
## 87   0.0920411663 -0.0474330101 -4.837324e-02 -0.0587370084
## 88   0.0195994398 -0.0042642281 -8.472253e-04 -0.0447583363
## 89  -0.0646243145  0.0644226346  3.102263e-02 -0.0196140349
## 90   0.0354326301 -0.0146913086 -9.060464e-03 -0.0502068711
## 91   0.0095105881  0.0055365983  2.853041e-03 -0.0448880146
## 92   0.0310083220 -0.0291040873  1.120359e-02 -0.0488144910
## 93  -0.0863864569 -0.0713128840  1.890503e-01  0.0688574577
## 94   0.0435988201  0.0612320637 -1.214828e-01  0.1258895121
## 95   0.0671616672 -0.1293625437  4.389849e-02  0.1129784021
## 96  -0.0274218685  0.0589076695 -3.066624e-03 -0.0424747887
## 97  -0.0422688169  0.0451839737  2.797504e-02 -0.0353910767
## 98   0.0088370734 -0.0758279432  6.651468e-02  0.1078042285
## 99   0.0088370734 -0.0758279432  6.651468e-02  0.1078042285
## 100 -0.0389556321  0.0734424794 -1.328193e-03 -0.0446016772
## 101  0.0497929899  0.0183670438 -5.604788e-02 -0.0643139295
## 102 -0.0547072463 -0.0916388437  1.677271e-01  0.1078226523
## 103  0.0525891903  0.0193984728 -5.919533e-02 -0.0679255752
## 104  0.0633044221  0.0035294892 -5.684797e-02 -0.0691101774
## 105  0.0752825805 -0.0483693026 -1.998731e-02 -0.0682132630
## 106 -0.0673624843 -0.0110190302  1.131315e-01 -0.0204575935
## 107 -0.0115789275  0.1117540258 -6.943183e-02 -0.0630294613
## 108 -0.0209482988 -0.0329506156  8.675457e-02 -0.0502247574
## 109  0.0935981514 -0.1091846989 -1.225596e-02  0.1448999594
## 110  0.0296860872 -0.1234745533  1.158357e-01 -0.0598481700
## 111 -0.1053282214  0.0734571157  8.573394e-02 -0.0373270608
## 112  0.1114086768 -0.0931419502 -1.683026e-02 -0.0810885816
## 113 -0.0873462288  0.1287021769 -1.625402e-02  0.1438280987
## 114 -0.0549283206  0.1722761128 -7.088949e-02 -0.0670643749
## 115 -0.0528473026 -0.0880447386  1.615304e-01  0.1375130779
## 116 -0.0301655006  0.0754549565 -4.690689e-03 -0.0669117275
## 117  0.1344538870 -0.1819684315  4.559006e-02 -0.0853929956
## 118 -0.0242552825  0.0816506317 -1.723771e-02 -0.0709125733
## 119  0.0309388933  0.0367214427 -4.167928e-02 -0.0801626636
## 120  0.2318280312 -0.1462544496 -1.133372e-01 -0.1080034988
## 121  0.0135782958 -0.0735387578  9.153771e-02 -0.0709477786
## 122 -0.0565399075  0.0290143318  7.945047e-02 -0.0672364974
## 123  0.1017154370 -0.0418895027 -5.026522e-02 -0.0942226567
## 124  0.0553572594 -0.0293804238 -1.589143e-03 -0.0907756167
## 125  0.0120454915  0.0349574850 -9.518093e-03 -0.0896530263
## 126 -0.0497782473  0.1767660428 -7.238840e-02 -0.0882432508
## 127 -0.1824500911  0.1876677899  8.528654e-02 -0.0630457093
## 128  0.1047827980 -0.1143723014 -2.183830e-02  0.1940161375
## 129  0.0859915889  0.0332842029 -8.633538e-02 -0.1362181611
## 130 -0.4671330923  0.8862767577 -2.119728e-01 -0.1385824393
## 131  0.1009228736 -0.2257521225  9.835685e-02 -0.2808390427
## 132  0.0850820055 -0.2375172592  4.875908e-02  0.1678853509
## 133  0.5507403159 -0.3824014950 -3.407104e-01 -0.3002492472
## 134 -0.1057714987 -0.0785647838  1.577439e-01  0.1411731297
## 135 -0.1524173880 -0.0858638371  2.361907e-01  0.1260701892
## 136  0.0902407800 -0.0946731584 -8.481423e-02  0.1239556865
## 137 -0.1427882305 -0.0166879676  1.555175e-01  0.1178261159
## 138 -0.0379276269 -0.0179340722  1.633140e-02  0.1123780373
## 139 -0.0262461840 -0.0144576821 -2.382096e-03  0.1106713699
## 140  0.0602830311  0.0700439240 -1.528708e-01 -0.1504466827
## 141  0.0635654069 -0.1857481068  1.069404e-01 -0.1264879941
## 142 -0.0927773240  0.0402221874  8.238770e-02 -0.0874417872
## 143 -0.0475855880 -0.0459501104  6.752396e-02  0.0935097490
## 144 -0.1403257068  0.1645720434 -2.602789e-02  0.0987829713
## 145 -0.0362638219 -0.0458290741  9.577572e-02 -0.0911890921
## 146  0.0078978273  0.0666537619 -7.939347e-02 -0.1195302035
## 147  0.1236788904  0.1313621675 -3.571786e-01  0.1096530663
## 148  0.0675317826 -0.0042882021 -8.577943e-02 -0.1281922643
## 149 -0.1144746168  0.0299939161  8.442331e-02  0.0849475598
## 150 -0.0815497289  0.0618206842  7.816488e-03  0.0824194217
## 151 -0.0832032151 -0.0488244094  1.283004e-01  0.0743426076
## 152 -0.1219569855  0.0585976597  6.995140e-02  0.0747855482
## 153 -0.0506944303  0.0412999695  2.506045e-02 -0.0744758035
## 154  0.0103708790 -0.0454623302  3.310259e-02 -0.0870489435
## 155 -0.0068109258 -0.0069922895 -2.227796e-02  0.0808050055
## 156 -0.0445992014  0.0742574049 -1.712173e-02 -0.0773882870
## 157 -0.0326696809  0.0320670692  1.051935e-02 -0.0750270089
## 158 -0.0047904480  0.0117916987 -5.780053e-03 -0.0756874297
## 159  0.0129511451 -0.0641300360  1.412024e-02  0.0718197725
## 160 -0.0491922311  0.0102755350  5.509754e-02 -0.0426457296
## 161 -0.0499388919  0.0469996897  1.816423e-02 -0.0534926634
## 162 -0.0462419952 -0.0343475548  6.896371e-02  0.0617191518
## 163 -0.0839840116  0.0386599184  4.475680e-02  0.0622540278
## 164 -0.0504991630  0.0962422627 -6.354910e-02  0.0687170803
## 165 -0.0377281481  0.0325528540  1.677467e-02 -0.0457738840
## 166 -0.0226060327  0.0233434878  6.086578e-03 -0.0540514762
## 167 -0.0771299811  0.0180498212  6.038900e-02  0.0545795484
## 168 -0.0429100138 -0.0446622177  7.954214e-02  0.0524196004
## 169  0.0181130533 -0.0672708756  4.528569e-02 -0.0500721918
## 170 -0.1022177090  0.0877355019  2.387310e-02  0.0519368859
## 171  0.0823416649  0.0232218356 -1.339762e-01 -0.0899651830
## 172 -0.0175840331  0.0191306426  3.729478e-03 -0.0438440129
## 173 -0.0109123392 -0.0108865350  2.582083e-02 -0.0391074216
## 174 -0.0948284579  0.0881560184  1.389432e-02  0.0508143721
## 175 -0.0353054931  0.0010895052  2.214137e-02  0.0518788544
## 176  0.0566303491 -0.0627533986 -1.083136e-02 -0.0645476221
## 177 -0.0029672115  0.0073037980 -3.580175e-03 -0.0468809210
## 178 -0.0266831596  0.0325790942  1.993746e-03 -0.0354313824
## 179 -0.0104397107 -0.0034066076  1.746383e-02 -0.0351895648
## 180 -0.0712168720  0.0644657374  7.846217e-03  0.0472984439
## 181  0.0380587539 -0.0269379012 -5.134276e-02  0.0567505493
## 182 -0.0609698982  0.0250292850  3.694969e-02  0.0424469877
## 183  0.1004684778 -0.0134877031 -1.202743e-01 -0.0824274772
## 184 -0.0235332423  0.0222017804  8.504356e-03 -0.0163239061
## 185 -0.0011619578 -0.0258462684  2.824756e-02 -0.0273855606
## 186 -0.0805572980  0.0634071912  2.748089e-02  0.0352292255
## 187 -0.0101771452 -0.0031569306  1.685521e-02 -0.0177792076
## 188 -0.0494435460  0.0760852839 -3.451561e-02  0.0486355261
## 189 -0.0056953382  0.0108955013 -3.645666e-03 -0.0348291929
influence.measures(fit)  # view all the influence measures together
## Influence measures of
##   lm(formula = BWT ~ AGE + LWT + SMOKE) :
## 
##        dfb.1_   dfb.AGE   dfb.LWT  dfb.SMOK    dffit cov.r   cook.d     hat inf
## 1    0.016006  0.086426 -1.39e-01  0.057525 -0.17991 1.031 8.09e-03 0.03065    
## 2    0.097910 -0.119886 -3.35e-02  0.049588 -0.16020 1.030 6.42e-03 0.02729    
## 3   -0.008894  0.004579  7.15e-03 -0.012746 -0.01942 1.040 9.48e-05 0.01754    
## 4   -0.005713  0.002240  5.32e-03 -0.010604 -0.01537 1.038 5.94e-05 0.01617    
## 5   -0.005776  0.004591  2.97e-03 -0.006575 -0.01071 1.042 2.89e-05 0.01967    
## 6   -0.028944  0.017102  5.89e-03  0.033185 -0.05499 1.025 7.59e-04 0.00993    
## 7   -0.026516  0.007283  1.40e-02  0.030449 -0.05025 1.026 6.34e-04 0.00985    
## 8   -0.044591  0.029098  1.93e-02  0.023934 -0.05176 1.039 6.73e-04 0.01920    
## 9    0.008883 -0.016579  5.26e-03 -0.017763 -0.02777 1.043 1.94e-04 0.02123    
## 10   0.000488 -0.004878  4.39e-03 -0.008977 -0.01299 1.040 4.24e-05 0.01725    
## 11  -0.025549  0.010469  1.71e-02  0.014374 -0.03041 1.040 2.32e-04 0.01820    
## 12  -0.016729  0.036917 -3.02e-02  0.030922 -0.06587 1.032 1.09e-03 0.01571    
## 13  -0.018618  0.000953  1.80e-02  0.013395 -0.02729 1.038 1.87e-04 0.01606    
## 14   0.001096 -0.037679  2.74e-02  0.020966 -0.05512 1.042 7.63e-04 0.02244    
## 15   0.017792 -0.011810 -1.16e-02  0.017682  0.03019 1.043 2.29e-04 0.02128    
## 16   0.017792 -0.011810 -1.16e-02  0.017682  0.03019 1.043 2.29e-04 0.02128    
## 17  -0.017758  0.012732  7.21e-03  0.008046 -0.01965 1.048 9.70e-05 0.02563    
## 18  -0.000169  0.001654 -1.48e-03  0.004525  0.00615 1.038 9.51e-06 0.01520    
## 19  -0.017236  0.011074  4.58e-03  0.015617 -0.02715 1.032 1.85e-04 0.01124    
## 20  -0.002126  0.004755 -2.07e-03  0.005915  0.00892 1.042 2.00e-05 0.01941    
## 21   0.014480 -0.043248  1.58e-02  0.019022 -0.05468 1.045 7.51e-04 0.02471    
## 22   0.000338 -0.025863  2.03e-02  0.012362 -0.03649 1.051 3.35e-04 0.02840    
## 23   0.175648 -0.129228 -1.26e-01  0.040365 -0.21536 1.068 1.16e-02 0.05862   *
## 24   0.001115 -0.017729  9.76e-03  0.014658 -0.03061 1.035 2.36e-04 0.01410    
## 25  -0.004657 -0.005528  6.27e-03  0.012067 -0.02075 1.031 1.08e-04 0.01009    
## 26   0.037718 -0.024690 -3.88e-02  0.026853 -0.06931 1.035 1.21e-03 0.01847    
## 27   0.017874 -0.023109 -7.75e-06  0.024913  0.04077 1.041 4.18e-04 0.02022    
## 28   0.020742 -0.025071 -1.17e-02  0.019279 -0.04494 1.035 5.07e-04 0.01564    
## 29   0.004372 -0.001290 -4.51e-03 -0.004841 -0.00771 1.046 1.49e-05 0.02322    
## 30  -0.002613  0.002055  6.84e-04  0.001579 -0.00329 1.040 2.71e-06 0.01728    
## 31  -0.002613  0.002055  6.84e-04  0.001579 -0.00329 1.040 2.71e-06 0.01728    
## 32   0.021732  0.014442 -4.39e-02  0.040485  0.06970 1.041 1.22e-03 0.02278    
## 33  -0.016894  0.029816 -8.23e-03  0.016678  0.03612 1.068 3.28e-04 0.04335   *
## 34   0.007767 -0.003074 -1.45e-02  0.014649 -0.02920 1.033 2.14e-04 0.01214    
## 35  -0.001407 -0.002286  1.70e-03  0.005387 -0.00906 1.032 2.06e-05 0.00950    
## 36  -0.011671  0.012945  2.22e-03  0.015398  0.02336 1.043 1.37e-04 0.02066    
## 37   0.005704 -0.016381  9.30e-03  0.024593  0.03652 1.039 3.35e-04 0.01761    
## 38  -0.008929  0.017324 -5.97e-03  0.027294  0.03862 1.038 3.75e-04 0.01704    
## 39   0.052584 -0.019681 -4.99e-02 -0.026829 -0.06528 1.088 1.07e-03 0.06217   *
## 40  -0.023950  0.057867 -2.78e-02  0.036502  0.07549 1.059 1.43e-03 0.03782    
## 41  -0.005261 -0.004084  1.12e-02  0.007449  0.01465 1.058 5.40e-05 0.03463    
## 42   0.007386  0.024805 -4.33e-02  0.015758 -0.05366 1.058 7.23e-04 0.03553    
## 43   0.000497 -0.000108 -2.15e-05 -0.001136  0.00181 1.031 8.26e-07 0.00873    
## 44   0.000119  0.006173 -1.02e-02  0.007543 -0.01660 1.038 6.93e-05 0.01569    
## 45   0.071504 -0.037415 -5.69e-02  0.059982  0.11123 1.037 3.10e-03 0.02458    
## 46   0.071504 -0.037415 -5.69e-02  0.059982  0.11123 1.037 3.10e-03 0.02458    
## 47   0.003355 -0.006925  1.09e-03  0.003110 -0.00878 1.046 1.94e-05 0.02300    
## 48   0.005169 -0.005518  1.21e-03 -0.005341  0.00992 1.035 2.47e-05 0.01260    
## 49   0.005248  0.002012 -5.40e-03 -0.008008  0.01373 1.032 4.74e-05 0.01036    
## 50   0.051922  0.003897 -7.34e-02  0.061823  0.11147 1.036 3.11e-03 0.02415    
## 51   0.006947 -0.002144 -3.22e-03 -0.008430  0.01378 1.031 4.77e-05 0.00960    
## 52   0.002587 -0.000426 -5.39e-04 -0.005311  0.00847 1.031 1.80e-05 0.00877    
## 53   0.002918 -0.007182  3.52e-03  0.046100  0.05989 1.030 9.00e-04 0.01373    
## 54  -0.009371  0.075260 -6.52e-02  0.060691  0.12006 1.047 3.61e-03 0.03257    
## 55   0.025701 -0.016919 -8.45e-03 -0.019030  0.03524 1.033 3.12e-04 0.01317    
## 56   0.043503 -0.035010 -1.19e-02 -0.023591  0.05241 1.040 6.90e-04 0.02020    
## 57   0.032753 -0.014341 -2.89e-02  0.064991  0.09284 1.026 2.16e-03 0.01576    
## 58  -0.001282  0.001487  6.71e-04 -0.000953  0.00244 1.041 1.49e-06 0.01828    
## 59   0.037587 -0.014584 -2.40e-02 -0.025402  0.04879 1.033 5.98e-04 0.01442    
## 60   0.036080 -0.031991 -4.83e-03 -0.023625  0.04873 1.036 5.96e-04 0.01670    
## 61   0.036080 -0.031991 -4.83e-03 -0.023625  0.04873 1.036 5.96e-04 0.01670    
## 62   0.013580 -0.000264 -9.00e-03 -0.019278  0.03157 1.029 2.50e-04 0.00950    
## 63   0.016946  0.006251 -1.91e-02 -0.021888  0.03920 1.031 3.86e-04 0.01148    
## 64  -0.005325  0.009425  1.67e-03 -0.008635  0.01773 1.035 7.90e-05 0.01278    
## 65  -0.022027  0.025913  2.66e-03  0.058984  0.07884 1.029 1.56e-03 0.01530    
## 66  -0.000684 -0.008360  1.31e-02 -0.007342  0.01885 1.044 8.93e-05 0.02113    
## 67   0.015433  0.005918 -1.59e-02 -0.023551  0.04037 1.029 4.09e-04 0.01036    
## 68  -0.001456  0.000116  1.83e-03  0.000671  0.00202 1.134 1.03e-06 0.09848   *
## 69   0.012677 -0.018327  1.16e-02 -0.020699  0.03838 1.032 3.70e-04 0.01202    
## 70  -0.001604 -0.007385  1.61e-02 -0.013177  0.02745 1.035 1.89e-04 0.01389    
## 71   0.028519 -0.005513 -3.24e-02  0.078038  0.10801 1.020 2.92e-03 0.01504    
## 72  -0.060930  0.060220  1.92e-02  0.056342  0.09562 1.043 2.29e-03 0.02691    
## 73   0.016566  0.005626 -2.79e-02  0.078029  0.10526 1.020 2.77e-03 0.01454    
## 74   0.073992 -0.061639 -1.76e-02 -0.041043  0.09078 1.034 2.07e-03 0.01991    
## 75   0.046789 -0.090122  3.06e-02  0.078708  0.13879 1.029 4.82e-03 0.02383    
## 76   0.001018  0.001581 -3.36e-03  0.000698 -0.00372 1.108 3.47e-06 0.07758   *
## 77  -0.003156  0.008058  8.08e-03 -0.025213  0.04339 1.027 4.73e-04 0.00950    
## 78  -0.074166  0.102635 -6.94e-03  0.079096  0.14103 1.038 4.98e-03 0.02915    
## 79   0.038146 -0.039390 -1.03e-02  0.091207  0.12751 1.014 4.06e-03 0.01526    
## 80  -0.015551 -0.007759  3.55e-02 -0.013077  0.04230 1.052 4.50e-04 0.02960    
## 81   0.019521 -0.011798  2.25e-03 -0.034769  0.05622 1.024 7.93e-04 0.00910    
## 82  -0.033092  0.029875  2.13e-02 -0.014575  0.04852 1.050 5.91e-04 0.02805    
## 83  -0.004887  0.066567 -4.42e-02 -0.037340  0.09633 1.035 2.33e-03 0.02141    
## 84   0.038290 -0.031119 -5.99e-04 -0.040591  0.07027 1.023 1.24e-03 0.01092    
## 85   0.026824 -0.001898 -1.35e-02 -0.044081  0.07089 1.019 1.26e-03 0.00906    
## 86   0.082283 -0.100465 -6.15e-03  0.109260  0.17929 1.009 8.01e-03 0.02025    
## 87   0.092041 -0.047433 -4.84e-02 -0.058737  0.11480 1.018 3.30e-03 0.01509    
## 88   0.019599 -0.004264 -8.47e-04 -0.044758  0.07143 1.018 1.28e-03 0.00873    
## 89  -0.064624  0.064423  3.10e-02 -0.019614  0.08650 1.066 1.88e-03 0.04442   *
## 90   0.035433 -0.014691 -9.06e-03 -0.050207  0.08090 1.016 1.64e-03 0.00920    
## 91   0.009511  0.005537  2.85e-03 -0.044888  0.07278 1.018 1.33e-03 0.00877    
## 92   0.031008 -0.029104  1.12e-02 -0.048814  0.08240 1.017 1.70e-03 0.01000    
## 93  -0.086386 -0.071313  1.89e-01  0.068857  0.20944 1.111 1.10e-02 0.08891   *
## 94   0.043599  0.061232 -1.21e-01  0.125890  0.20889 1.001 1.09e-02 0.02163    
## 95   0.067162 -0.129363  4.39e-02  0.112978  0.19923 1.011 9.89e-03 0.02383    
## 96  -0.027422  0.058908 -3.07e-03 -0.042475  0.09335 1.024 2.18e-03 0.01461    
## 97  -0.042269  0.045184  2.80e-02 -0.035391  0.08495 1.029 1.81e-03 0.01637    
## 98   0.008837 -0.075828  6.65e-02  0.107804  0.16723 1.011 6.98e-03 0.01956    
## 99   0.008837 -0.075828  6.65e-02  0.107804  0.16723 1.011 6.98e-03 0.01956    
## 100 -0.038956  0.073442 -1.33e-03 -0.044602  0.10679 1.025 2.86e-03 0.01690    
## 101  0.049793  0.018367 -5.60e-02 -0.064314  0.11517 1.008 3.31e-03 0.01148    
## 102 -0.054707 -0.091639  1.68e-01  0.107823  0.22371 1.033 1.25e-02 0.03761    
## 103  0.052589  0.019398 -5.92e-02 -0.067926  0.12164 1.006 3.69e-03 0.01148    
## 104  0.063304  0.003529 -5.68e-02 -0.069110  0.12118 1.005 3.67e-03 0.01119    
## 105  0.075283 -0.048369 -2.00e-02 -0.068213  0.11857 1.006 3.51e-03 0.01124    
## 106 -0.067362 -0.011019  1.13e-01 -0.020458  0.12038 1.105 3.64e-03 0.07845   *
## 107 -0.011579  0.111754 -6.94e-02 -0.063029  0.16071 1.016 6.45e-03 0.02078    
## 108 -0.020948 -0.032951  8.68e-02 -0.050225  0.12223 1.023 3.74e-03 0.01830    
## 109  0.093598 -0.109185 -1.23e-02  0.144900  0.22415 0.982 1.24e-02 0.01821    
## 110  0.029686 -0.123475  1.16e-01 -0.059848  0.18276 1.033 8.35e-03 0.03203    
## 111 -0.105328  0.073457  8.57e-02 -0.037327  0.14338 1.046 5.15e-03 0.03526    
## 112  0.111409 -0.093142 -1.68e-02 -0.081089  0.15712 1.000 6.15e-03 0.01445    
## 113 -0.087346  0.128702 -1.63e-02  0.143828  0.21991 0.993 1.20e-02 0.02058    
## 114 -0.054928  0.172276 -7.09e-02 -0.067064  0.21094 1.020 1.11e-02 0.02942    
## 115 -0.052847 -0.088045  1.62e-01  0.137513  0.24377 1.003 1.48e-02 0.02703    
## 116 -0.030166  0.075455 -4.69e-03 -0.066912  0.13552 1.003 4.58e-03 0.01268    
## 117  0.134454 -0.181968  4.56e-02 -0.085393  0.22222 1.010 1.23e-02 0.02669    
## 118 -0.024255  0.081651 -1.72e-02 -0.070913  0.14366 1.000 5.14e-03 0.01285    
## 119  0.030939  0.036721 -4.17e-02 -0.080163  0.13785 0.992 4.73e-03 0.01009    
## 120  0.231828 -0.146254 -1.13e-01 -0.108003  0.25545 0.988 1.62e-02 0.02386    
## 121  0.013578 -0.073539  9.15e-02 -0.070948  0.15795 1.006 6.22e-03 0.01645    
## 122 -0.056540  0.029014  7.95e-02 -0.067236  0.14685 1.003 5.38e-03 0.01407    
## 123  0.101715 -0.041890 -5.03e-02 -0.094223  0.16122 0.982 6.45e-03 0.01087    
## 124  0.055357 -0.029380 -1.59e-03 -0.090776  0.14623 0.981 5.31e-03 0.00908    
## 125  0.012045  0.034957 -9.52e-03 -0.089653  0.14954 0.979 5.55e-03 0.00920    
## 126 -0.049778  0.176766 -7.24e-02 -0.088243  0.23476 0.990 1.37e-02 0.02164    
## 127 -0.182450  0.187668  8.53e-02 -0.063046  0.25364 1.026 1.60e-02 0.03847    
## 128  0.104783 -0.114372 -2.18e-02  0.194016  0.28457 0.937 1.98e-02 0.01657    
## 129  0.085992  0.033284 -8.63e-02 -0.136218  0.23171 0.923 1.31e-02 0.01017   *
## 130 -0.467133  0.886277 -2.12e-01 -0.138582  0.92825 0.969 2.08e-01 0.10239   *
## 131  0.100923 -0.225752  9.84e-02 -0.280839 -0.42335 0.860 4.29e-02 0.01941   *
## 132  0.085082 -0.237517  4.88e-02  0.167885 -0.36904 0.856 3.26e-02 0.01485   *
## 133  0.550740 -0.382401 -3.41e-01 -0.300249 -0.66120 0.901 1.05e-01 0.04998   *
## 134 -0.105771 -0.078565  1.58e-01  0.141173 -0.27577 0.921 1.86e-02 0.01362   *
## 135 -0.152417 -0.085864  2.36e-01  0.126070 -0.30701 0.957 2.32e-02 0.02246    
## 136  0.090241 -0.094673 -8.48e-02  0.123956 -0.25360 0.928 1.57e-02 0.01252   *
## 137 -0.142788 -0.016688  1.56e-01  0.117826 -0.23712 0.960 1.39e-02 0.01532    
## 138 -0.037928 -0.017934  1.63e-02  0.112378 -0.18140 0.952 8.11e-03 0.00883    
## 139 -0.026246 -0.014458 -2.38e-03  0.110671 -0.17903 0.954 7.90e-03 0.00876    
## 140  0.060283  0.070044 -1.53e-01 -0.150447 -0.24651 0.990 1.51e-02 0.02313    
## 141  0.063565 -0.185748  1.07e-01 -0.126488 -0.25503 1.019 1.62e-02 0.03528    
## 142 -0.092777  0.040222  8.24e-02 -0.087442 -0.15540 1.023 6.04e-03 0.02291    
## 143 -0.047586 -0.045950  6.75e-02  0.093510 -0.16642 0.979 6.87e-03 0.01097    
## 144 -0.140326  0.164572 -2.60e-02  0.098783 -0.22145 0.988 1.22e-02 0.01943    
## 145 -0.036264 -0.045829  9.58e-02 -0.091189 -0.15626 1.023 6.10e-03 0.02293    
## 146  0.007898  0.066654 -7.94e-02 -0.119530 -0.18024 1.004 8.09e-03 0.01881    
## 147  0.123679  0.131362 -3.57e-01  0.109653 -0.41116 0.976 4.16e-02 0.04000    
## 148  0.067532 -0.004288 -8.58e-02 -0.128192 -0.18213 1.000 8.25e-03 0.01764    
## 149 -0.114475  0.029994  8.44e-02  0.084948 -0.15983 0.995 6.36e-03 0.01361    
## 150 -0.081550  0.061821  7.82e-03  0.082419 -0.14247 0.993 5.05e-03 0.01096    
## 151 -0.083203 -0.048824  1.28e-01  0.074343 -0.17319 1.011 7.48e-03 0.02031    
## 152 -0.121957  0.058598  7.00e-02  0.074786 -0.14938 1.008 5.57e-03 0.01590    
## 153 -0.050694  0.041300  2.51e-02 -0.074476 -0.11263 1.024 3.18e-03 0.01733    
## 154  0.010371 -0.045462  3.31e-02 -0.087049 -0.12266 1.019 3.76e-03 0.01648    
## 155 -0.006811 -0.006992 -2.23e-02  0.080805 -0.13380 0.988 4.45e-03 0.00901    
## 156 -0.044599  0.074257 -1.71e-02 -0.077388 -0.12678 1.026 4.02e-03 0.02060    
## 157 -0.032670  0.032067  1.05e-02 -0.075027 -0.10518 1.022 2.77e-03 0.01531    
## 158 -0.004790  0.011792 -5.78e-03 -0.075687 -0.09832 1.021 2.42e-03 0.01373    
## 159  0.012951 -0.064130  1.41e-02  0.071820 -0.13484 0.998 4.53e-03 0.01124    
## 160 -0.049192  0.010276  5.51e-02 -0.042646 -0.08294 1.045 1.73e-03 0.02714    
## 161 -0.049939  0.047000  1.82e-02 -0.053493 -0.09048 1.035 2.05e-03 0.02107    
## 162 -0.046242 -0.034348  6.90e-02  0.061719 -0.12056 1.013 3.63e-03 0.01362    
## 163 -0.083984  0.038660  4.48e-02  0.062254 -0.11411 1.013 3.26e-03 0.01291    
## 164 -0.050499  0.096242 -6.35e-02  0.068717 -0.15188 1.011 5.76e-03 0.01732    
## 165 -0.037728  0.032553  1.68e-02 -0.045774 -0.07338 1.036 1.35e-03 0.01916    
## 166 -0.022606  0.023343  6.09e-03 -0.054051 -0.07556 1.029 1.43e-03 0.01526    
## 167 -0.077130  0.018050  6.04e-02  0.054580 -0.10576 1.020 2.80e-03 0.01456    
## 168 -0.042910 -0.044662  7.95e-02  0.052420 -0.11796 1.024 3.48e-03 0.01825    
## 169  0.018113 -0.067271  4.53e-02 -0.050072 -0.09819 1.050 2.42e-03 0.03281    
## 170 -0.102218  0.087736  2.39e-02  0.051937 -0.12228 1.032 3.75e-03 0.02305    
## 171  0.082342  0.023222 -1.34e-01 -0.089965 -0.17281 1.038 7.47e-03 0.03387    
## 172 -0.017584  0.019131  3.73e-03 -0.043844 -0.06115 1.032 9.39e-04 0.01522    
## 173 -0.010912 -0.010887  2.58e-02 -0.039107 -0.05759 1.036 8.33e-04 0.01718    
## 174 -0.094828  0.088156  1.39e-02  0.050814 -0.11896 1.032 3.55e-03 0.02248    
## 175 -0.035305  0.001090  2.21e-02  0.051879 -0.08449 1.015 1.79e-03 0.00937    
## 176  0.056630 -0.062753 -1.08e-02 -0.064548 -0.10307 1.036 2.66e-03 0.02322    
## 177 -0.002967  0.007304 -3.58e-03 -0.046881 -0.06090 1.030 9.31e-04 0.01373    
## 178 -0.026683  0.032579  1.99e-03 -0.035431 -0.05814 1.039 8.49e-04 0.02025    
## 179 -0.010440 -0.003407  1.75e-02 -0.035190 -0.04920 1.034 6.08e-04 0.01546    
## 180 -0.071217  0.064466  7.85e-03  0.047298 -0.09754 1.027 2.38e-03 0.01664    
## 181  0.038059 -0.026938 -5.13e-02  0.056751 -0.11528 1.011 3.32e-03 0.01242    
## 182 -0.060970  0.025029  3.69e-02  0.042447 -0.08018 1.026 1.61e-03 0.01387    
## 183  0.100468 -0.013488 -1.20e-01 -0.082427 -0.15924 1.044 6.35e-03 0.03558    
## 184 -0.023533  0.022202  8.50e-03 -0.016324 -0.03421 1.053 2.94e-04 0.03060    
## 185 -0.001162 -0.025846  2.82e-02 -0.027386 -0.04991 1.048 6.26e-04 0.02702    
## 186 -0.080557  0.063407  2.75e-02  0.035229 -0.08978 1.045 2.02e-03 0.02820    
## 187 -0.010177 -0.003157  1.69e-02 -0.017779 -0.02902 1.042 2.12e-04 0.02040    
## 188 -0.049444  0.076085 -3.45e-02  0.048636 -0.10883 1.027 2.97e-03 0.01838    
## 189 -0.005695  0.010896 -3.65e-03 -0.034829 -0.04647 1.033 5.42e-04 0.01432
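
A minimal sketch applying the rules of thumb listed earlier to flag potentially influential observations (n is the number of observations, p the number of coefficients including the intercept):

n = nrow(aaa)
p = length(coef(fit))
which(cooks.distance(fit) > 4/n)                        # Cook's D rule of thumb
which(abs(dffits(fit)) > 2*sqrt(p/n))                   # DFFITS rule for large data
which(apply(abs(dfbetas(fit)), 1, max) > 2/sqrt(n))     # DFBETAS rule for large data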

Distinction between outliers and influential data points

Consider whether a given point is an outlier or an influential case.

Example: the test below checks several assumptions at the same time.

Global test of model assumptions

The gvlma package performs a global validation of linear model assumptions as well as separate evaluations of skewness, kurtosis, and heteroscedasticity.

install.packages("gvlma")

library(gvlma)
gvmodel<-gvlma(fit)
summary(gvmodel)
## 
## Call:
## lm(formula = BWT ~ AGE + LWT + SMOKE)
## 
## Residuals:
##      Min       1Q   Median       3Q      Max 
## -2069.89  -433.18    13.67   516.45  1813.75 
## 
## Coefficients:
##             Estimate Std. Error t value Pr(>|t|)    
## (Intercept) 2362.720    300.687   7.858 3.11e-13 ***
## AGE            7.093      9.925   0.715   0.4757    
## LWT            4.019      1.720   2.337   0.0205 *  
## SMOKE       -267.213    105.802  -2.526   0.0124 *  
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
## 
## Residual standard error: 708.8 on 185 degrees of freedom
## Multiple R-squared:  0.06988,    Adjusted R-squared:  0.05479 
## F-statistic: 4.633 on 3 and 185 DF,  p-value: 0.003781
## 
## 
## ASSESSMENT OF THE LINEAR MODEL ASSUMPTIONS
## USING THE GLOBAL TEST ON 4 DEGREES-OF-FREEDOM:
## Level of Significance =  0.05 
## 
## Call:
##  gvlma(x = fit) 
## 
##                      Value   p-value                   Decision
## Global Stat        31.5604 2.353e-06 Assumptions NOT satisfied!
## Skewness            3.7646 5.235e-02    Assumptions acceptable.
## Kurtosis            0.2277 6.332e-01    Assumptions acceptable.
## Link Function       0.1574 6.916e-01    Assumptions acceptable.
## Heteroscedasticity 27.4107 1.645e-07 Assumptions NOT satisfied!

Variable Selection

Stepwise Regression

install.packages("MASS")

library(MASS)
y =c(202,186,187,180,156,169,174,172,153,199,193,174,198,183,178)
x1 = c(18,23,25,35,65,54,34,56,72,19,23,42,18,39,37)
x2 = c(56,45,67,89,65,76,55,66,77,63,53,49,76,62,53)
x3 = c(6,5,7,9,5,6,5,6,7,3,3,9,6,2,3)
fit <-lm(y~x1+x2+x3)
step <-stepAIC(fit, direction="both")      # direction = c("both", "backward", "forward")
## Start:  AIC=48.81
## y ~ x1 + x2 + x3
## 
##        Df Sum of Sq     RSS    AIC
## - x3    1     26.45  254.35 48.460
## <none>               227.89 48.812
## - x2    1     34.09  261.99 48.904
## - x1    1   2504.38 2732.27 84.073
## 
## Step:  AIC=48.46
## y ~ x1 + x2
## 
##        Df Sum of Sq     RSS    AIC
## - x2    1     18.08  272.43 47.490
## <none>               254.35 48.460
## + x3    1     26.45  227.89 48.812
## - x1    1   2582.36 2836.71 82.635
## 
## Step:  AIC=47.49
## y ~ x1
## 
##        Df Sum of Sq     RSS    AIC
## <none>               272.43 47.490
## + x2    1     18.08  254.35 48.460
## + x3    1     10.45  261.99 48.904
## - x1    1   2724.50 2996.93 81.459
#   "-" marks a variable dropped (backward step); "+" marks a variable added (forward step)
step$anova # display results
## Stepwise Model Path 
## Analysis of Deviance Table
## 
## Initial Model:
## y ~ x1 + x2 + x3
## 
## Final Model:
## y ~ x1
## 
## 
##   Step Df Deviance Resid. Df Resid. Dev      AIC
## 1                         11   227.8941 48.81246
## 2 - x3  1 26.45498        12   254.3491 48.45986
## 3 - x2  1 18.08209        13   272.4312 47.49004
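
The object returned by stepAIC() is itself the fitted lm for the final model (y ~ x1 here), so it can be used directly; a small usage sketch:

summary(step)                                   # coefficients of the selected model
predict(step, newdata = data.frame(x1 = 30))    # prediction from the selected model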