2

a)

yo <- ts(USA_data$YO, start = 1960, frequency = 1)
io <- ts(USA_data$IO, start = 1960, frequency = 1)

lnyo <- log(yo)
lnio <- log(io)

ts.plot(lnio, lnyo, ylab = "lnIO and lnYO", col = c("blue", "black"))
legend("topleft", c("lnIO", "lnYO"),
col = c("blue", "black"), lty = c(1, 1), bty = "n")

b)

acf(lnio)

acf(lnyo)

Both ACFs decrease slowly and exceed the 95% confidence bands for many lags. There is clear autocorrelation, indicating non-stationarity. The series may still be weakly dependent, since the autocorrelations become insignificant at around 20 lags.
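A formal unit-root test points the same way. A minimal sketch (not part of the original output), assuming the tseries package is installed:

library(tseries)
adf.test(lnio) # H0: unit root; failing to reject supports non-stationarity
adf.test(lnyo)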

c)

dlnio <- diff(lnio)
dlnyo <- diff(lnyo)

acf(dlnio)

acf(dlnyo)

After first differencing, the autocorrelation is essentially removed: only lag 0 is significant (it equals 1 by definition). The differenced series therefore appear to be at least weakly stationary.
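A Ljung-Box test from base R can confirm this formally; a minimal sketch (not part of the original output):

# H0: no autocorrelation up to the chosen lag; large p-values support stationarity
Box.test(dlnio, lag = 10, type = "Ljung-Box")
Box.test(dlnyo, lag = 10, type = "Ljung-Box")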

3

llnio <- embed(lnio, dimension = 2)
head(llnio)
##          [,1]     [,2]
## [1,] 13.34799 13.31652
## [2,] 13.44161 13.34799
## [3,] 13.49743 13.44161
## [4,] 13.56089 13.49743
## [5,] 13.65956 13.56089
## [6,] 13.74639 13.65956
llnio0 <- llnio[, 1]
llnio1 <- llnio[, 2]
lnioAR1<-lm(llnio0~llnio1)
summary(lnioAR1)
## 
## Call:
## lm(formula = llnio0 ~ llnio1)
## 
## Residuals:
##      Min       1Q   Median       3Q      Max 
## -0.20627 -0.01933  0.01459  0.04117  0.17305 
## 
## Coefficients:
##             Estimate Std. Error t value Pr(>|t|)    
## (Intercept)  0.24327    0.18913   1.286    0.203    
## llnio1       0.98560    0.01304  75.580   <2e-16 ***
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
## 
## Residual standard error: 0.06419 on 61 degrees of freedom
## Multiple R-squared:  0.9894, Adjusted R-squared:  0.9893 
## F-statistic:  5712 on 1 and 61 DF,  p-value: < 2.2e-16
plot(llnio1, llnio0, xlab = "lnIO (t-1)", ylab = "lnIO (t)", main = "AR(1) Regression Line: lnIO_(t) vs lnIO_(t-1)", pch = 16, col = "blue")
abline(lnioAR1, col = "red", lwd = 2)

The slope B1 = 0.98560 is very close to 1 and highly significant, and the R^2 is very high, so lnio(t) differs from lnio(t-1) only by the small amount (1 - B1): the series and its first lag are very highly correlated. In elasticity terms, a 1% change in lnio(t-1) is associated with roughly a 1% change in lnio(t).

This indicates that the series is not stationary: it is highly persistent, behaving close to a random walk, with a strong trend-like relationship between the t and t-1 values.
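This can be made slightly more formal with the output above. Under H0: B1 = 1, the ratio (B1 - 1)/se(B1) should be compared with Dickey-Fuller critical values (about -2.86 at the 5% level with an intercept), not the usual t table; a sketch (not part of the original output):

(coef(lnioAR1)["llnio1"] - 1) / coef(summary(lnioAR1))["llnio1", "Std. Error"]
# about -1.10, well above -2.86, so a unit root cannot be rejected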

4

ldlnio<-embed(dlnio, dimension=2)
ldlnio0 <- ldlnio[, 1]
ldlnio1 <- ldlnio[, 2]
lm(ldlnio0~ldlnio1)
## 
## Call:
## lm(formula = ldlnio0 ~ ldlnio1)
## 
## Coefficients:
## (Intercept)      ldlnio1  
##      0.0310       0.1038

The slope B1 is fairly low, about 0.10 (the intercept is 0.031). There is some correlation between the differenced series and its first lag, but it is weak, so the differenced model is much closer to stationarity than the model in levels.
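The printed output above shows only the point estimates; to check whether the small slope is even significantly different from zero, the model can be stored and inspected with summary(). A sketch (not part of the original output):

dlnioAR1 <- lm(ldlnio0 ~ ldlnio1) # same AR(1) on the differenced series, stored
summary(dlnioAR1)$coefficients # slope with its standard error, t and p value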

5

A static model relates variables dated in the same time period only: the relationship is contemporaneous, with no lags or leads.

q<-lm(lnio~lnyo)
plot(q$residuals, type ="l")

acf(q$residuals)

#### The autocorrelation shown in the ACF of the residuals does not bias the estimates, as long as the first three time-series regression assumptions (TS.1-TS.3) still hold. However, it makes the usual standard errors incorrect, which in turn invalidates the t and p values. To circumvent this, lags can be added to absorb the effect of earlier periods, or the standard errors can be corrected, for example with HAC (Newey-West) standard errors.
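# A minimal sketch of the HAC route (not in the original output), assuming the
# sandwich and lmtest packages are installed:
library(sandwich)
library(lmtest)
# re-test the static regression with Newey-West (HAC) standard errors, which
# remain valid under the serial correlation visible in the ACF
coeftest(q, vcov = NeweyWest(q))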

6

Dynamic regressions contain variables dated in different time periods; regression (7) includes variables from both t and t-1.

llnyo<-embed(lnyo, dimension =2)
llnyo0<-llnyo[, 1]
llnyo1<-llnyo[, 2]
FDL1<-lm(llnio0~llnyo0+llnyo1)
# Since the lagged variables from (c) were already available, the regression was run with llnio0, which has the same number of observations as the lagged regressors. Using lnio directly would not work because of an observation mismatch: one observation has to be dropped to align with t-1.
# To do the question as stated:
#FDL1<-lm(lnio~llnyo0+llnyo1) # does not work: observation mismatch
lnio0 <- lnio[-1] # dropping the first observation of lnio reproduces llnio0 from embed(), so this adds nothing new
FDL1<-lm(llnio0~llnyo0+llnyo1)
summary(FDL1)
## 
## Call:
## lm(formula = llnio0 ~ llnyo0 + llnyo1)
## 
## Residuals:
##      Min       1Q   Median       3Q      Max 
## -0.11652 -0.03579  0.01002  0.04068  0.11918 
## 
## Coefficients:
##             Estimate Std. Error t value Pr(>|t|)    
## (Intercept)  -4.7245     0.2229 -21.199  < 2e-16 ***
## llnyo0        2.9272     0.3531   8.290 1.57e-11 ***
## llnyo1       -1.7444     0.3488  -5.002 5.25e-06 ***
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
## 
## Residual standard error: 0.05378 on 60 degrees of freedom
## Multiple R-squared:  0.9927, Adjusted R-squared:  0.9925 
## F-statistic:  4081 on 2 and 60 DF,  p-value: < 2.2e-16
#The impact multiplier is positive and strong at 2.93; with one lag, the delayed impact is negative, with a lagged multiplier of -1.74. The long-run multiplier is about 1.18.
#The p values are extremely small, so the nulls are rejected: both GDP terms (llnyo0 and llnyo1) are significant.
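# Hedged sketch (not in the original output): the multipliers computed directly
# from the fitted coefficients.
b_fdl <- coef(FDL1)
b_fdl["llnyo0"] # impact multiplier, about 2.93
b_fdl["llnyo0"] + b_fdl["llnyo1"] # long-run multiplier, about 1.18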

7

#a)
py <- USA_data$YU / USA_data$YO # columns taken from USA_data, as in question 2
pi <- USA_data$IU / USA_data$IO # note: this shadows R's built-in constant pi
lnpy <- log(py)
lnpi <- log(pi)
# note: embed() is applied here to the level ratios pi and py; given the naming
# (llnpi0, llnpi1), embed(lnpi, ...) was probably intended
llnpi <- embed(pi, dimension = 2)
llnpy <- embed(py, dimension = 2)
llnpy0 <- lnpy[-1]
llnpi0 <- llnpi[, 1]
llnpi1 <- llnpi[, 2]
r7 <- lm(llnio0 ~ llnyo0+ llnpi0 + llnpy0 + llnpi1 + llnyo1)
summary(r7)
## 
## Call:
## lm(formula = llnio0 ~ llnyo0 + llnpi0 + llnpy0 + llnpi1 + llnyo1)
## 
## Residuals:
##       Min        1Q    Median        3Q       Max 
## -0.113140 -0.026378  0.004365  0.032633  0.091531 
## 
## Coefficients:
##             Estimate Std. Error t value Pr(>|t|)    
## (Intercept) -8.60575    0.94365  -9.120 9.85e-13 ***
## llnyo0       3.01745    0.31105   9.701 1.13e-13 ***
## llnpi0       0.51101    0.41999   1.217 0.228724    
## llnpy0      -0.29536    0.08217  -3.595 0.000679 ***
## llnpi1      -0.24250    0.41753  -0.581 0.563673    
## llnyo1      -1.62008    0.30695  -5.278 2.11e-06 ***
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
## 
## Residual standard error: 0.04672 on 57 degrees of freedom
## Multiple R-squared:  0.9948, Adjusted R-squared:  0.9943 
## F-statistic:  2168 on 5 and 57 DF,  p-value: < 2.2e-16
#B1, B3 and B5 are significant at the 0.1% level (three stars); the other coefficients are not. The current investment price term (llnpi0) is not significant, while the output price term (llnpy0) is.
# b)
# F = ((SSRr - SSRur)/q) / (SSRur/(n - k - 1))
SSRr <- sum(residuals(FDL1)^2)
SSRur <- sum(residuals(r7)^2)
q <- 3 # number of restrictions (note: this overwrites the lm object q from question 5)
n <- nobs(r7)
k <- 5 # number of regressors in the unrestricted model
f<-((SSRr - SSRur)/q) / (SSRur/(n - k - 1)) #= 7.507215
n-k-1 #= 57=df
## [1] 57
critical_value <- qf(0.95, df1 = q, df2 = n - k - 1)# = 2.7664 
# The F statistic exceeds the critical value, so the null that the three price coefficients are jointly zero is rejected.
library(car)
## Loading required package: carData
linearHypothesis(r7, c("llnpi0=0", "llnpy0=0", "llnpi1=0"), test = "F")
## 
## Linear hypothesis test:
## llnpi0 = 0
## llnpy0 = 0
## llnpi1 = 0
## 
## Model 1: restricted model
## Model 2: llnio0 ~ llnyo0 + llnpi0 + llnpy0 + llnpi1 + llnyo1
## 
##   Res.Df     RSS Df Sum of Sq      F    Pr(>F)    
## 1     60 0.17356                                  
## 2     57 0.12441  3  0.049155 7.5072 0.0002557 ***
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
# linearHypothesis() gives the same F value; the price variables are jointly significant.
#e)
pir <- pi / py
lnpir <- log(pir)
# H0 (price homogeneity): B2 + B3 = 0
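# Hedged sketch (not in the original output): reparameterise the current-period
# price terms as B2*(lnpi - lnpy) + (B2 + B3)*lnpy, so the t-test on the lnpy
# coefficient tests H0: B2 + B3 = 0 directly. lnpir0 is constructed here for
# illustration and assumes the price variables enter the model in logs.
lnpir0 <- lnpir[-1] # current-period relative price, aligned with llnio0
r7e <- lm(llnio0 ~ llnyo0 + lnpir0 + llnpy0 + llnpi1 + llnyo1)
summary(r7e)$coefficients["llnpy0", ] # estimate of B2 + B3 with its t and p value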
#f)
linearHypothesis(r7, c("llnpi0 + llnpy0 = 0", "llnpi1 = 0"), test = "F") # p value is 0.8443, price homogenity is not rejected.
## 
## Linear hypothesis test:
## llnpi0  + llnpy0 = 0
## llnpi1 = 0
## 
## Model 1: restricted model
## Model 2: llnio0 ~ llnyo0 + llnpi0 + llnpy0 + llnpi1 + llnyo1
## 
##   Res.Df     RSS Df  Sum of Sq      F Pr(>F)
## 1     59 0.12515                            
## 2     57 0.12441  2 0.00074109 0.1698 0.8443
#g)
# H0 (weak price homogeneity): B2 + B3 + B4 = 0
linearHypothesis(r7, c("llnpi0 + llnpy0 + llnpi1 = 0"), test = "F") # p is 0.8556; weak price homogeneity cannot be rejected
## 
## Linear hypothesis test:
## llnpi0  + llnpy0  + llnpi1 = 0
## 
## Model 1: restricted model
## Model 2: llnio0 ~ llnyo0 + llnpi0 + llnpy0 + llnpi1 + llnyo1
## 
##   Res.Df     RSS Df  Sum of Sq      F Pr(>F)
## 1     58 0.12448                            
## 2     57 0.12441  1 7.2961e-05 0.0334 0.8556
#i)
#H0: B2 = -1, B3 = B1 - 1, B4 = 0, B5 = 0
linearHypothesis(r7, c("llnpi0 = -1", "llnpy0 = llnyo0 - 1", "llnpi1 = 0", "llnyo1 = 0"), test = "F") # P is ver low, we reject the  simplification to nominal relation
## 
## Linear hypothesis test:
## llnpi0 = - 1
## - llnyo0  + llnpy0 = - 1
## llnpi1 = 0
## llnyo1 = 0
## 
## Model 1: restricted model
## Model 2: llnio0 ~ llnyo0 + llnpi0 + llnpy0 + llnpi1 + llnyo1
## 
##   Res.Df     RSS Df Sum of Sq      F    Pr(>F)    
## 1     61 0.31690                                  
## 2     57 0.12441  4    0.1925 22.049 4.891e-11 ***
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1

8

# The model is dynamic: it is not purely contemporaneous, since its variables span more than one time period.
# In an FDL model the lagged dependent variable is not used as a regressor, whereas an ARDL model does include it: the ARDL(1,1) captures the effect of past investment on current investment, in addition to current and past GDP.
ardl11 <- lm(llnio0 ~ llnyo0 + llnyo1 + llnio1)
summary(ardl11)
## 
## Call:
## lm(formula = llnio0 ~ llnyo0 + llnyo1 + llnio1)
## 
## Residuals:
##       Min        1Q    Median        3Q       Max 
## -0.071637 -0.012508  0.003956  0.012111  0.080946 
## 
## Coefficients:
##             Estimate Std. Error t value Pr(>|t|)    
## (Intercept)  -1.5464     0.2457  -6.294 4.18e-08 ***
## llnyo0        2.9219     0.1681  17.382  < 2e-16 ***
## llnyo1       -2.5986     0.1764 -14.731  < 2e-16 ***
## llnio1        0.7416     0.0517  14.342  < 2e-16 ***
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
## 
## Residual standard error: 0.02561 on 59 degrees of freedom
## Multiple R-squared:  0.9984, Adjusted R-squared:  0.9983 
## F-statistic: 1.207e+04 on 3 and 59 DF,  p-value: < 2.2e-16
# The impact multiplier is 2.9219.
# The long-run multiplier is obtained by letting the adjustment play out (t going to infinity); it equals (B1 + B2)/(1 - B3).
B <- coef(ardl11)
(B["llnyo0"] + B["llnyo1"]) / (1 - B["llnio1"]) #= 1.251296 
##   llnyo0 
## 1.251296
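# Hedged sketch (not in the original output): an approximate standard error for
# the long-run multiplier via the delta method, using the car package already
# loaded above.
deltaMethod(ardl11, "(llnyo0 + llnyo1)/(1 - llnio1)")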