library(fpp2)
## Loading required package: ggplot2
## Warning: package 'ggplot2' was built under R version 3.6.2
## Loading required package: forecast
## Warning: package 'forecast' was built under R version 3.6.2
## Registered S3 method overwritten by 'xts':
##   method     from
##   as.zoo.xts zoo
## Registered S3 method overwritten by 'quantmod':
##   method            from
##   as.zoo.data.frame zoo
## Loading required package: fma
## Loading required package: expsmooth
# This week I chose the same dataset as last week - uschange - specifically the Consumption series, which shows quarterly percentage changes in US consumption expenditure.
consum.ts<-ts(uschange[,'Consumption'],start=c(1970,1),frequency=4)
str(consum.ts)
##  Time-Series [1:187] from 1970 to 2016: 0.616 0.46 0.877 -0.274 1.897 ...
autoplot(consum.ts)

# We can see the variance gets smaller over time (heteroskedasticity), so a GARCH model may be suitable for forecasting this series; a quick check for ARCH effects is sketched below.
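# A quick formal check for ARCH effects (sketch): a Ljung-Box test on the
# squared, demeaned series; the lag of 8 quarters is an arbitrary choice here.
Box.test((consum.ts - mean(consum.ts))^2, lag = 8, type = "Ljung-Box")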
# Now let's begin with an ARIMA model.
arima.consum<-auto.arima(consum.ts)
checkresiduals(arima.consum)

## 
##  Ljung-Box test
## 
## data:  Residuals from ARIMA(1,0,3)(1,0,1)[4] with non-zero mean
## Q* = 2.6139, df = 3, p-value = 0.4551
## 
## Model df: 7.   Total lags used: 10
summary(arima.consum)
## Series: consum.ts 
## ARIMA(1,0,3)(1,0,1)[4] with non-zero mean 
## 
## Coefficients:
##           ar1     ma1     ma2     ma3     sar1    sma1    mean
##       -0.3548  0.5958  0.3437  0.4111  -0.1376  0.3834  0.7460
## s.e.   0.1592  0.1496  0.0960  0.0825   0.2117  0.1780  0.0886
## 
## sigma^2 estimated as 0.3481:  log likelihood=-163.34
## AIC=342.67   AICc=343.48   BIC=368.52
## 
## Training set error measures:
##                        ME      RMSE      MAE      MPE     MAPE      MASE
## Training set 0.0008112515 0.5788296 0.432668 65.97513 187.9184 0.6779023
##                      ACF1
## Training set -0.007489122
# Next, fit an ETS model (model = 'ZZZ' lets ets() choose the form automatically).
ets.consum<-ets(consum.ts,model='ZZZ')
checkresiduals(ets.consum)

## 
##  Ljung-Box test
## 
## data:  Residuals from ETS(A,N,N)
## Q* = 16.27, df = 6, p-value = 0.01237
## 
## Model df: 2.   Total lags used: 8
summary(ets.consum)
## ETS(A,N,N) 
## 
## Call:
##  ets(y = consum.ts, model = "ZZZ") 
## 
##   Smoothing parameters:
##     alpha = 0.3298 
## 
##   Initial states:
##     l = 0.6889 
## 
##   sigma:  0.6215
## 
##      AIC     AICc      BIC 
## 804.3353 804.4665 814.0287 
## 
## Training set error measures:
##                        ME      RMSE       MAE      MPE     MAPE      MASE
## Training set 0.0007327055 0.6181848 0.4517208 14.72506 157.9753 0.7077542
##                      ACF1
## Training set -0.003046691
# Finally, let's try a GARCH model.
library(fGarch)
## Loading required package: timeDate
## Loading required package: timeSeries
## Loading required package: fBasics
library(rugarch)
## Warning: package 'rugarch' was built under R version 3.6.2
## Loading required package: parallel
## 
## Attaching package: 'rugarch'
## The following object is masked from 'package:stats':
## 
##     sigma
library(tseries)
garch.consum <- garch(consum.ts)
## 
##  ***** ESTIMATION WITH ANALYTICAL GRADIENT ***** 
## 
## 
##      I     INITIAL X(I)        D(I)
## 
##      1     3.865337e-01     1.000e+00
##      2     5.000000e-02     1.000e+00
##      3     5.000000e-02     1.000e+00
## 
##     IT   NF      F         RELDF    PRELDF    RELDX   STPPAR   D*STEP   NPRELDF
##      0    1  1.253e+02
##      1    2  1.243e+02  7.91e-03  2.90e+00  4.7e-01  3.7e+02  1.0e+00  5.29e+02
##      2    4  1.193e+02  4.05e-02  3.98e-02  2.2e-02  8.1e+00  5.0e-02  1.60e+01
##      3    6  1.107e+02  7.18e-02  7.11e-02  4.4e-02  2.1e+00  1.0e-01  3.46e+00
##      4    8  9.877e+01  1.08e-01  1.05e-01  8.9e-02  2.0e+00  2.0e-01  2.70e+00
##      5   10  9.712e+01  1.67e-02  1.67e-02  1.6e-02  8.7e+00  4.0e-02  1.30e+00
##      6   12  9.682e+01  3.09e-03  3.10e-03  3.1e-03  3.4e+01  8.0e-03  8.22e-01
##      7   14  9.625e+01  5.91e-03  5.91e-03  6.1e-03  4.9e+00  1.6e-02  7.66e-01
##      8   16  9.522e+01  1.07e-02  1.07e-02  1.2e-02  2.8e+00  3.2e-02  7.37e-01
##      9   19  9.520e+01  1.99e-04  1.99e-04  2.2e-04  3.2e+02  6.4e-04  6.88e-01
##     10   21  9.520e+01  3.98e-05  3.98e-05  4.5e-05  1.6e+03  1.3e-04  6.67e-01
##     11   23  9.520e+01  7.95e-06  7.95e-06  9.0e-06  7.8e+03  2.6e-05  6.67e-01
##     12   25  9.520e+01  1.59e-05  1.59e-05  1.8e-05  9.8e+02  5.1e-05  6.67e-01
##     13   28  9.520e+01  3.18e-07  3.18e-07  3.6e-07  1.9e+05  1.0e-06  6.67e-01
##     14   30  9.520e+01  6.36e-07  6.36e-07  7.2e-07  2.4e+04  2.0e-06  6.67e-01
##     15   32  9.520e+01  1.27e-07  1.27e-07  1.4e-07  4.9e+05  4.1e-07  6.67e-01
##     16   34  9.519e+01  2.54e-07  2.54e-07  2.9e-07  6.1e+04  8.2e-07  6.67e-01
##     17   36  9.519e+01  5.09e-08  5.09e-08  5.7e-08  1.2e+06  1.6e-07  6.67e-01
##     18   38  9.519e+01  1.02e-08  1.02e-08  1.1e-08  6.1e+06  3.3e-08  6.67e-01
##     19   40  9.519e+01  2.04e-08  2.04e-08  2.3e-08  7.6e+05  6.6e-08  6.67e-01
##     20   42  9.519e+01  4.07e-08  4.07e-08  4.6e-08  3.8e+05  1.3e-07  6.67e-01
##     21   44  9.519e+01  8.14e-09  8.14e-09  9.2e-09  7.6e+06  2.6e-08  6.67e-01
##     22   46  9.519e+01  1.63e-09  1.63e-09  1.8e-09  3.8e+07  5.2e-09  6.67e-01
##     23   48  9.519e+01  3.26e-09  3.26e-09  3.7e-09  4.8e+06  1.0e-08  6.67e-01
##     24   50  9.519e+01  6.51e-09  6.51e-09  7.4e-09  2.4e+06  2.1e-08  6.67e-01
##     25   53  9.519e+01  1.30e-10  1.30e-10  1.5e-10  4.8e+08  4.2e-10  6.67e-01
##     26   55  9.519e+01  2.61e-10  2.61e-10  2.9e-10  5.9e+07  8.4e-10  6.67e-01
##     27   57  9.519e+01  5.21e-11  5.21e-11  5.9e-11  1.6e+00  1.7e-10 -1.50e-01
##     28   59  9.519e+01  1.04e-11  1.04e-11  1.2e-11  1.6e+00  3.4e-11 -1.50e-01
##     29   61  9.519e+01  2.08e-11  2.08e-11  2.4e-11  1.6e+00  6.7e-11 -1.50e-01
##     30   63  9.519e+01  4.17e-12  4.17e-12  4.7e-12  1.6e+00  1.3e-11 -1.50e-01
##     31   65  9.519e+01  8.34e-12  8.34e-12  9.4e-12  1.6e+00  2.7e-11 -1.50e-01
##     32   67  9.519e+01  1.67e-12  1.67e-12  1.9e-12  1.6e+00  5.4e-12 -1.50e-01
##     33   69  9.519e+01  3.34e-12  3.34e-12  3.8e-12  1.6e+00  1.1e-11 -1.50e-01
##     34   71  9.519e+01  6.67e-13  6.67e-13  7.5e-13  1.6e+00  2.1e-12 -1.50e-01
##     35   73  9.519e+01  1.33e-12  1.33e-12  1.5e-12  1.6e+00  4.3e-12 -1.50e-01
##     36   75  9.519e+01  2.67e-13  2.67e-13  3.0e-13  1.6e+00  8.6e-13 -1.50e-01
##     37   77  9.519e+01  5.25e-14  5.34e-14  6.0e-14  1.6e+00  1.7e-13 -1.50e-01
##     38   79  9.519e+01  1.07e-13  1.07e-13  1.2e-13  1.6e+00  3.4e-13 -1.50e-01
##     39   81  9.519e+01  2.18e-14  2.13e-14  2.4e-14  1.6e+00  6.9e-14 -1.50e-01
##     40   83  9.519e+01  4.03e-15  4.27e-15  4.8e-15  1.6e+00  1.4e-14 -1.50e-01
##     41   85  9.519e+01  8.51e-15  8.54e-15  9.6e-15  1.6e+00  2.7e-14 -1.50e-01
##     42   87  9.519e+01  1.73e-14  1.71e-14  1.9e-14  1.6e+00  5.5e-14 -1.50e-01
##     43   89  9.519e+01 -1.05e+08  3.42e-15  3.9e-15  1.6e+00  1.1e-14 -1.50e-01
## 
##  ***** FALSE CONVERGENCE *****
## 
##  FUNCTION     9.519496e+01   RELDX        3.854e-15
##  FUNC. EVALS      89         GRAD. EVALS      43
##  PRELDF       3.416e-15      NPRELDF     -1.501e-01
## 
##      I      FINAL X(I)        D(I)          G(I)
## 
##      1    8.563705e-01     1.000e+00     1.642e+01
##      2    5.728615e-01     1.000e+00     1.701e+01
##      3    3.436608e-15     1.000e+00     1.775e+01
checkresiduals(garch.consum)
## Warning in modeldf.default(object): Could not find appropriate degrees of
## freedom for this model.

# checkresiduals() could not run its Ljung-Box test for this model class, but the residual plots do not look like white noise; a manual test is sketched below.
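# Since checkresiduals() could not determine the model's degrees of freedom,
# a rough substitute is to run Ljung-Box tests by hand (sketch; the lag of 10
# is arbitrary, and the first fitted value is NA, hence na.omit()).
res.garch <- na.omit(residuals(garch.consum))
Box.test(res.garch, lag = 10, type = "Ljung-Box")     # residual levels
Box.test(res.garch^2, lag = 10, type = "Ljung-Box")   # squared residuals (remaining ARCH)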
summary(garch.consum)
## 
## Call:
## garch(x = consum.ts)
## 
## Model:
## GARCH(1,1)
## 
## Residuals:
##     Min      1Q  Median      3Q     Max 
## -2.4382  0.3850  0.6175  0.9521  2.0642 
## 
## Coefficient(s):
##     Estimate  Std. Error  t value Pr(>|t|)  
## a0 8.564e-01   4.664e-01    1.836   0.0663 .
## a1 5.729e-01   3.799e-01    1.508   0.1316  
## b1 3.437e-15   3.609e-01    0.000   1.0000  
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
## 
## Diagnostic Tests:
##  Jarque Bera Test
## 
## data:  Residuals
## X-squared = 188.7, df = 2, p-value < 2.2e-16
## 
## 
##  Box-Ljung test
## 
## data:  Squared.Residuals
## X-squared = 6.2778, df = 1, p-value = 0.01223
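# The optimizer above reported FALSE CONVERGENCE and the b1 estimate is
# essentially zero, so this fit is questionable. As a cross-check, fGarch
# (already loaded) can estimate the same GARCH(1,1) with a different
# optimizer; a minimal sketch (the object name garch.consum.fg is mine):
garch.consum.fg <- garchFit(~ garch(1, 1), data = as.numeric(consum.ts), trace = FALSE)
summary(garch.consum.fg)  # compare the coefficients with the tseries fit above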
# To evaluate the forecasts, I split the data into a training set (through 2014 Q4) and a test set (the last 7 quarters), and refit each model on the training set.
consum.ts.train <- window(consum.ts, end = c(2014, 4))    # 1970 Q1 - 2014 Q4 (180 quarters)
consum.ts.test  <- window(consum.ts, start = c(2015, 1))  # 2015 Q1 - 2016 Q3 (7 quarters)
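# Quick sanity check on the split (sketch): the training set should end in
# 2014 Q4 and the test set should hold the last 7 quarters.
length(consum.ts.train)  # 180 quarters
length(consum.ts.test)   # 7 quarters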
# Refit the ARIMA model on the training set:
train_arima <- auto.arima(consum.ts.train)
# Refit the ETS model on the training set:
train_ets <- ets(consum.ts.train, model = 'ZZZ')
# Fit a GARCH model on the training set, with an ARMA(1,1) mean model and Student-t errors:
garch.consum.spec1 <- ugarchspec(
  variance.model = list(model = 'sGARCH', garchOrder = c(1, 1)),
  mean.model = list(armaOrder = c(1, 1)),
  distribution.model = 'std'
)
garch.consum2 <- ugarchfit(spec = garch.consum.spec1, data = consum.ts.train)
garch.consum2 # The Akaike information criterion (AIC) comes out to 1.7308
## 
## *---------------------------------*
## *          GARCH Model Fit        *
## *---------------------------------*
## 
## Conditional Variance Dynamics    
## -----------------------------------
## GARCH Model  : sGARCH(1,1)
## Mean Model   : ARFIMA(1,0,1)
## Distribution : std 
## 
## Optimal Parameters
## ------------------------------------
##         Estimate  Std. Error  t value Pr(>|t|)
## mu      0.708489    0.103488  6.84611 0.000000
## ar1     0.830733    0.061790 13.44444 0.000000
## ma1    -0.521490    0.088354 -5.90228 0.000000
## omega   0.002361    0.005612  0.42061 0.674042
## alpha1  0.051105    0.039783  1.28459 0.198934
## beta1   0.935956    0.047333 19.77366 0.000000
## shape   6.985411    3.232167  2.16122 0.030679
## 
## Robust Standard Errors:
##         Estimate  Std. Error  t value Pr(>|t|)
## mu      0.708489    0.119842  5.91188 0.000000
## ar1     0.830733    0.056390 14.73185 0.000000
## ma1    -0.521490    0.076222 -6.84173 0.000000
## omega   0.002361    0.007209  0.32744 0.743336
## alpha1  0.051105    0.053277  0.95923 0.337441
## beta1   0.935956    0.063336 14.77757 0.000000
## shape   6.985411    2.541288  2.74877 0.005982
## 
## LogLikelihood : -149.634 
## 
## Information Criteria
## ------------------------------------
##                    
## Akaike       1.7308
## Bayes        1.8545
## Shibata      1.7279
## Hannan-Quinn 1.7809
## 
## Weighted Ljung-Box Test on Standardized Residuals
## ------------------------------------
##                         statistic  p-value
## Lag[1]                     0.2152 0.642751
## Lag[2*(p+q)+(p+q)-1][5]    4.9059 0.004552
## Lag[4*(p+q)+(p+q)-1][9]    7.4362 0.091459
## d.o.f=2
## H0 : No serial correlation
## 
## Weighted Ljung-Box Test on Standardized Squared Residuals
## ------------------------------------
##                         statistic p-value
## Lag[1]                      1.948  0.1627
## Lag[2*(p+q)+(p+q)-1][5]     2.132  0.5876
## Lag[4*(p+q)+(p+q)-1][9]     2.405  0.8516
## d.o.f=2
## 
## Weighted ARCH LM Tests
## ------------------------------------
##             Statistic Shape Scale P-Value
## ARCH Lag[3]    0.2093 0.500 2.000  0.6473
## ARCH Lag[5]    0.3282 1.440 1.667  0.9327
## ARCH Lag[7]    0.5218 2.315 1.543  0.9764
## 
## Nyblom stability test
## ------------------------------------
## Joint Statistic:  0.9986
## Individual Statistics:             
## mu     0.2288
## ar1    0.2445
## ma1    0.2918
## omega  0.1178
## alpha1 0.2513
## beta1  0.1906
## shape  0.1133
## 
## Asymptotic Critical Values (10% 5% 1%)
## Joint Statistic:          1.69 1.9 2.35
## Individual Statistic:     0.35 0.47 0.75
## 
## Sign Bias Test
## ------------------------------------
##                    t-value    prob sig
## Sign Bias           0.0405 0.96774    
## Negative Sign Bias  2.0334 0.04351  **
## Positive Sign Bias  0.7136 0.47639    
## Joint Effect        9.0669 0.02841  **
## 
## 
## Adjusted Pearson Goodness-of-Fit Test:
## ------------------------------------
##   group statistic p-value(g-1)
## 1    20     21.98      0.28508
## 2    30     34.30      0.22831
## 3    40     57.01      0.03124
## 4    50     62.37      0.09506
## 
## 
## Elapsed time : 0.2524829
# Next, I change the mean model to ARMA(2,2) to see whether the fit improves.
garch.consum.spec2 <- ugarchspec(
  variance.model = list(model = 'sGARCH', garchOrder = c(1, 1)),
  mean.model = list(armaOrder = c(2, 2)),
  distribution.model = 'std'
)
garch.consum3 <- ugarchfit(spec = garch.consum.spec2, data = consum.ts.train)
garch.consum3 # The AIC is now 1.7136, lower than the 1.7308 above, which suggests the ARMA(2,2) mean model fits better
## 
## *---------------------------------*
## *          GARCH Model Fit        *
## *---------------------------------*
## 
## Conditional Variance Dynamics    
## -----------------------------------
## GARCH Model  : sGARCH(1,1)
## Mean Model   : ARFIMA(2,0,2)
## Distribution : std 
## 
## Optimal Parameters
## ------------------------------------
##         Estimate  Std. Error  t value Pr(>|t|)
## mu      0.697024    0.093278   7.4726 0.000000
## ar1     1.168818    0.358645   3.2590 0.001118
## ar2    -0.360314    0.299851  -1.2016 0.229501
## ma1    -0.944283    0.331353  -2.8498 0.004375
## ma2     0.437750    0.168483   2.5982 0.009372
## omega   0.001815    0.005657   0.3208 0.748359
## alpha1  0.052265    0.041932   1.2464 0.212602
## beta1   0.936013    0.050069  18.6946 0.000000
## shape   6.512433    2.751462   2.3669 0.017938
## 
## Robust Standard Errors:
##         Estimate  Std. Error  t value Pr(>|t|)
## mu      0.697024    0.112940  6.17166 0.000000
## ar1     1.168818    0.392830  2.97538 0.002926
## ar2    -0.360314    0.324693 -1.10971 0.267125
## ma1    -0.944283    0.359618 -2.62579 0.008645
## ma2     0.437750    0.158266  2.76592 0.005676
## omega   0.001815    0.008066  0.22500 0.821981
## alpha1  0.052265    0.061254  0.85325 0.393521
## beta1   0.936013    0.074934 12.49117 0.000000
## shape   6.512433    2.322771  2.80373 0.005051
## 
## LogLikelihood : -146.081 
## 
## Information Criteria
## ------------------------------------
##                    
## Akaike       1.7136
## Bayes        1.8726
## Shibata      1.7090
## Hannan-Quinn 1.7781
## 
## Weighted Ljung-Box Test on Standardized Residuals
## ------------------------------------
##                          statistic p-value
## Lag[1]                      0.1347  0.7136
## Lag[2*(p+q)+(p+q)-1][11]    4.1999  0.9995
## Lag[4*(p+q)+(p+q)-1][19]    6.2354  0.9603
## d.o.f=4
## H0 : No serial correlation
## 
## Weighted Ljung-Box Test on Standardized Squared Residuals
## ------------------------------------
##                         statistic p-value
## Lag[1]                     0.9036  0.3418
## Lag[2*(p+q)+(p+q)-1][5]    1.2454  0.8021
## Lag[4*(p+q)+(p+q)-1][9]    1.6840  0.9392
## d.o.f=2
## 
## Weighted ARCH LM Tests
## ------------------------------------
##             Statistic Shape Scale P-Value
## ARCH Lag[3]    0.0793 0.500 2.000  0.7782
## ARCH Lag[5]    0.4134 1.440 1.667  0.9089
## ARCH Lag[7]    0.6644 2.315 1.543  0.9612
## 
## Nyblom stability test
## ------------------------------------
## Joint Statistic:  1.2849
## Individual Statistics:             
## mu     0.3154
## ar1    0.2794
## ar2    0.1997
## ma1    0.2490
## ma2    0.1783
## omega  0.1072
## alpha1 0.2623
## beta1  0.1715
## shape  0.2095
## 
## Asymptotic Critical Values (10% 5% 1%)
## Joint Statistic:          2.1 2.32 2.82
## Individual Statistic:     0.35 0.47 0.75
## 
## Sign Bias Test
## ------------------------------------
##                    t-value   prob sig
## Sign Bias           0.5120 0.6093    
## Negative Sign Bias  1.4970 0.1362    
## Positive Sign Bias  0.1613 0.8720    
## Joint Effect        5.9449 0.1143    
## 
## 
## Adjusted Pearson Goodness-of-Fit Test:
## ------------------------------------
##   group statistic p-value(g-1)
## 1    20     23.09       0.2335
## 2    30     34.30       0.2283
## 3    40     34.47       0.6765
## 4    50     51.87       0.3625
## 
## 
## Elapsed time : 0.1694
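# To compare the two GARCH fits directly, rugarch's infocriteria() extracts
# the information criteria from each fitted object (a small sketch):
ic <- cbind(infocriteria(garch.consum2), infocriteria(garch.consum3))
colnames(ic) <- c("ARMA(1,1)", "ARMA(2,2)")
ic  # lower values favour the ARMA(2,2) mean model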
# Forecast the next 7 quarters (the test period)
fc.arima<-forecast(train_arima,h=7)
fc.ets<-forecast(train_ets,h=7)
garch.consum.fc <- ugarchboot(garch.consum3,n.ahead=7, method=c('Partial','Full')[1])

# Plot the results. In this case the GARCH forecasts look more similar to the ETS forecasts than to the ARIMA forecasts.
par(mfrow=c(2,2))
plot(consum.ts.test, main='actual')
plot(fc.arima$mean, main='arima')
plot(fc.ets$mean, main='ets')    
plot(garch.consum.fc, which =2, main='garch')
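# The four panels above use different y-axis scales, which makes them hard to
# compare. A sketch that overlays the point forecasts on the actual test data;
# the GARCH conditional-mean forecast is pulled out with ugarchforecast() and
# fitted() (the names fc.garch and garch.mean are mine):
fc.garch <- ugarchforecast(garch.consum3, n.ahead = 7)
garch.mean <- ts(as.numeric(fitted(fc.garch)), start = c(2015, 1), frequency = 4)
autoplot(consum.ts.test, series = "Actual") +
  autolayer(fc.arima$mean, series = "ARIMA") +
  autolayer(fc.ets$mean, series = "ETS") +
  autolayer(garch.mean, series = "GARCH")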

# accuracy() does not accept the rugarch forecast objects directly, so scoring the GARCH forecasts takes an extra extraction step (sketched below).
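# One possible workaround (sketch): forecast::accuracy() also accepts a plain
# numeric vector of forecasts together with the actual values, so the GARCH
# conditional-mean forecast built above (garch.mean) can be scored the same
# way as the ARIMA and ETS forecasts.
accuracy(fc.arima, consum.ts.test)
accuracy(fc.ets, consum.ts.test)
accuracy(as.numeric(garch.mean), consum.ts.test)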