Packages

library(TSA)
## 
## Attaching package: 'TSA'
## The following objects are masked from 'package:stats':
## 
##     acf, arima
## The following object is masked from 'package:utils':
## 
##     tar
library(ggfortify)
## Loading required package: ggplot2
library(forecast)
library(fpp)
## Loading required package: fma
## Loading required package: expsmooth
## Loading required package: lmtest
## Loading required package: zoo
## 
## Attaching package: 'zoo'
## The following objects are masked from 'package:base':
## 
##     as.Date, as.Date.numeric
## Loading required package: tseries
library(seasonal)
library(fpp2)
## 
## Attaching package: 'fpp2'
## The following objects are masked from 'package:fpp':
## 
##     ausair, ausbeer, austa, austourists, debitcards, departures,
##     elecequip, euretail, guinearice, oil, sunspotarea, usmelec
library(gridExtra)
library(urca)
library(foreign)
library(vars)
## Loading required package: MASS
## 
## Attaching package: 'MASS'
## The following objects are masked from 'package:fma':
## 
##     cement, housing, petrol
## Loading required package: strucchange
## Loading required package: sandwich
library(zoo)
library(dynlm)
library(lmtest)
library(strucchange)
library(readxl)
library(seasonalview)
## 
## Attaching package: 'seasonalview'
## The following object is masked from 'package:seasonal':
## 
##     view

1. Brief description of the chosen series, including its frequency, unit of measurement, source, etc.

Series: PM

2006-2018; includes the original and the seasonally adjusted series.

Unit of measurement: index.

Frequency: monthly.

Source: (www.inegi.org.mx)

2. Plot the data and analyze patterns and atypical observations. Support the analysis with a classical decomposition.

base<-read_xls("C:/Users/rodar/OneDrive/Escritorio/SERIES/INDUSTRIA MANUFACTURERA.xls")
PM<-ts(base$PM, frequency = 12, start=c(2006,1))
autoplot(PM) + xlab("Year")+ylab("INDICE") +
  ggtitle('INDUSTRIA MANUFACTURERA (PM ORIGINAL)')

fit <- decompose(PM,type = "additive")
autoplot(fit)

3. If necessary, use a Box-Cox/log transformation to stabilize the variance.

PMlog <-log(PM)
plot(PMlog)

A log transformation was applied to stabilize the variance and make the behavior of the series easier to visualize.
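
As a cross-check (a minimal sketch added here, not part of the original analysis; it assumes the forecast package loaded above), BoxCox.lambda() estimates the transformation parameter, and a value near zero supports the log choice:

# Estimate the Box-Cox parameter; a lambda close to 0 supports a log transform
lambda <- BoxCox.lambda(PM)
lambda
autoplot(BoxCox(PM, lambda)) + ggtitle("PM, Box-Cox transformed")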

4. If there is seasonality, apply seasonal differences.

PMdif <-diff(PMlog)
plot(PMdif)
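
The code above takes a regular (first) difference; a seasonal difference at lag 12 for monthly data would be taken as in this sketch (PMsdif is an illustrative name not used later):

PMsdif <- diff(PMlog, lag = 12)  # seasonal difference at lag 12
plot(PMsdif)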

PMdif2 <-diff(log(PM))
BoxCox.ar(PM)
## Warning in arima0(x, order = c(i, 0L, 0L), include.mean = demean): possible
## convergence problem: optim gave code = 1
## (the same warning was repeated 14 times)

plot(PMdif2) 

The Box-Cox confidence interval contains the value 0 near its center, which suggests a logarithmic transformation.

plot(PM^1.5, type="o")

plot(diff(PM^1.5), type="o")

plot(diff(PMdif2))

Two differences were applied in total (the first difference of the log series and a second difference of that result).
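
As a hedged cross-check of how much differencing is actually needed (an addition, not in the original output), the forecast package offers unit-root-based counters:

ndiffs(PMlog)   # number of regular differences suggested
nsdiffs(PMlog)  # number of seasonal differences suggested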

5. Use the Dickey-Fuller test to assess the order of integration of the series. Difference until the series is stationary.

summary(ur.df(PM))
## 
## ############################################### 
## # Augmented Dickey-Fuller Test Unit Root Test # 
## ############################################### 
## 
## Test regression none 
## 
## 
## Call:
## lm(formula = z.diff ~ z.lag.1 - 1 + z.diff.lag)
## 
## Residuals:
##     Min      1Q  Median      3Q     Max 
## -8.6093 -1.1080 -0.2844  2.0807  4.9716 
## 
## Coefficients:
##            Estimate Std. Error t value Pr(>|t|)
## z.lag.1    0.003696   0.003588   1.030    0.308
## z.diff.lag 0.115448   0.138306   0.835    0.408
## 
## Residual standard error: 2.675 on 49 degrees of freedom
## Multiple R-squared:  0.04397,    Adjusted R-squared:  0.004953 
## F-statistic: 1.127 on 2 and 49 DF,  p-value: 0.3323
## 
## 
## Value of test-statistic is: 1.03 
## 
## Critical values for test statistics: 
##      1pct  5pct 10pct
## tau1 -2.6 -1.95 -1.61

The series is not stationary: the ADF test statistic (1.03) is above the 5% critical value (-1.95), so the unit-root null hypothesis cannot be rejected.
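
As an additional check (a sketch; tseries is already loaded through fpp), adf.test() runs the same test with an automatically chosen lag order:

adf.test(PM)      # null hypothesis: unit root (non-stationary)
adf.test(PMdif2)  # the differenced series, for comparison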

summary(ur.df(PMdif2))
## 
## ############################################### 
## # Augmented Dickey-Fuller Test Unit Root Test # 
## ############################################### 
## 
## Test regression none 
## 
## 
## Call:
## lm(formula = z.diff ~ z.lag.1 - 1 + z.diff.lag)
## 
## Residuals:
##       Min        1Q    Median        3Q       Max 
## -0.088854 -0.008099  0.000222  0.022948  0.052844 
## 
## Coefficients:
##            Estimate Std. Error t value Pr(>|t|)    
## z.lag.1    -0.84361    0.18071  -4.668 2.47e-05 ***
## z.diff.lag  0.04486    0.13834   0.324    0.747    
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
## 
## Residual standard error: 0.02737 on 48 degrees of freedom
## Multiple R-squared:  0.405,  Adjusted R-squared:  0.3802 
## F-statistic: 16.33 on 2 and 48 DF,  p-value: 3.884e-06
## 
## 
## Value of test-statistic is: -4.6684 
## 
## Critical values for test statistics: 
##      1pct  5pct 10pct
## tau1 -2.6 -1.95 -1.61

As we can see, the differenced series is now stationary: the test statistic (-4.67) is below the 1% critical value (-2.6), so the unit-root hypothesis is rejected.

6. Use the ACF, PACF, and EACF tools and the Akaike/Bayesian information criteria to build candidate models.

ggAcf(PMdif2)

ggPacf(PMdif2) 

eacf(PMdif2)
## AR/MA
##   0 1 2 3 4 5 6 7 8 9 10 11 12 13
## 0 o o o o o o o o o o o  o  o  o 
## 1 x o o o o o o o o o o  o  o  o 
## 2 x x o o o o o o o o o  o  o  o 
## 3 x o x o o o o o o o o  o  o  o 
## 4 x o x o o o o o o o o  o  o  o 
## 5 o o o o o o o o o o o  o  o  o 
## 6 x o o x o o o o o o o  o  o  o 
## 7 x o o x o o o o o o o  o  o  o
ggtsdisplay(PMdif2)

PM2 <-armasubsets(y=PMdif2,nar= 12, nma=12, y.name="test", ar.method="ols")
## Warning in leaps.setup(x, y, wt = wt, nbest = nbest, nvmax = nvmax,
## force.in = force.in, : 2 linear dependencies found
plot(PM2)

propuesta1<- Arima(PMdif2, order = c(1,0,0),method = "ML")
propuesta1
## Series: PMdif2 
## ARIMA(1,0,0) with non-zero mean 
## 
## Coefficients:
##          ar1    mean
##       0.1540  0.0056
## s.e.  0.1402  0.0044
## 
## sigma^2 estimated as 0.0007603:  log likelihood=113.95
## AIC=-221.9   AICc=-221.4   BIC=-216.04
propuesta2 <-Arima(PMdif2, order = c(0,0,2),method = "ML")
propuesta2
## Series: PMdif2 
## ARIMA(0,0,2) with non-zero mean 
## 
## Coefficients:
##          ma1     ma2    mean
##       0.1863  0.0439  0.0057
## s.e.  0.1784  0.1484  0.0046
## 
## sigma^2 estimated as 0.0007736:  log likelihood=114.02
## AIC=-220.03   AICc=-219.18   BIC=-212.23
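
A compact side-by-side comparison of the two proposals (a sketch; stats::AIC() and BIC() read the log-likelihood from the fitted Arima objects):

data.frame(model = c("ARIMA(1,0,0)", "ARIMA(0,0,2)"),
           AIC = c(AIC(propuesta1), AIC(propuesta2)),
           BIC = c(BIC(propuesta1), BIC(propuesta2)))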

7. As part of the model diagnostics, analyze the residuals and apply the Ljung-Box test.

tsdiag(propuesta1,gof=12, omit.initial=F)

tsdiag(propuesta2,gof=12, omit.initial=F)

As we can see, the residuals of both models are well behaved: the Ljung-Box p-values in the diagnostic plots lie above the usual significance level, so we do not reject the white-noise hypothesis.
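
The same conclusion can be checked explicitly with Box.test() (a sketch; fitdf is the number of estimated ARMA coefficients in each model):

Box.test(residuals(propuesta1), lag = 12, type = "Ljung-Box", fitdf = 1)  # AR(1)
Box.test(residuals(propuesta2), lag = 12, type = "Ljung-Box", fitdf = 2)  # MA(2)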

8. Use the auto.arima() function and compare its results with the previous item.

propuesta.auto<-auto.arima(PMdif2)
propuesta.auto
## Series: PMdif2 
## ARIMA(0,0,0) with zero mean 
## 
## sigma^2 estimated as 0.0007783:  log likelihood=112.33
## AIC=-222.67   AICc=-222.59   BIC=-220.72
checkresiduals(propuesta.auto)

## 
##  Ljung-Box test
## 
## data:  Residuals from ARIMA(0,0,0) with zero mean
## Q* = 11.041, df = 10.4, p-value = 0.3883
## 
## Model df: 0.   Total lags used: 10.4
propuesta2 <- auto.arima(PMdif2, stepwise = FALSE, approximation = FALSE)
propuesta2
## Series: PMdif2 
## ARIMA(1,0,2) with zero mean 
## 
## Coefficients:
##           ar1     ma1     ma2
##       -0.8872  1.2367  0.4829
## s.e.   0.3140  0.1993  0.2908
## 
## sigma^2 estimated as 0.0006855:  log likelihood=116.81
## AIC=-225.62   AICc=-224.77   BIC=-217.82
checkresiduals(propuesta2)

## 
##  Ljung-Box test
## 
## data:  Residuals from ARIMA(1,0,2) with zero mean
## Q* = 9.4314, df = 7.4, p-value = 0.2556
## 
## Model df: 3.   Total lags used: 10.4

In both proposals the mean is stable and the residuals behave like white noise (Ljung-Box p-values of 0.39 and 0.26).

9. Present the final equation and describe it.

\[ Y_t = -0.8872\,Y_{t-1} + 1.2367\,e_{t-1} + 0.4829\,e_{t-2} + e_t \]

Here Y_t denotes the differenced log of the PM index: the ARIMA(1,0,2) model with zero mean combines an AR(1) coefficient of -0.8872 with MA coefficients of 1.2367 and 0.4829 on the lagged errors, plus the current error e_t.

10. Create a two-year forecast.

pronostico<- plot(forecast(propuesta.auto, h=24))

pronostico
## $mean
##      Jan Feb Mar Apr May Jun Jul Aug Sep Oct Nov Dec
## 2010                       0   0   0   0   0   0   0
## 2011   0   0   0   0   0   0   0   0   0   0   0   0
## 2012   0   0   0   0   0                            
## 
## $lower
##                  80%         95%
## Jun 2010 -0.03575187 -0.05467777
## Jul 2010 -0.03575187 -0.05467777
## Aug 2010 -0.03575187 -0.05467777
## Sep 2010 -0.03575187 -0.05467777
## Oct 2010 -0.03575187 -0.05467777
## Nov 2010 -0.03575187 -0.05467777
## Dec 2010 -0.03575187 -0.05467777
## Jan 2011 -0.03575187 -0.05467777
## Feb 2011 -0.03575187 -0.05467777
## Mar 2011 -0.03575187 -0.05467777
## Apr 2011 -0.03575187 -0.05467777
## May 2011 -0.03575187 -0.05467777
## Jun 2011 -0.03575187 -0.05467777
## Jul 2011 -0.03575187 -0.05467777
## Aug 2011 -0.03575187 -0.05467777
## Sep 2011 -0.03575187 -0.05467777
## Oct 2011 -0.03575187 -0.05467777
## Nov 2011 -0.03575187 -0.05467777
## Dec 2011 -0.03575187 -0.05467777
## Jan 2012 -0.03575187 -0.05467777
## Feb 2012 -0.03575187 -0.05467777
## Mar 2012 -0.03575187 -0.05467777
## Apr 2012 -0.03575187 -0.05467777
## May 2012 -0.03575187 -0.05467777
## 
## $upper
##                 80%        95%
## Jun 2010 0.03575187 0.05467777
## Jul 2010 0.03575187 0.05467777
## Aug 2010 0.03575187 0.05467777
## Sep 2010 0.03575187 0.05467777
## Oct 2010 0.03575187 0.05467777
## Nov 2010 0.03575187 0.05467777
## Dec 2010 0.03575187 0.05467777
## Jan 2011 0.03575187 0.05467777
## Feb 2011 0.03575187 0.05467777
## Mar 2011 0.03575187 0.05467777
## Apr 2011 0.03575187 0.05467777
## May 2011 0.03575187 0.05467777
## Jun 2011 0.03575187 0.05467777
## Jul 2011 0.03575187 0.05467777
## Aug 2011 0.03575187 0.05467777
## Sep 2011 0.03575187 0.05467777
## Oct 2011 0.03575187 0.05467777
## Nov 2011 0.03575187 0.05467777
## Dec 2011 0.03575187 0.05467777
## Jan 2012 0.03575187 0.05467777
## Feb 2012 0.03575187 0.05467777
## Mar 2012 0.03575187 0.05467777
## Apr 2012 0.03575187 0.05467777
## May 2012 0.03575187 0.05467777
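
Because the model was fitted on the differenced log series, the forecast can be returned to the original index level by cumulating and exponentiating; a minimal sketch (the object names fc and nivel are illustrative additions):

fc <- forecast(propuesta.auto, h = 24)
# cumulate the forecast of diff(log(PM)) and add the last observed log level
nivel <- exp(as.numeric(tail(log(PM), 1)) + cumsum(as.numeric(fc$mean)))
nivel <- ts(nivel, start = end(PM) + c(0, 1), frequency = 12)
autoplot(PM) + autolayer(nivel, series = "Forecast (level)")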

12. Using forecasting and/or exponential smoothing methods, generate a two-year forecast. Choose the method with the smallest root mean squared error.

PMRSME<-window(PM, start=2006)
autoplot(PMRSME)+
  ylab("PM(indice)")+xlab("Año")

Simple exponential smoothing is applied because the data do not show a clear seasonal pattern; a comparison with trend and seasonal variants appears after the accuracy table below.

PMFC <- ses(PMRSME, h = 24)  # two-year (24-month) simple exponential smoothing forecast

round(accuracy(PMFC),2)
##                ME RMSE  MAE MPE MAPE MASE ACF1
## Training set 0.57 2.73 1.91 0.5 1.87 0.21 0.11
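
To follow the instruction of choosing the method with the smallest RMSE, a hedged comparison sketch (the holt() and hw() calls are additions for illustration, not part of the original analysis):

round(accuracy(ses(PMRSME, h = 24)), 2)                        # simple exponential smoothing
round(accuracy(holt(PMRSME, h = 24)), 2)                       # adds a trend component
round(accuracy(hw(PMRSME, h = 24, seasonal = "additive")), 2)  # adds trend and seasonality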

The forecast is produced as follows:

autoplot(PMFC)+
  autolayer(fitted(PMFC),series="Fitted")+
  ylab("PM(indice)")+xlab("Año")

13. Analyze a VAR; it must include the following points:

We first clear the workspace:

rm(list = ls())
base2<-read_xls("C:/Users/rodar/OneDrive/Escritorio/SERIES/V Y P.xls")

We declare both variables as time series.

ts.V <- ts(base2$VOL, start = c(2006,1), frequency = 12)
ts.P <- ts(base2$PROD, start = c(2006,1), frequency = 12)
autoplot(ts.V)

autoplot(ts.P)

ggseasonplot(ts.V)

ggseasonplot(ts.P)

mes. <- season(ts.V)
reg.w <- lm(ts.V ~ time(ts.V) + mes.)
summary(reg.w)
## 
## Call:
## lm(formula = ts.V ~ time(ts.V) + mes.)
## 
## Residuals:
##    Min     1Q Median     3Q    Max 
## -14.30  -1.86   1.17   2.64   8.38 
## 
## Coefficients:
##                 Estimate Std. Error t value Pr(>|t|)    
## (Intercept)   -1.319e+04  1.204e+03 -10.956 1.30e-13 ***
## time(ts.V)     6.620e+00  5.995e-01  11.043 1.02e-13 ***
## mes.February   2.788e+00  3.496e+00   0.798    0.430    
## mes.March      1.317e+00  3.497e+00   0.377    0.709    
## mes.April      1.650e-01  3.499e+00   0.047    0.963    
## mes.May       -1.567e+00  3.501e+00  -0.447    0.657    
## mes.June      -7.783e-01  3.708e+00  -0.210    0.835    
## mes.July      -7.800e-01  3.707e+00  -0.210    0.834    
## mes.August    -5.567e-01  3.708e+00  -0.150    0.881    
## mes.September -1.633e+00  3.709e+00  -0.440    0.662    
## mes.October    1.890e+00  3.710e+00   0.509    0.613    
## mes.November   7.133e-01  3.713e+00   0.192    0.849    
## mes.December  -5.883e-01  3.716e+00  -0.158    0.875    
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
## 
## Residual standard error: 5.527 on 40 degrees of freedom
## Multiple R-squared:  0.7597, Adjusted R-squared:  0.6876 
## F-statistic: 10.54 on 12 and 40 DF,  p-value: 5.981e-09
mes. <- season(ts.P)
reg.p <- lm(ts.P ~ time(ts.P) + mes.)
summary(reg.p)
## 
## Call:
## lm(formula = ts.P ~ time(ts.P) + mes.)
## 
## Residuals:
##     Min      1Q  Median      3Q     Max 
## -8.2071 -1.6494  0.3165  1.8506  7.3600 
## 
## Coefficients:
##                 Estimate Std. Error t value Pr(>|t|)    
## (Intercept)   -5.786e+03  6.939e+02  -8.338 2.77e-10 ***
## time(ts.P)     2.933e+00  3.456e-01   8.487 1.75e-10 ***
## mes.February   2.236e+00  2.015e+00   1.109    0.274    
## mes.March      8.312e-01  2.016e+00   0.412    0.682    
## mes.April      1.068e-01  2.017e+00   0.053    0.958    
## mes.May       -9.977e-01  2.018e+00  -0.494    0.624    
## mes.June       1.034e+00  2.137e+00   0.484    0.631    
## mes.July       9.900e-01  2.137e+00   0.463    0.646    
## mes.August     1.071e+00  2.137e+00   0.501    0.619    
## mes.September  6.512e-01  2.138e+00   0.305    0.762    
## mes.October    3.132e+00  2.139e+00   1.464    0.151    
## mes.November   1.462e+00  2.140e+00   0.683    0.498    
## mes.December  -3.206e-02  2.142e+00  -0.015    0.988    
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
## 
## Residual standard error: 3.186 on 40 degrees of freedom
## Multiple R-squared:  0.6604, Adjusted R-squared:  0.5585 
## F-statistic: 6.481 on 12 and 40 DF,  p-value: 3.159e-06

We apply the necessary transformations (e.g., differences and logs).

ts.ddV <- diff(diff(ts.V,12)) # seasonal (lag 12) plus regular difference of volume
ts.dP <- diff(diff(ts.P,12)) # seasonal (lag 12) plus regular difference of production
ggtsdisplay(ts.ddV)

ggtsdisplay(ts.dP)

summary(ur.df(ts.ddV))
## 
## ############################################### 
## # Augmented Dickey-Fuller Test Unit Root Test # 
## ############################################### 
## 
## Test regression none 
## 
## 
## Call:
## lm(formula = z.diff ~ z.lag.1 - 1 + z.diff.lag)
## 
## Residuals:
##     Min      1Q  Median      3Q     Max 
## -8.4919 -1.5118  0.2825  1.4929 10.3051 
## 
## Coefficients:
##            Estimate Std. Error t value Pr(>|t|)    
## z.lag.1     -0.6803     0.1677  -4.057 0.000255 ***
## z.diff.lag   0.2339     0.1606   1.456 0.154059    
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
## 
## Residual standard error: 3.473 on 36 degrees of freedom
## Multiple R-squared:  0.3214, Adjusted R-squared:  0.2837 
## F-statistic: 8.524 on 2 and 36 DF,  p-value: 0.0009318
## 
## 
## Value of test-statistic is: -4.0571 
## 
## Critical values for test statistics: 
##       1pct  5pct 10pct
## tau1 -2.62 -1.95 -1.61
summary(ur.df(ts.dP))
## 
## ############################################### 
## # Augmented Dickey-Fuller Test Unit Root Test # 
## ############################################### 
## 
## Test regression none 
## 
## 
## Call:
## lm(formula = z.diff ~ z.lag.1 - 1 + z.diff.lag)
## 
## Residuals:
##     Min      1Q  Median      3Q     Max 
## -6.3684 -1.5291 -0.3147  1.0707  8.2205 
## 
## Coefficients:
##            Estimate Std. Error t value Pr(>|t|)    
## z.lag.1     -0.9422     0.2013  -4.680 3.98e-05 ***
## z.diff.lag   0.2143     0.1618   1.325    0.194    
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
## 
## Residual standard error: 2.902 on 36 degrees of freedom
## Multiple R-squared:  0.4195, Adjusted R-squared:  0.3872 
## F-statistic: 13.01 on 2 and 36 DF,  p-value: 5.604e-05
## 
## 
## Value of test-statistic is: -4.6797 
## 
## Critical values for test statistics: 
##       1pct  5pct 10pct
## tau1 -2.62 -1.95 -1.61

Both variables must be stored in a single object.

base3 <- cbind.zoo(v = ts.ddV, p = ts.dP) # we use the cbind.zoo function from the 'zoo' package
View(base3) # the series is shortened by the differencing
autoplot(base3) # the series is shortened by the differencing

The variables have the same length, so we do not restrict the sample.

We use 'base3' to begin our VAR analysis.

VARselect(base3, lag.max = 24, type = "const")[["selection"]]
## AIC(n)  HQ(n)  SC(n) FPE(n) 
##      8      8      8      8
VARselect(base3, lag.max = 24, type = "const")
## $selection
## AIC(n)  HQ(n)  SC(n) FPE(n) 
##      8      8      8      8 
## 
## $criteria
##                 1           2            3           4          5
## AIC(n) -0.1854425 -0.12242399 -0.031133019 -0.09513740 -0.1647990
## HQ(n)  -0.1706065 -0.09769719  0.003484502 -0.05062916 -0.1104001
## SC(n)   0.1042782  0.36044396  0.644882113  0.77402491  0.8975104
## FPE(n)  0.8382296  0.92388316  1.100105692  1.22237287  1.5809072
##                 6             7    8    9   10   11   12   13   14   15
## AIC(n) -0.5163828 -3.738693e+01 -Inf -Inf -Inf -Inf -Inf -Inf -Inf -Inf
## HQ(n)  -0.4520931 -3.731275e+01 -Inf -Inf -Inf -Inf -Inf -Inf -Inf -Inf
## SC(n)   0.7390739 -3.593833e+01 -Inf -Inf -Inf -Inf -Inf -Inf -Inf -Inf
## FPE(n)  2.1618932  1.309731e-15    0    0    0    0    0    0    0    0
##          16   17   18   19   20   21   22   23   24
## AIC(n) -Inf -Inf -Inf -Inf -Inf -Inf -Inf -Inf -Inf
## HQ(n)  -Inf -Inf -Inf -Inf -Inf -Inf -Inf -Inf -Inf
## SC(n)  -Inf -Inf -Inf -Inf -Inf -Inf -Inf -Inf -Inf
## FPE(n)    0    0    0    0    0    0    0    0    0
var1 <- VAR(base3, p=1, type = "both")
summary(var1)
## 
## VAR Estimation Results:
## ========================= 
## Endogenous variables: v, p 
## Deterministic variables: both 
## Sample size: 39 
## Log Likelihood: -144.32 
## Roots of the characteristic polynomial:
## 0.5143 0.5143
## Call:
## VAR(y = base3, p = 1, type = "both")
## 
## 
## Estimation results for equation v: 
## ================================== 
## v = v.l1 + p.l1 + const + trend 
## 
##       Estimate Std. Error t value Pr(>|t|)
## v.l1   0.24932    0.32966   0.756    0.455
## p.l1   0.30426    0.43750   0.695    0.491
## const -0.20905    1.24810  -0.167    0.868
## trend  0.01299    0.05333   0.244    0.809
## 
## 
## Residual standard error: 3.627 on 35 degrees of freedom
## Multiple R-Squared: 0.2178,  Adjusted R-squared: 0.1507 
## F-statistic: 3.248 on 3 and 35 DF,  p-value: 0.03333 
## 
## 
## Estimation results for equation p: 
## ================================== 
## p = v.l1 + p.l1 + const + trend 
## 
##         Estimate Std. Error t value Pr(>|t|)  
## v.l1  -0.3512278  0.2684137  -1.309   0.1992  
## p.l1   0.6320899  0.3562198   1.774   0.0847 .
## const -0.0734402  1.0162147  -0.072   0.9428  
## trend -0.0008388  0.0434190  -0.019   0.9847  
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
## 
## 
## Residual standard error: 2.953 on 35 degrees of freedom
## Multiple R-Squared: 0.09608, Adjusted R-squared: 0.0186 
## F-statistic:  1.24 on 3 and 35 DF,  p-value: 0.3099 
## 
## 
## 
## Covariance matrix of residuals:
##       v      p
## v 13.16 10.381
## p 10.38  8.721
## 
## Correlation matrix of residuals:
##        v      p
## v 1.0000 0.9691
## p 0.9691 1.0000
causality(var1, cause='v')$Granger
## 
##  Granger causality H0: v do not Granger-cause p
## 
## data:  VAR object var1
## F-Test = 1.7123, df1 = 1, df2 = 70, p-value = 0.195
causality(var1, cause='p')$Granger
## 
##  Granger causality H0: p do not Granger-cause v
## 
## data:  VAR object var1
## F-Test = 0.48365, df1 = 1, df2 = 70, p-value = 0.4891
plot(irf(var1, impulse='v', response = 'p'))

plot(irf(var1, impulse='p', response = 'v'))

var.serial <- serial.test(var1, lags.pt = 12)
var.serial
## 
##  Portmanteau Test (asymptotic)
## 
## data:  Residuals of VAR object var1
## Chi-squared = 74.069, df = 44, p-value = 0.00305
plot(var.serial)

An alternative proposal

VARselect(base3, lag.max = 24, type = "const")[["selection"]]
## AIC(n)  HQ(n)  SC(n) FPE(n) 
##      8      8      8      8
var8 <- VAR(base3, p=8, type = "both")
summary(var8)
## 
## VAR Estimation Results:
## ========================= 
## Endogenous variables: v, p 
## Deterministic variables: both 
## Sample size: 32 
## Log Likelihood: -66.074 
## Roots of the characteristic polynomial:
## 0.9639 0.9639 0.9366 0.9366 0.9313 0.9313 0.9183 0.9183 0.8899 0.8776 0.8776 0.8104 0.8104 0.6932 0.6932 0.3462
## Call:
## VAR(y = base3, p = 8, type = "both")
## 
## 
## Estimation results for equation v: 
## ================================== 
## v = v.l1 + p.l1 + v.l2 + p.l2 + v.l3 + p.l3 + v.l4 + p.l4 + v.l5 + p.l5 + v.l6 + p.l6 + v.l7 + p.l7 + v.l8 + p.l8 + const + trend 
## 
##       Estimate Std. Error t value Pr(>|t|)  
## v.l1    2.1739     1.2546   1.733   0.1051  
## p.l1   -2.2419     1.5620  -1.435   0.1732  
## v.l2    0.0728     1.1495   0.063   0.9504  
## p.l2   -0.5155     1.5173  -0.340   0.7391  
## v.l3   -1.2295     0.8997  -1.367   0.1933  
## p.l3    0.7749     1.1681   0.663   0.5179  
## v.l4   -1.3367     0.9013  -1.483   0.1602  
## p.l4    1.7021     1.2289   1.385   0.1877  
## v.l5    1.0267     0.9039   1.136   0.2751  
## p.l5   -0.9041     1.0070  -0.898   0.3845  
## v.l6    0.8322     0.9789   0.850   0.4096  
## p.l6   -1.4776     1.0598  -1.394   0.1850  
## v.l7    0.9043     0.8158   1.109   0.2863  
## p.l7   -1.2288     0.9255  -1.328   0.2055  
## v.l8   -1.6594     0.7197  -2.306   0.0369 *
## p.l8    1.5987     0.5985   2.671   0.0183 *
## const   2.3102     1.8040   1.281   0.2211  
## trend  -0.1056     0.0801  -1.319   0.2084  
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
## 
## 
## Residual standard error: 2.53 on 14 degrees of freedom
## Multiple R-Squared: 0.7429,  Adjusted R-squared: 0.4306 
## F-statistic: 2.379 on 17 and 14 DF,  p-value: 0.0539 
## 
## 
## Estimation results for equation p: 
## ================================== 
## p = v.l1 + p.l1 + v.l2 + p.l2 + v.l3 + p.l3 + v.l4 + p.l4 + v.l5 + p.l5 + v.l6 + p.l6 + v.l7 + p.l7 + v.l8 + p.l8 + const + trend 
## 
##       Estimate Std. Error t value Pr(>|t|)  
## v.l1   1.74893    1.04814   1.669   0.1174  
## p.l1  -2.04967    1.30503  -1.571   0.1386  
## v.l2   0.03780    0.96039   0.039   0.9692  
## p.l2  -0.58133    1.26764  -0.459   0.6536  
## v.l3  -0.69914    0.75167  -0.930   0.3681  
## p.l3   0.17918    0.97589   0.184   0.8570  
## v.l4  -1.41934    0.75304  -1.885   0.0804 .
## p.l4   1.46959    1.02672   1.431   0.1743  
## v.l5   1.12675    0.75519   1.492   0.1579  
## p.l5  -1.11323    0.84132  -1.323   0.2070  
## v.l6   0.61823    0.81787   0.756   0.4622  
## p.l6  -1.16930    0.88543  -1.321   0.2078  
## v.l7   0.49830    0.68154   0.731   0.4768  
## p.l7  -0.82255    0.77324  -1.064   0.3054  
## v.l8  -1.28531    0.60127  -2.138   0.0507 .
## p.l8   1.26514    0.50004   2.530   0.0240 *
## const  1.41944    1.50714   0.942   0.3623  
## trend -0.09381    0.06692  -1.402   0.1828  
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
## 
## 
## Residual standard error: 2.113 on 14 degrees of freedom
## Multiple R-Squared: 0.7282,  Adjusted R-squared: 0.3981 
## F-statistic: 2.206 on 17 and 14 DF,  p-value: 0.07054 
## 
## 
## 
## Covariance matrix of residuals:
##       v     p
## v 6.398 5.241
## p 5.241 4.466
## 
## Correlation matrix of residuals:
##        v      p
## v 1.0000 0.9803
## p 0.9803 1.0000
causality(var8, cause='v')$Granger
## 
##  Granger causality H0: v do not Granger-cause p
## 
## data:  VAR object var8
## F-Test = 2.8017, df1 = 8, df2 = 28, p-value = 0.02052
causality(var8, cause='p')$Granger
## 
##  Granger causality H0: p do not Granger-cause v
## 
## data:  VAR object var8
## F-Test = 2.6585, df1 = 8, df2 = 28, p-value = 0.02628
plot(irf(var8, impulse='v', response = 'p'))

plot(irf(var8, impulse='p', response = 'v'))

var.serial <- serial.test(var8, lags.pt = 8)
var.serial
## 
##  Portmanteau Test (asymptotic)
## 
## data:  Residuals of VAR object var8
## Chi-squared = 34.921, df = 0, p-value < 2.2e-16
plot(var.serial)
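
With lags.pt equal to the VAR order, the Portmanteau statistic has zero degrees of freedom (df = 0 above), so its p-value is not informative; a sketch with a longer lag horizon gives a usable check:

serial.test(var8, lags.pt = 16)  # the horizon must exceed the VAR order p = 8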

14. Evaluate potential cointegration using the Engle-Granger and Johansen methodologies.

rm(list = ls())
base4<-read_xls("C:/Users/rodar/OneDrive/Escritorio/SERIES/V Y P.xls")
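
The section ends after reloading the data; a minimal sketch of both procedures follows (it assumes the same VOL and PROD columns used in point 13; ca.jo() comes from the urca package loaded above):

ts.V <- ts(base4$VOL, start = c(2006, 1), frequency = 12)
ts.P <- ts(base4$PROD, start = c(2006, 1), frequency = 12)

# Engle-Granger: regress one series on the other and test the residuals for a
# unit root; stationary residuals point to cointegration
eg <- lm(ts.V ~ ts.P)
summary(ur.df(residuals(eg)))

# Johansen: trace test on both series jointly; the rank indicates how many
# cointegrating relations are supported
summary(ca.jo(cbind(ts.V, ts.P), type = "trace", ecdet = "const", K = 2))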