library(readxl)
library(forecast)
PIB_09_21 <- read_excel("C:/doc R/PIBtrimSLV(09-21).xlsx", col_types = c("skip","numeric"),
skip = 5)
serie.PIB.ts<- ts(data = PIB_09_21, start = c(2009,1),frequency = 4)
Yt <- serie.PIB.ts
serie.PIB.ts %>%
autoplot(main = "PIB SLV 2009-2021",
xlab = "años/ trimestres",
ylab = "")
GENERATE FORECASTS FOR ALL OF 2022 USING A SARIMA MODEL
Time series forecasting: modern (stochastic) approach
Box-Jenkins methodology
library(TSstudio)
library(forecast)
ts_plot(Yt,Xtitle = "Años/trimestres")
Identification
# ORDER OF INTEGRATION
library(kableExtra)
library(magrittr)
d<-ndiffs(Yt)
D<-nsdiffs(Yt)
ordenes_integracion<-c(d,D)
names(ordenes_integracion)<-c("d","D")
ordenes_integracion %>% kable(caption = "Ordenes de Integración") %>% kable_material()
|   | x |
|---|---|
| d | 1 |
| D | 1 |
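As a quick robustness check (a minimal sketch, not part of the original workflow), ndiffs() and nsdiffs() can be re-run with alternative tests; the defaults are KPSS and a seasonal-strength measure, and "adf"/"ocsb" are the alternatives assumed here.
# Cross-check the suggested orders with alternative unit-root tests
ndiffs(Yt, test = "adf")    # regular differencing order via the ADF test
nsdiffs(Yt, test = "ocsb")  # seasonal differencing order via the OCSB test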
# Plot of the differenced series
Yt %>%
  diff(lag = 4, differences = D) %>%
  diff(differences = d) %>%
  ts_plot(title = "Yt estacionaria")
# CHECK CANDIDATE VALUES FOR (p,q) & (P,Q)
Yt %>%
  diff(lag = 4, differences = D) %>%
  diff(differences = d) %>%
  ts_cor(lag.max = 12)
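If helpful, the same differenced series can be inspected with forecast::ggtsdisplay(), which shows the series, ACF and PACF in one panel; this is only an alternative view to ts_cor(), sketched here for convenience. Spikes at the seasonal lag (4) suggest candidates for (P, Q), while the first non-seasonal lags suggest (p, q).
# Alternative view: series, ACF and PACF in a single panel
Yt %>%
  diff(lag = 4, differences = D) %>%
  diff(differences = d) %>%
  ggtsdisplay(lag.max = 12)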
Model estimation
USING THE forecast PACKAGE
library(forecast)
library(ggthemes)
modelo.estimado.PIB <- Yt %>%
Arima(order = c(0, 1, 0),
seasonal = c(1, 1, 1))
summary(modelo.estimado.PIB)
## Series: .
## ARIMA(0,1,0)(1,1,1)[4]
##
## Coefficients:
## sar1 sma1
## -0.3980 -0.4550
## s.e. 0.3732 0.3688
##
## sigma^2 = 77377: log likelihood = -331.67
## AIC=669.34 AICc=669.9 BIC=674.89
##
## Training set error measures:
## ME RMSE MAE MPE MAPE MASE
## Training set 6.530071 258.7683 120.9372 -0.004468918 2.054645 0.3680668
## ACF1
## Training set -0.09806694
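The standard errors of sar1 and sma1 above are large relative to the estimates, which is what motivates trying the reduced (P-1) and (Q-1) specifications below. A minimal sketch of that check, using base R's coef()/vcov() on the fitted Arima object (approximate z statistics; object names are the ones defined above):
# Approximate z statistics and p-values for the estimated coefficients
z <- coef(modelo.estimado.PIB) / sqrt(diag(vcov(modelo.estimado.PIB)))
round(cbind(estimate = coef(modelo.estimado.PIB),
            z = z,
            p.value = 2 * pnorm(-abs(z))), 3)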
# plot of the AR/MA roots (stationarity/invertibility)
modelo.estimado.PIB %>% autoplot(type="both")+theme_solarized()
# diagnostic checking of the residuals
modelo.estimado.PIB %>% check_res(lag.max = 4)
# comparative plot: actual vs. fitted
Yt.Sarima <- modelo.estimado.PIB$fitted
grafico.comparativo <- cbind(Yt, Yt.Sarima)
ts_plot(grafico.comparativo)
Candidate models: starting from SARIMA(0,1,0)(1,1,1), reduce P by 1 → SARIMA(0,1,0)(0,1,1), and reduce Q by 1 → SARIMA(0,1,0)(1,1,0)
library(tsibble)
library(feasts)
library(fable)
library(fabletools)
library(tidyr)
library(dplyr)
# fit the candidate models plus an automatic specification
a <- Yt %>% as_tsibble() %>%
  model(arima_original   = ARIMA(value ~ pdq(0, 1, 0) + PDQ(1, 1, 1)),  # original model
        arima_010_011    = ARIMA(value ~ pdq(0, 1, 0) + PDQ(0, 1, 1)),  # P reduced by 1
        arima_010_110    = ARIMA(value ~ pdq(0, 1, 0) + PDQ(1, 1, 0)),  # Q reduced by 1
        arima_automatico = ARIMA(value, ic = "bic", stepwise = FALSE)   # automatic model
  )
print(a)
## # A mable: 1 x 4
## arima_original arima_010_011 arima_010_110
## <model> <model> <model>
## 1 <ARIMA(0,1,0)(1,1,1)[4]> <ARIMA(0,1,0)(0,1,1)[4]> <ARIMA(0,1,0)(1,1,0)[4]>
## # ... with 1 more variable: arima_automatico <model>
# sort the models by information criterion (AICc)
a %>% pivot_longer(everything(), names_to = "Model name",
values_to = "Orders") %>% glance() %>%
arrange(AICc) ->tabla
tabla
## # A tibble: 4 x 9
## `Model name` .model sigma2 log_lik AIC AICc BIC ar_roots ma_roots
## <chr> <chr> <dbl> <dbl> <dbl> <dbl> <dbl> <list> <list>
## 1 arima_010_011 Orders 75930. -332. 668. 669. 672. <cpl [0]> <cpl [4]>
## 2 arima_010_110 Orders 78594. -333. 669. 670. 673. <cpl [4]> <cpl [0]>
## 3 arima_original Orders 77377. -332. 669. 670. 675. <cpl [4]> <cpl [4]>
## 4 arima_automatico Orders 62738. -334. 676. 677. 683. <cpl [1]> <cpl [4]>
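Before settling on the lowest-AICc candidate (arima_010_011), its residuals can be checked within the fable framework; a sketch, assuming the usual feasts helpers (gg_tsresiduals(), ljung_box()), with lag = 8 for quarterly data and dof = 1 for the single estimated coefficient:
# Residual diagnostics for the lowest-AICc candidate
a %>% select(arima_010_011) %>% gg_tsresiduals()
# Ljung-Box test on the innovation residuals
augment(a) %>%
  filter(.model == "arima_010_011") %>%
  features(.innov, ljung_box, lag = 8, dof = 1)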
library(forecast)
library(dplyr)
library(tsibble)
library(fable)
library(fabletools)
# convert to a tsibble and rename the value column
Yt<-Yt %>%
as_tsibble() %>%
rename(PIB=value)
# build the rolling-origin (stretching) training sets (red)
data.cross.validation.PIB<-Yt %>%
as_tsibble() %>%
stretch_tsibble(.init = 20,.step = 1)
# one-step-ahead forecasts (green) and cross-validation accuracy
TSCV<-data.cross.validation.PIB %>%
model(ARIMA(PIB ~ pdq(0, 1, 0) + PDQ(1, 1, 1))) %>%
forecast(h=1) %>% accuracy(Yt)
print(TSCV)
## # A tibble: 1 x 10
## .model .type ME RMSE MAE MPE MAPE MASE RMSSE ACF1
## <chr> <chr> <dbl> <dbl> <dbl> <dbl> <dbl> <dbl> <dbl> <dbl>
## 1 ARIMA(PIB ~ pdq(0, 1, ~ Test 30.1 370. 197. 0.228 3.21 0.600 0.780 0.0422
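The 2022 forecasts themselves (the stated goal of this section) are not shown above; a minimal sketch, assuming the SARIMA(0,1,0)(0,1,1)[4] reduction is the final choice and that h = 4 covers 2022Q1-2022Q4, since the sample ends in 2021Q4 (the object name pronostico.PIB.2022 is illustrative):
# Forecast the four quarters of 2022 and plot them against the history
pronostico.PIB.2022 <- Yt %>%
  model(ARIMA(PIB ~ pdq(0, 1, 0) + PDQ(0, 1, 1))) %>%
  forecast(h = 4)
pronostico.PIB.2022
autoplot(pronostico.PIB.2022, Yt)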
library(readxl)
library(forecast)
IPCgeneral_22 <- read_excel("C:/doc R/IPC.SLV(09-may22).xlsx", col_types = c("skip","numeric"),
skip = 6)
serie.IPC.ts<- ts(data = IPCgeneral_22, start = c(2009,1),frequency = 12)
Yt_1 <- serie.IPC.ts
serie.IPC.ts %>%
autoplot(main = "IPC general SLV 2009-2022(05)",
xlab = "años/ meses",
ylab = "")
GENERATE FORECASTS FOR ALL OF 2022 USING A SARIMA MODEL
Time series forecasting: modern (stochastic) approach
Box-Jenkins methodology
library(TSstudio)
library(forecast)
ts_plot(Yt_1,Xtitle = "Años/Meses")
Identification
# ORDER OF INTEGRATION
library(kableExtra)
library(magrittr)
d1<-ndiffs(Yt_1)
D1<-nsdiffs(Yt_1)
ordenes_integracion1<-c(d1,D1)
names(ordenes_integracion1)<-c("d1","D1")
ordenes_integracion1 %>% kable(caption = "Ordenes de Integración 1") %>% kable_material()
|    | x |
|----|---|
| d1 | 1 |
| D1 | 0 |
# Plot of the differenced series (D1 = 0, so only the regular difference is applied)
Yt_1 %>%
  diff(differences = d1) %>%
  ts_plot(title = "Yt_1 estacionaria")
# CHECK CANDIDATE VALUES FOR (p,q) & (P,Q)
Yt_1 %>%
  diff(differences = d1) %>%
  ts_cor(lag.max = 36)
Model estimation
USING THE forecast PACKAGE
library(forecast)
library(ggthemes)
modelo.estimado.IPC <- Yt_1 %>%
Arima(order = c(1, 1, 1),
seasonal = c(1, 0, 2))
summary(modelo.estimado.IPC)
## Series: .
## ARIMA(1,1,1)(1,0,2)[12]
##
## Coefficients:
## ar1 ma1 sar1 sma1 sma2
## 0.9406 -0.7934 0.0571 0.0615 -0.0683
## s.e. 0.0859 0.1447 0.6621 0.6595 0.1106
##
## sigma^2 = 0.2251: log likelihood = -104.87
## AIC=221.74 AICc=222.29 BIC=240.15
##
## Training set error measures:
## ME RMSE MAE MPE MAPE MASE
## Training set 0.05582688 0.4654425 0.3091819 0.04952956 0.2834401 0.1668998
## ACF1
## Training set 0.08746749
# plot of the AR/MA roots (stationarity/invertibility)
modelo.estimado.IPC %>% autoplot(type="both")+theme_solarized()
# diagnostic checking of the residuals
modelo.estimado.IPC %>% check_res(lag.max = 36)
# comparative plot: actual vs. fitted
Yt.Sarima.IPC <- modelo.estimado.IPC$fitted
grafico.comparativo1 <- cbind(Yt_1, Yt.Sarima.IPC)
ts_plot(grafico.comparativo1)
Candidate models: starting from SARIMA(1,1,1)(1,0,2), reduce P by 1 → SARIMA(1,1,1)(0,0,2), and reduce Q by 1 → SARIMA(1,1,1)(1,0,1)
library(tsibble)
library(feasts)
library(fable)
library(fabletools)
library(tidyr)
library(dplyr)
# fit the candidate models plus an automatic specification
# (the model-name labels are reused from the PIB block and do not reflect these orders)
a1 <- Yt_1 %>% as_tsibble() %>%
  model(arima_original   = ARIMA(value ~ pdq(1, 1, 1) + PDQ(1, 0, 2)),  # original model
        arima_010_011    = ARIMA(value ~ pdq(1, 1, 1) + PDQ(0, 0, 2)),  # P reduced by 1
        arima_010_110    = ARIMA(value ~ pdq(1, 1, 1) + PDQ(1, 0, 1)),  # Q reduced by 1
        arima_automatico = ARIMA(value, ic = "bic", stepwise = FALSE)   # automatic model
  )
print(a1)
## # A mable: 1 x 4
## arima_original arima_010_011
## <model> <model>
## 1 <ARIMA(1,1,1)(1,0,2)[12] w/ drift> <ARIMA(1,1,1)(0,0,2)[12] w/ drift>
## # ... with 2 more variables: arima_010_110 <model>, arima_automatico <model>
# sort the models by information criterion (AICc)
a1 %>% pivot_longer(everything(), names_to = "Model name",
values_to = "Orders") %>% glance() %>%
arrange(AICc) ->tabla1
tabla1
## # A tibble: 4 x 9
## `Model name` .model sigma2 log_lik AIC AICc BIC ar_roots ma_roots
## <chr> <chr> <dbl> <dbl> <dbl> <dbl> <dbl> <list> <list>
## 1 arima_010_011 Orders 0.219 -102. 217. 218. 235. <cpl [1]> <cpl [25]>
## 2 arima_automatico Orders 0.218 -102. 217. 218. 235. <cpl [26]> <cpl [0]>
## 3 arima_010_110 Orders 0.219 -103. 217. 218. 236. <cpl [13]> <cpl [13]>
## 4 arima_original Orders 0.220 -102. 219. 220. 240. <cpl [13]> <cpl [25]>
library(forecast)
library(dplyr)
library(tsibble)
library(fable)
library(fabletools)
# convert to a tsibble and rename the value column
Yt_1<-Yt_1 %>%
as_tsibble() %>%
rename(IPC=value)
# build the rolling-origin (stretching) training sets (red)
data.cross.validation.IPC<-Yt_1 %>%
as_tsibble() %>%
stretch_tsibble(.init = 60,.step = 1)
# one-step-ahead forecasts (green) and cross-validation accuracy
TSCV_1<-data.cross.validation.IPC %>%
model(ARIMA(IPC ~ pdq(1, 1, 1) + PDQ(1, 0, 2))) %>%
forecast(h=1) %>% accuracy(Yt_1)
print(TSCV_1)
## # A tibble: 1 x 10
## .model .type ME RMSE MAE MPE MAPE MASE RMSSE ACF1
## <chr> <chr> <dbl> <dbl> <dbl> <dbl> <dbl> <dbl> <dbl> <dbl>
## 1 ARIMA(IPC ~ pdq(1, 1~ Test 0.0695 0.437 0.295 0.0579 0.261 0.159 0.160 0.0775
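As with the GDP series, the 2022 forecasts can now be produced directly; a sketch, assuming the original SARIMA(1,1,1)(1,0,2)[12] specification and h = 7 to cover June-December 2022, since the sample ends in May 2022 (pronostico.IPC.2022 is an illustrative name):
# Forecast the remaining months of 2022 and plot them against the history
pronostico.IPC.2022 <- Yt_1 %>%
  model(ARIMA(IPC ~ pdq(1, 1, 1) + PDQ(1, 0, 2))) %>%
  forecast(h = 7)
pronostico.IPC.2022
autoplot(pronostico.IPC.2022, Yt_1)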
library(readxl)
library(forecast)
M_HDCBS <- read_excel("C:/doc R/Imp.Hidrocarburo(17-may22).xlsx", col_types = c("skip","numeric"),
skip = 6)
serie.Mhdcbs.ts<- ts(data = M_HDCBS, start = c(2017,1),frequency = 12)
Yt_2 <- serie.Mhdcbs.ts
serie.Mhdcbs.ts %>%
autoplot(main = "Importacion hidrocarbutos SLV 2017-2022(05)",
xlab = "Años/ Meses",
ylab = "")
GENERATE FORECASTS FOR ALL OF 2022 USING A SARIMA MODEL
Time series forecasting: modern (stochastic) approach
Box-Jenkins methodology
library(TSstudio)
library(forecast)
ts_plot(Yt_2,Xtitle = "Años/Meses")
Identification
# ORDER OF INTEGRATION
library(kableExtra)
library(magrittr)
d2<-ndiffs(Yt_2)
D2<-nsdiffs(Yt_2)
ordenes_integracion<-c(d2,D2)
names(ordenes_integracion)<-c("d2","D2")
ordenes_integracion %>% kable(caption = "Ordenes de Integración 2") %>% kable_material()
|    | x |
|----|---|
| d2 | 0 |
| D2 | 0 |
# Plot of the series: d2 = 0 and D2 = 0, so no differencing is required
Yt_2 %>%
  ts_plot(title = "Yt 2 estacionaria")
# CHECK CANDIDATE VALUES FOR (p,q) & (P,Q) on the undifferenced series
Yt_2 %>%
  ts_cor(lag.max = 36)
Model estimation
USING THE forecast PACKAGE
library(forecast)
library(ggthemes)
modelo.estimado.MHDCBRS <- Yt_2 %>%
Arima(order = c(1, 0, 1),
seasonal = c(1, 0, 0))
summary(modelo.estimado.MHDCBRS)
## Series: .
## ARIMA(1,0,1)(1,0,0)[12] with non-zero mean
##
## Coefficients:
## ar1 ma1 sar1 mean
## 0.7657 -0.577 -0.1276 216145.229
## s.e. 0.2090 0.244 0.1587 6368.138
##
## sigma^2 = 1.05e+09: log likelihood = -741.79
## AIC=1493.57 AICc=1494.63 BIC=1504.29
##
## Training set error measures:
## ME RMSE MAE MPE MAPE MASE ACF1
## Training set 66.97511 31351.31 25082.44 -2.214857 12.0959 0.6366723 -0.02390458
# plot of the AR/MA roots (stationarity/invertibility)
modelo.estimado.MHDCBRS %>% autoplot(type="both")+theme_solarized()
# diagnostic checking of the residuals
modelo.estimado.MHDCBRS %>% check_res(lag.max = 36)
# comparative plot: actual vs. fitted
Yt.Sarima.2 <- modelo.estimado.MHDCBRS$fitted
grafico.comparativo2 <- cbind(Yt_2, Yt.Sarima.2)
ts_plot(grafico.comparativo2)
Candidate models: starting from SARIMA(1,0,1)(1,0,0), add a regular difference → SARIMA(1,1,1)(1,0,0), and add a seasonal difference → SARIMA(1,0,1)(1,1,0)
library(tsibble)
library(feasts)
library(fable)
library(fabletools)
library(tidyr)
library(dplyr)
# fit the candidate models plus an automatic specification
# (the model-name labels are reused from the PIB block and do not reflect these orders)
a2 <- Yt_2 %>% as_tsibble() %>%
  model(arima_original   = ARIMA(value ~ pdq(1, 0, 1) + PDQ(1, 0, 0)),  # original model
        arima_010_011    = ARIMA(value ~ pdq(1, 1, 1) + PDQ(1, 0, 0)),  # adds a regular difference (d = 1)
        arima_010_110    = ARIMA(value ~ pdq(1, 0, 1) + PDQ(1, 1, 0)),  # adds a seasonal difference (D = 1)
        arima_automatico = ARIMA(value, ic = "bic", stepwise = FALSE)   # automatic model
  )
print(a2)
## # A mable: 1 x 4
## arima_original arima_010_011
## <model> <model>
## 1 <ARIMA(1,0,1)(1,0,0)[12] w/ mean> <ARIMA(1,1,1)(1,0,0)[12]>
## # ... with 2 more variables: arima_010_110 <model>, arima_automatico <model>
# sort the models by information criterion (AICc); note that AICc/BIC are only comparable
# between models with the same differencing orders (d, D), which is not the case for all candidates here
a2 %>% pivot_longer(everything(), names_to = "Model name",
values_to = "Orders") %>% glance() %>%
arrange(AICc) ->tabla2
tabla2
## # A tibble: 4 x 9
## `Model name` .model sigma2 log_lik AIC AICc BIC ar_roots ma_roots
## <chr> <chr> <dbl> <dbl> <dbl> <dbl> <dbl> <list> <list>
## 1 arima_010_110 Orders 1.47e9 -612. 1231. 1232. 1239. <cpl> <cpl>
## 2 arima_010_011 Orders 1.11e9 -733. 1473. 1474. 1482. <cpl> <cpl>
## 3 arima_automatico Orders 1.11e9 -745. 1494. 1494. 1498. <cpl> <cpl>
## 4 arima_original Orders 1.05e9 -742. 1494. 1495. 1504. <cpl> <cpl>
library(forecast)
library(dplyr)
library(tsibble)
library(fable)
library(fabletools)
# convert to a tsibble and rename the value column
Yt_2<-Yt_2 %>%
as_tsibble() %>%
rename(MHDCBRS=value)
# build the rolling-origin (stretching) training sets (red)
data.cross.validation.MH<-Yt_2 %>%
as_tsibble() %>%
stretch_tsibble(.init = 60,.step = 1)
# one-step-ahead forecasts (green) and cross-validation accuracy
# (note: the seasonal part used here is (1,0,1), not the (1,0,0) estimated above)
TSCV_2<-data.cross.validation.MH %>%
model(ARIMA(MHDCBRS ~ pdq(1, 0, 1) + PDQ(1, 0, 1))) %>%
forecast(h=1) %>% accuracy(Yt_2)
print(TSCV_2)
## # A tibble: 1 x 10
## .model .type ME RMSE MAE MPE MAPE MASE RMSSE ACF1
## <chr> <chr> <dbl> <dbl> <dbl> <dbl> <dbl> <dbl> <dbl> <dbl>
## 1 ARIMA(MHDCBRS ~ pdq(~ Test 46349. 46349. 46349. 17.7 17.7 1.18 0.957 NA
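Finally, the 2022 forecasts for hydrocarbon imports; a sketch, assuming the original SARIMA(1,0,1)(1,0,0)[12] specification and h = 7 to cover June-December 2022, since the sample ends in May 2022 (pronostico.MH.2022 is an illustrative name):
# Forecast the remaining months of 2022 and plot them against the history
pronostico.MH.2022 <- Yt_2 %>%
  model(ARIMA(MHDCBRS ~ pdq(1, 0, 1) + PDQ(1, 0, 0))) %>%
  forecast(h = 7)
pronostico.MH.2022
autoplot(pronostico.MH.2022, Yt_2)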