# Load necessary libraries

library(fpp3)
## Registered S3 method overwritten by 'tsibble':
##   method               from 
##   as_tibble.grouped_df dplyr
## ── Attaching packages ──────────────────────────────────────────── fpp3 1.0.1 ──
## ✔ tibble      3.2.1     ✔ tsibble     1.1.6
## ✔ dplyr       1.1.4     ✔ tsibbledata 0.4.1
## ✔ tidyr       1.3.1     ✔ feasts      0.4.1
## ✔ lubridate   1.9.4     ✔ fable       0.4.1
## ✔ ggplot2     3.5.1
## ── Conflicts ───────────────────────────────────────────────── fpp3_conflicts ──
## ✖ lubridate::date()    masks base::date()
## ✖ dplyr::filter()      masks stats::filter()
## ✖ tsibble::intersect() masks base::intersect()
## ✖ tsibble::interval()  masks lubridate::interval()
## ✖ dplyr::lag()         masks stats::lag()
## ✖ tsibble::setdiff()   masks base::setdiff()
## ✖ tsibble::union()     masks base::union()
library(tidyverse)
## ── Attaching core tidyverse packages ──────────────────────── tidyverse 2.0.0 ──
## ✔ forcats 1.0.0     ✔ readr   2.1.5
## ✔ purrr   1.0.4     ✔ stringr 1.5.1
## ── Conflicts ────────────────────────────────────────── tidyverse_conflicts() ──
## ✖ dplyr::filter()     masks stats::filter()
## ✖ tsibble::interval() masks lubridate::interval()
## ✖ dplyr::lag()        masks stats::lag()
## ℹ Use the conflicted package (<http://conflicted.r-lib.org/>) to force all conflicts to become errors
library(lubridate)
library(tsibble)
library(forecast)
## Warning: package 'forecast' was built under R version 4.4.3
## Registered S3 method overwritten by 'quantmod':
##   method            from
##   as.zoo.data.frame zoo

Load the aus_livestock dataset and inspect the first few rows for pigs slaughtered in Victoria

data("aus_livestock")

aus_livestock %>% 
  filter(State == "Victoria", Animal == "Pigs") %>% 
  head()
## # A tsibble: 6 x 4 [1M]
## # Key:       Animal, State [1]
##      Month Animal State     Count
##      <mth> <fct>  <fct>     <dbl>
## 1 1972 Jul Pigs   Victoria  94200
## 2 1972 Aug Pigs   Victoria 105700
## 3 1972 Sep Pigs   Victoria  96500
## 4 1972 Oct Pigs   Victoria 117100
## 5 1972 Nov Pigs   Victoria 104600
## 6 1972 Dec Pigs   Victoria 100500

Plot the number of pigs slaughtered in Victoria over time

aus_livestock %>%
  filter(State == "Victoria" & Animal == "Pigs") %>%
  ggplot(aes(x = Month, y = Count)) + 
  geom_line() +
  labs(title = "Number of Pigs Slaughtered in Victoria", x = "Year", y = "Count")

Create a tsibble for the Victoria pigs data

pigs_victoria <- aus_livestock %>%
  filter(State == "Victoria", Animal == "Pigs") %>%
  as_tsibble(index = Month)

Fit an ETS Model

ets_model <- pigs_victoria %>%
  model(ETS(Count))
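
The ETS() call above selects the error, trend, and season components automatically. The chosen specification and estimated smoothing parameters can be inspected with report() (a quick check using the fabletools report generic):

report(ets_model)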

Forecast for the next 12 months

forecast_ets <- ets_model %>%
  forecast(h = 12)

Plot the forecast

autoplot(forecast_ets) +
  labs(title = "Forecast of Pigs Slaughtered in Victoria", y = "Forecasted Count")

Compute a 95% prediction interval manually for the first forecast (point forecast ± 1.96 × residual standard deviation)

residuals_ets <- residuals(ets_model)
std_dev <- sd(residuals_ets$.resid)

forecast_value <- forecast_ets$.mean[1]  # First forecast value
lower_bound <- forecast_value - 1.96 * std_dev
upper_bound <- forecast_value + 1.96 * std_dev

cat("95% Prediction Interval: ", lower_bound, " to ", upper_bound, "\n")
## 95% Prediction Interval:  69328.25  to  99521.17
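
This manual interval assumes approximately normal residuals, so it should be close to the model-based interval at the first forecast horizon. A sketch of extracting the model-based 95% interval with hilo() from fabletools for comparison:

forecast_ets %>%
  hilo(level = 95) %>%
  head(1)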

8.6: Custom Function for Simple Exponential Smoothing

smooth_exponential <- function(data_series, smoothing_factor, initial_value = data_series[1]) {
  smoothed_series <- vector("numeric", length(data_series))
  smoothed_series[1] <- initial_value  # Start with the initial value
  
  # One-step-ahead recursion: weight the previous observation by the smoothing
  # factor and the previous smoothed value by its complement
  for (i in 2:length(data_series)) {
    smoothed_series[i] <- smoothing_factor * data_series[i - 1] + 
                          (1 - smoothing_factor) * smoothed_series[i - 1]
  }
  
  return(smoothed_series)
}

Test the smoothing function with a small dataset

small_data <- c(100, 105, 110, 115, 120)
smoothed_values <- smooth_exponential(small_data, smoothing_factor = 0.2)
print(smoothed_values)
## [1] 100.00 100.00 101.00 102.80 105.24
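
As a quick check of the recursion (each value is the smoothing factor times the previous observation plus one minus the smoothing factor times the previous smoothed value): the third value is 0.2 * 105 + 0.8 * 100 = 101 and the fourth is 0.2 * 110 + 0.8 * 101 = 102.8, matching the output above.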

8.7: Function to Compute SSE

compute_sse <- function(params, y) {
  alpha <- params[1]  # smoothing parameter
  level <- params[2]  # initial level
  
  # One-step-ahead forecasts from the custom smoother
  forecasts <- smooth_exponential(y, alpha, level)
  
  # Sum of squared one-step forecast errors
  sse <- sum((y - forecasts)^2)
  return(sse)
}
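
A quick check of the SSE function on the small test series from above (a sketch; the initial level is set to the first observation):

compute_sse(c(alpha = 0.2, level = small_data[1]), y = small_data)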

Use optim() to find the optimal alpha and level

optim_results <- optim(par = c(alpha = 0.2, level = mean(aus_livestock$Count)),  
                       fn = compute_sse,  
                       y = aus_livestock$Count) 
optim_results$par
##       alpha       level 
## 9.29229e-01 8.08271e+04
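
Note that the call above optimises over the pooled Count column of aus_livestock (all animals and states). A sketch of the same optimisation restricted to the Victoria pigs series created earlier:

optim_pigs <- optim(par = c(alpha = 0.2, level = pigs_victoria$Count[1]),
                    fn = compute_sse,
                    y = pigs_victoria$Count)
optim_pigs$par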

8.8: Combined Function to Optimize the Parameters and Generate the Forecast

optimize_and_forecast <- function(y) {
  
  # Find the optimal smoothing parameter and initial level by minimising the SSE
  optim_results <- optim(par = c(alpha = 0.2, level = mean(y)), 
                         fn = compute_sse, 
                         y = y)
  
  alpha_optimal <- optim_results$par[1]
  level_optimal <- optim_results$par[2]
  
  # One-step-ahead forecasts using the optimised parameters
  forecast <- smooth_exponential(y, alpha_optimal, level_optimal)
  return(list(forecast = forecast, alpha = alpha_optimal, level = level_optimal))
}

result <- optimize_and_forecast(aus_livestock$Count)
result$forecast[length(result$forecast)]  # Last in-sample one-step-ahead forecast
## [1] 152255.9
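
The value above is the last in-sample one-step-ahead forecast. A sketch of extending the same recursion one step beyond the sample, using the optimised smoothing parameter:

y_last <- tail(aus_livestock$Count, 1)
fit_last <- tail(result$forecast, 1)
result$alpha * y_last + (1 - result$alpha) * fit_last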

8.9: Forecasting Exports for China

data("global_economy")

Filter for exports from China

china_exports <- global_economy %>%
  filter(Country == "China") %>%
  select(Year, Exports)

Plot the exports data for China

china_exports %>%
  ggplot(aes(x = Year, y = Exports)) +
  geom_line() +
  labs(title = "Exports from China", x = "Year", y = "Exports")

Fit ETS models: ETS(A,N,N) and ETS(A,A,N)

ets_ann <- china_exports %>%
  model(ETS(Exports ~ error("A") + trend("N") + season("N")))

ets_aan <- china_exports %>%
  model(ETS(Exports ~ error("A") + trend("A") + season("N")))

Compare the two models

ets_ann
## # A mable: 1 x 1
##   `ETS(Exports ~ error("A") + trend("N") + season("N"))`
##                                                  <model>
## 1                                           <ETS(A,N,N)>
ets_aan
## # A mable: 1 x 1
##   `ETS(Exports ~ error("A") + trend("A") + season("N"))`
##                                                  <model>
## 1                                           <ETS(A,A,N)>
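
Printing the mables only shows the fitted specifications. A sketch of a more direct comparison using the fabletools glance() and accuracy() verbs (AICc and in-sample error measures):

glance(ets_ann)
glance(ets_aan)
accuracy(ets_ann)
accuracy(ets_aan)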

Conclusion

This analysis applied two time series forecasting approaches, an automatically selected ETS model and a custom exponential smoothing implementation, to the number of pigs slaughtered in Victoria and to China's export data. The ETS model captured the main patterns in the pigs series and produced 12-month forecasts, and the manually computed prediction interval provided an additional check on the forecast uncertainty. Optimising the exponential smoothing parameters with optim() showed how the smoothing parameter and initial level can be chosen by minimising the sum of squared errors, improving on arbitrary fixed settings. The results illustrate the usefulness of statistical models in agricultural and economic forecasting, and suggest further work such as comparing against ARIMA models or incorporating external influences to support better decision-making.