Library

library(fpp3)
## Registered S3 method overwritten by 'tsibble':
##   method               from 
##   as_tibble.grouped_df dplyr
## ── Attaching packages ──────────────────────────────────────────── fpp3 1.0.2 ──
## ✔ tibble      3.3.0     ✔ tsibble     1.1.6
## ✔ dplyr       1.1.4     ✔ tsibbledata 0.4.1
## ✔ tidyr       1.3.1     ✔ feasts      0.4.2
## ✔ lubridate   1.9.4     ✔ fable       0.4.1
## ✔ ggplot2     3.5.2
## ── Conflicts ───────────────────────────────────────────────── fpp3_conflicts ──
## ✖ lubridate::date()    masks base::date()
## ✖ dplyr::filter()      masks stats::filter()
## ✖ tsibble::intersect() masks base::intersect()
## ✖ tsibble::interval()  masks lubridate::interval()
## ✖ dplyr::lag()         masks stats::lag()
## ✖ tsibble::setdiff()   masks base::setdiff()
## ✖ tsibble::union()     masks base::union()

Exercise 2.1

Explore the following four time series: Bricks from aus_production, Lynx from pelt, Close from gafa_stock, Demand from vic_elec.

Use ? (or help()) to find out about the data in each series. What is the time interval of each series? Use autoplot() to produce a time plot of each series. For the last plot, modify the axis labels and title.

?aus_production
head(aus_production)
## # A tsibble: 6 x 7 [1Q]
##   Quarter  Beer Tobacco Bricks Cement Electricity   Gas
##     <qtr> <dbl>   <dbl>  <dbl>  <dbl>       <dbl> <dbl>
## 1 1956 Q1   284    5225    189    465        3923     5
## 2 1956 Q2   213    5178    204    532        4436     6
## 3 1956 Q3   227    5297    208    561        4806     7
## 4 1956 Q4   308    5681    197    570        4418     6
## 5 1957 Q1   262    5577    187    529        4339     5
## 6 1957 Q2   228    5651    214    604        4811     7
?pelt
head(pelt)
## # A tsibble: 6 x 3 [1Y]
##    Year  Hare  Lynx
##   <dbl> <dbl> <dbl>
## 1  1845 19580 30090
## 2  1846 19600 45150
## 3  1847 19610 49150
## 4  1848 11990 39520
## 5  1849 28040 21230
## 6  1850 58000  8420
?gafa_stock
head(gafa_stock)
## # A tsibble: 6 x 8 [!]
## # Key:       Symbol [1]
##   Symbol Date        Open  High   Low Close Adj_Close    Volume
##   <chr>  <date>     <dbl> <dbl> <dbl> <dbl>     <dbl>     <dbl>
## 1 AAPL   2014-01-02  79.4  79.6  78.9  79.0      67.0  58671200
## 2 AAPL   2014-01-03  79.0  79.1  77.2  77.3      65.5  98116900
## 3 AAPL   2014-01-06  76.8  78.1  76.2  77.7      65.9 103152700
## 4 AAPL   2014-01-07  77.8  78.0  76.8  77.1      65.4  79302300
## 5 AAPL   2014-01-08  77.0  77.9  77.0  77.6      65.8  64632400
## 6 AAPL   2014-01-09  78.1  78.1  76.5  76.6      65.0  69787200
?vic_elec
head(vic_elec)
## # A tsibble: 6 x 5 [30m] <Australia/Melbourne>
##   Time                Demand Temperature Date       Holiday
##   <dttm>               <dbl>       <dbl> <date>     <lgl>  
## 1 2012-01-01 00:00:00  4383.        21.4 2012-01-01 TRUE   
## 2 2012-01-01 00:30:00  4263.        21.0 2012-01-01 TRUE   
## 3 2012-01-01 01:00:00  4049.        20.7 2012-01-01 TRUE   
## 4 2012-01-01 01:30:00  3878.        20.6 2012-01-01 TRUE   
## 5 2012-01-01 02:00:00  4036.        20.4 2012-01-01 TRUE   
## 6 2012-01-01 02:30:00  3866.        20.2 2012-01-01 TRUE
aus_production %>%
  autoplot(Bricks)
## Warning: Removed 20 rows containing missing values or values outside the scale range
## (`geom_line()`).

The tsibble header ([1Q]) and the time plot above show that the time interval of aus_production is quarterly.

pelt %>%
  autoplot(Lynx)

The tsibble header ([1Y]) and the time plot above show that the time interval of pelt is annual.

gafa_stock %>%
  autoplot(Close)

The tsibble header ([!]) shows that gafa_stock has an irregular time interval: observations are daily, but only for trading days, so weekends and market holidays are absent.

vic_elec %>%
  autoplot(Demand) +
  labs(title = "Half-hourly Electricity Demand for Victoria, Australia",
       y = "Total Electricity Demand in MWh")  # add axis labels and title

The tsibble header ([30m]) and the time plot above show that the time interval of vic_elec is half-hourly.
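The interval can also be read directly from each tsibble rather than from the plots; a small check (assuming the fpp3 packages loaded above):

interval(aus_production)  # quarterly (1Q)
interval(pelt)            # annual (1Y)
interval(gafa_stock)      # irregular (!) - trading days only
interval(vic_elec)        # half-hourly (30m)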

Exercise 2.2

Use filter() to find what days corresponded to the peak closing price for each of the four stocks in gafa_stock.

distinct(gafa_stock, Symbol) #list of all unique stock symbols
## # A tibble: 4 × 1
##   Symbol
##   <chr> 
## 1 AAPL  
## 2 AMZN  
## 3 FB    
## 4 GOOG

There are four unique stock symbols: AAPL, AMZN, FB, and GOOG. The peak closing price for each is found as follows:

aapl_peak <- gafa_stock %>%
  filter(Symbol == "AAPL") %>%
  select(Symbol, Date, Close) %>%
  slice_max(order_by = Close, n = 1)
amzn_peak <- gafa_stock %>%
  filter(Symbol == "AMZN") %>%
  select(Symbol, Date, Close) %>%
  slice_max(order_by = Close, n = 1)
fb_peak <- gafa_stock %>%
  filter(Symbol == "FB") %>%
  select(Symbol, Date, Close) %>%
  slice_max(order_by = Close, n = 1)
goog_peak <- gafa_stock %>%
  filter(Symbol == "GOOG") %>%
  select(Symbol, Date, Close) %>%
  slice_max(order_by = Close, n = 1)
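Because gafa_stock is already keyed by Symbol, the same result can be obtained more compactly by grouping on the key and keeping the rows with the maximum Close; an equivalent alternative to the four blocks above:

gafa_stock %>%
  group_by(Symbol) %>%
  filter(Close == max(Close)) %>%  # keep the peak closing day for each stock
  select(Symbol, Date, Close)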

Exercise 2.3

Download the file tute1.csv from the book website, open it in Excel (or some other spreadsheet application), and review its contents. You should find four columns of information. Columns B through D each contain a quarterly series, labelled Sales, AdBudget and GDP. Sales contains the quarterly sales for a small company over the period 1981-2005. AdBudget is the advertising budget and GDP is the gross domestic product. All series have been adjusted for inflation.

Loading the csv file:

tute1 <- read.csv("tute1.csv") #import csv file
glimpse(tute1)
## Rows: 100
## Columns: 4
## $ Quarter  <chr> "1981-03-01", "1981-06-01", "1981-09-01", "1981-12-01", "1982…
## $ Sales    <dbl> 1020.2, 889.2, 795.0, 1003.9, 1057.7, 944.4, 778.5, 932.5, 99…
## $ AdBudget <dbl> 659.2, 589.0, 512.5, 614.1, 647.2, 602.0, 530.7, 608.4, 637.9…
## $ GDP      <dbl> 251.8, 290.9, 290.8, 292.4, 279.1, 254.0, 295.6, 271.7, 259.6…

Converting the data to time series:

ts_tute1 <- tute1 %>%
  mutate(Quarter = yearquarter(Quarter)) %>%
  as_tsibble(index = Quarter)

Constructing the time series plots:

ts_tute1 %>%
  pivot_longer(-Quarter) %>%
  ggplot(aes(x = Quarter, y = value, colour = name)) +
  geom_line() +
  facet_grid(name ~ ., scales = "free_y")

Exercise 2.4

The USgas package contains data on the demand for natural gas in the US.

Install the USgas package. Create a tsibble from us_total with year as the index and state as the key. Plot the annual natural gas consumption by state for the New England area (comprising the states of Maine, Vermont, New Hampshire, Massachusetts, Connecticut and Rhode Island).

Installing the ‘USgas’ package

# install.packages("USgas")
library(USgas)
head(us_total)
##   year   state      y
## 1 1997 Alabama 324158
## 2 1998 Alabama 329134
## 3 1999 Alabama 337270
## 4 2000 Alabama 353614
## 5 2001 Alabama 332693
## 6 2002 Alabama 379343

Filtering to the New England states and converting the data to a tsibble:

us_total_filter <- us_total %>%
  rename(natural_gas_consumption = y) %>%
  filter(state %in% c("Maine", "Vermont", "New Hampshire",
                      "Massachusetts", "Connecticut", "Rhode Island")) %>%
  as_tsibble(key = state, index = year)

Plot the annual natural gas consumption

us_total_filter %>%
  autoplot(natural_gas_consumption)

Exercise 2.5

Download tourism.xlsx from the book website and read it into R using readxl::read_excel(). Create a tsibble which is identical to the tourism tsibble from the tsibble package. Find what combination of Region and Purpose had the maximum number of overnight trips on average. Create a new tsibble which combines the Purposes and Regions, and just has total trips by State.

Loading the data into R using readxl::read_excel()

library(readxl)
tourism_xl <- readxl::read_excel("tourism.xlsx")
head(tourism_xl)
## # A tibble: 6 × 5
##   Quarter    Region   State           Purpose  Trips
##   <chr>      <chr>    <chr>           <chr>    <dbl>
## 1 1998-01-01 Adelaide South Australia Business  135.
## 2 1998-04-01 Adelaide South Australia Business  110.
## 3 1998-07-01 Adelaide South Australia Business  166.
## 4 1998-10-01 Adelaide South Australia Business  127.
## 5 1999-01-01 Adelaide South Australia Business  137.
## 6 1999-04-01 Adelaide South Australia Business  200.

Load the tourism tsibble from the tsibble package for comparison.

tourism_tb <- tourism
head(tourism_tb)
## # A tsibble: 6 x 5 [1Q]
## # Key:       Region, State, Purpose [1]
##   Quarter Region   State           Purpose  Trips
##     <qtr> <chr>    <chr>           <chr>    <dbl>
## 1 1998 Q1 Adelaide South Australia Business  135.
## 2 1998 Q2 Adelaide South Australia Business  110.
## 3 1998 Q3 Adelaide South Australia Business  166.
## 4 1998 Q4 Adelaide South Australia Business  127.
## 5 1999 Q1 Adelaide South Australia Business  137.
## 6 1999 Q2 Adelaide South Australia Business  200.

Create a tsibble which is identical to the tourism tsibble from the tsibble package.

tourism_xl_update <- tourism_xl %>%
  mutate(Quarter = yearquarter(Quarter)) %>%
  as_tsibble(index=Quarter, key=c(Region, State, Purpose))

head(tourism_xl_update)
## # A tsibble: 6 x 5 [1Q]
## # Key:       Region, State, Purpose [1]
##   Quarter Region   State           Purpose  Trips
##     <qtr> <chr>    <chr>           <chr>    <dbl>
## 1 1998 Q1 Adelaide South Australia Business  135.
## 2 1998 Q2 Adelaide South Australia Business  110.
## 3 1998 Q3 Adelaide South Australia Business  166.
## 4 1998 Q4 Adelaide South Australia Business  127.
## 5 1999 Q1 Adelaide South Australia Business  137.
## 6 1999 Q2 Adelaide South Australia Business  200.
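As a quick sanity check (not part of the original exercise code), the rebuilt tsibble can be compared with the packaged version:

all.equal(tourism_xl_update, tourism_tb)  # compare the rebuilt and packaged tsibbles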

Find what combination of Region and Purpose had the maximum number of overnight trips on average.

max_trip_avg <- tourism_xl_update %>%
  group_by(Region, Purpose) %>%
  summarise(avg_trips = mean(Trips)) %>% 
  slice_max(avg_trips, n = 1) %>% 
  arrange(desc(avg_trips))

max_trip_avg
## # A tsibble: 76 x 4 [1Q]
## # Key:       Region, Purpose [76]
## # Groups:    Region [76]
##    Region                 Purpose  Quarter avg_trips
##    <chr>                  <chr>      <qtr>     <dbl>
##  1 Melbourne              Visiting 2017 Q4      985.
##  2 Sydney                 Business 2001 Q4      948.
##  3 South Coast            Holiday  1998 Q1      915.
##  4 North Coast NSW        Holiday  2016 Q1      906.
##  5 Brisbane               Visiting 2016 Q4      796.
##  6 Gold Coast             Holiday  2002 Q1      711.
##  7 Sunshine Coast         Holiday  2005 Q1      617.
##  8 Australia's South West Holiday  2016 Q1      612.
##  9 Great Ocean Road       Holiday  1998 Q1      548.
## 10 Experience Perth       Visiting 2016 Q1      538.
## # ℹ 66 more rows

Note that summarise() on a tsibble keeps the Quarter index, so avg_trips above is still a per-quarter value and the table ranks each Region and Purpose combination by its single best quarter. On that basis, the combination of Melbourne and Visiting comes out on top, peaking in 2017 Q4.
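To answer the question as posed (the maximum number of overnight trips on average across all quarters), the time index can be dropped before averaging. A possible sketch, with output not shown here:

tourism_xl_update %>%
  as_tibble() %>%  # drop the Quarter index so the mean is taken over all quarters
  group_by(Region, Purpose) %>%
  summarise(avg_trips = mean(Trips), .groups = "drop") %>%
  slice_max(avg_trips, n = 1)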

Create a new tsibble which combines the Purposes and Regions, and just has total trips by State.

total_trips_state <- tourism_xl_update %>%
  group_by(State) %>% 
  summarise(tot_trips = sum(Trips))

total_trips_state
## # A tsibble: 640 x 3 [1Q]
## # Key:       State [8]
##    State Quarter tot_trips
##    <chr>   <qtr>     <dbl>
##  1 ACT   1998 Q1      551.
##  2 ACT   1998 Q2      416.
##  3 ACT   1998 Q3      436.
##  4 ACT   1998 Q4      450.
##  5 ACT   1999 Q1      379.
##  6 ACT   1999 Q2      558.
##  7 ACT   1999 Q3      449.
##  8 ACT   1999 Q4      595.
##  9 ACT   2000 Q1      600.
## 10 ACT   2000 Q2      557.
## # ℹ 630 more rows
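As a quick usage check (an addition, not required by the exercise), the new state-level tsibble can be plotted directly, giving one line per State:

total_trips_state %>%
  autoplot(tot_trips)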

Exercise 2.8

Use the following graphics functions: autoplot(), gg_season(), gg_subseries(), gg_lag(), ACF() and explore features from the following time series: “Total Private” Employed from us_employment, Bricks from aus_production, Hare from pelt, “H02” Cost from PBS, and Barrels from us_gasoline.

Can you spot any seasonality, cyclicity and trend? What do you learn about the series? What can you say about the seasonal patterns? Can you identify any unusual years?

a. us_employment

head(us_employment)
## # A tsibble: 6 x 4 [1M]
## # Key:       Series_ID [1]
##      Month Series_ID     Title         Employed
##      <mth> <chr>         <chr>            <dbl>
## 1 1939 Jan CEU0500000001 Total Private    25338
## 2 1939 Feb CEU0500000001 Total Private    25447
## 3 1939 Mar CEU0500000001 Total Private    25833
## 4 1939 Apr CEU0500000001 Total Private    25801
## 5 1939 May CEU0500000001 Total Private    26113
## 6 1939 Jun CEU0500000001 Total Private    26485
private_employment <- us_employment %>% 
  filter(Title == "Total Private")

autoplot(private_employment, Employed)

gg_season(private_employment, y = Employed)
## Warning: `gg_season()` was deprecated in feasts 0.4.2.
## ℹ Please use `ggtime::gg_season()` instead.
## This warning is displayed once every 8 hours.
## Call `lifecycle::last_lifecycle_warnings()` to see where this warning was
## generated.

gg_subseries(private_employment, y = Employed)
## Warning: `gg_subseries()` was deprecated in feasts 0.4.2.
## ℹ Please use `ggtime::gg_subseries()` instead.
## This warning is displayed once every 8 hours.
## Call `lifecycle::last_lifecycle_warnings()` to see where this warning was
## generated.

gg_lag(private_employment, y = Employed)
## Warning: `gg_lag()` was deprecated in feasts 0.4.2.
## ℹ Please use `ggtime::gg_lag()` instead.
## This warning is displayed once every 8 hours.
## Call `lifecycle::last_lifecycle_warnings()` to see where this warning was
## generated.

ACF(private_employment, y = Employed) %>% 
  autoplot()

Conclusion: The ‘Total Private’ Employed series from us_employment shows a clear long-term upward trend, reflecting U.S. economic growth. The plot also captures several major events, such as the employment shifts during World War II and the sharp decline caused by the 2008 financial crisis following the collapse of Lehman Brothers. Employment is clearly tied to the economic cycle.

b. aus_production

head(aus_production)
## # A tsibble: 6 x 7 [1Q]
##   Quarter  Beer Tobacco Bricks Cement Electricity   Gas
##     <qtr> <dbl>   <dbl>  <dbl>  <dbl>       <dbl> <dbl>
## 1 1956 Q1   284    5225    189    465        3923     5
## 2 1956 Q2   213    5178    204    532        4436     6
## 3 1956 Q3   227    5297    208    561        4806     7
## 4 1956 Q4   308    5681    197    570        4418     6
## 5 1957 Q1   262    5577    187    529        4339     5
## 6 1957 Q2   228    5651    214    604        4811     7
bricks <- aus_production %>%
  select(Quarter, Bricks)

autoplot(bricks, Bricks)
## Warning: Removed 20 rows containing missing values or values outside the scale range
## (`geom_line()`).

gg_season(bricks, y=Bricks)
## Warning: Removed 20 rows containing missing values or values outside the scale range
## (`geom_line()`).

gg_subseries(bricks, y=Bricks)
## Warning: Removed 5 rows containing missing values or values outside the scale range
## (`geom_line()`).

gg_lag(bricks, y=Bricks)
## Warning: Removed 20 rows containing missing values (gg_lag).

ACF(bricks, y=Bricks)  %>% 
  autoplot()

Conclusion: The time series of Bricks from aus_production shows a clear downward trend since peaking in the 1980s. It also exhibits a strong seasonal pattern, with production rising for a few quarters before declining again.

c. Hare from pelt

head(pelt)
## # A tsibble: 6 x 3 [1Y]
##    Year  Hare  Lynx
##   <dbl> <dbl> <dbl>
## 1  1845 19580 30090
## 2  1846 19600 45150
## 3  1847 19610 49150
## 4  1848 11990 39520
## 5  1849 28040 21230
## 6  1850 58000  8420
autoplot(pelt, Hare)

# gg_season(pelt, Hare) -- not applicable: the data are annual, so there is no within-year seasonal period to plot

gg_subseries(pelt, y=Hare)

gg_lag(pelt, y=Hare)

ACF(pelt, y=Hare)  %>% 
  autoplot()

Conclusion: The Hare series from pelt shows no distinct long-term trend. However, it exhibits a strong cyclical pattern of roughly ten years, in which the number of hare pelts traded falls to very low levels (in the early 1860s, the 1870s, the 1890s, and so on) before rising again.

d. “H02” Cost from PBS
glimpse(PBS)
## Rows: 67,596
## Columns: 9
## Key: Concession, Type, ATC1, ATC2 [336]
## $ Month      <mth> 1991 Jul, 1991 Aug, 1991 Sep, 1991 Oct, 1991 Nov, 1991 Dec,…
## $ Concession <chr> "Concessional", "Concessional", "Concessional", "Concession…
## $ Type       <chr> "Co-payments", "Co-payments", "Co-payments", "Co-payments",…
## $ ATC1       <chr> "A", "A", "A", "A", "A", "A", "A", "A", "A", "A", "A", "A",…
## $ ATC1_desc  <chr> "Alimentary tract and metabolism", "Alimentary tract and me…
## $ ATC2       <chr> "A01", "A01", "A01", "A01", "A01", "A01", "A01", "A01", "A0…
## $ ATC2_desc  <chr> "STOMATOLOGICAL PREPARATIONS", "STOMATOLOGICAL PREPARATIONS…
## $ Scripts    <dbl> 18228, 15327, 14775, 15380, 14371, 15028, 11040, 15165, 168…
## $ Cost       <dbl> 67877.00, 57011.00, 55020.00, 57222.00, 52120.00, 54299.00,…
h02 <- PBS %>%
  filter(ATC2 == "H02")

autoplot(h02, Cost)

gg_season(h02, Cost) 

gg_subseries(h02, Cost)

# gg_lag(h02, Cost) -- not run: h02 contains multiple time series (one per Concession/Type key), so gg_lag() cannot be applied directly; see the workaround after the ACF plot below

ACF(h02, Cost)  %>% 
  autoplot()
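One possible workaround for the gg_lag() limitation noted above (an addition, not in the original): aggregate Cost over the Concession/Type keys so that H02 becomes a single monthly series, then lag-plot the total.

h02_total <- h02 %>%
  summarise(TotalCost = sum(Cost))  # sum Cost across all keys for each month

gg_lag(h02_total, y = TotalCost)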

Conclusion: The ‘H02’ Cost series from PBS shows that costs in the Safety net categories rise from mid-year through the following February, while the Concessional co-payments category tends to have higher costs around the middle of the year. The series exhibits strong seasonality, with peaks corresponding to periods of higher demand for these medications.

e. Barrels from us_gasoline
head(us_gasoline)
## # A tsibble: 6 x 2 [1W]
##       Week Barrels
##     <week>   <dbl>
## 1 1991 W06    6.62
## 2 1991 W07    6.43
## 3 1991 W08    6.58
## 4 1991 W09    7.22
## 5 1991 W10    6.88
## 6 1991 W11    6.95
autoplot(us_gasoline, Barrels)

gg_season(us_gasoline, Barrels)

gg_subseries(us_gasoline, Barrels)

gg_lag(us_gasoline, Barrels)

ACF(us_gasoline, Barrels)  %>% 
  autoplot()

Conclusion: The Barrels series from us_gasoline shows a slight upward trend overall, although a decline begins in the late 2000s, corresponding to the 2008 financial crisis. In addition, the series exhibits a clear seasonal pattern, with consumption rising from around March, peaking between mid-year and October, and then declining toward the end of the year.