Four Years of Monthly Samsung Data
library(quantmod)
library(fpp2)
# Read the downloaded monthly price data for Samsung Electronics (005930.KS)
ss <- read.csv("C:/Users/burtkb/Downloads/005930.KS.csv")
# Build a monthly time series from the price column (column 2), Dec 2016 through Dec 2020
ss.ts = ts(ss[,2],start=c(2016,12),end=c(2020,12),frequency=12)
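Since quantmod is loaded, the same series could also be pulled straight from Yahoo Finance rather than a local CSV. A minimal sketch, assuming the ticker and date range implied by the file name and the ts() call above (ss.xts and ss.monthly are illustrative names):
ss.xts <- getSymbols("005930.KS", src="yahoo", from="2016-12-01", to="2020-12-01", auto.assign=FALSE)
ss.monthly <- to.monthly(ss.xts)  # aggregate the daily bars to monthly OHLC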
autoplot(ss.ts)+ggtitle("Samsung Price - Dec 2016-Dec 2020")
ggseasonplot(ss.ts)
There is an upward trend, which is to be expected: technology has become much more important over the past year as more people and businesses rely on it to communicate during the pandemic. There appears to be no real seasonality.
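That impression can be double-checked in code. A minimal sketch using two plots from the forecast package: the subseries plot separates the observations by month, and the ACF shows whether a lag-12 correlation stands out.
ggsubseriesplot(ss.ts)  # month-by-month subseries; similar levels across months suggest little seasonality
ggAcf(ss.ts)            # autocorrelations; no spike at lag 12 points the same way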
# Training window: through the start of 2019
ss.train = window(ss.ts,end=c(2019))
# Hold-out window: start of 2019 through the start of 2020
ss2 = window(ss.ts,start=c(2019),end=c(2020))
# Neural-network autoregressions (NNAR) fitted to the training window, with an
# automatically chosen Box-Cox transformation and 2, 4, 5, and 1 hidden nodes respectively
s.nn.1=nnetar(ss.train,lambda="auto",size=2)
s.nn.2=nnetar(ss.train,lambda="auto",size=4)
s.nn.3=nnetar(ss.train,lambda="auto",size=5)
s.nn.4=nnetar(ss.train,lambda="auto",size=1)
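One caveat before forecasting: nnetar() averages 20 networks fitted from random starting weights, so the accuracy figures below can shift slightly between runs. Fixing the RNG seed before fitting makes them reproducible; a minimal sketch (the seed value is arbitrary):
set.seed(1)  # any fixed value; chosen only for illustration
s.nn.1=nnetar(ss.train,lambda="auto",size=2)  # refit with the seed in place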
# 10-month-ahead forecasts from each fitted network
fs1=forecast(s.nn.1,h=10)
fs2=forecast(s.nn.2,h=10)
fs3=forecast(s.nn.3,h=10)
fs4=forecast(s.nn.4,h=10)
autoplot(fs1)
autoplot(fs2)
autoplot(fs3)
autoplot(fs4)
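The accuracy() calls below report training-set error only. Because the 2019-2020 window ss2 was created above, it could also be passed as a test set so the four networks are compared on data they were not fitted to. A minimal sketch:
accuracy(fs1,ss2)
accuracy(fs2,ss2)
accuracy(fs3,ss2)
accuracy(fs4,ss2)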
accuracy(fs1)
##                    ME     RMSE      MAE        MPE     MAPE      MASE         ACF1
## Training set 74.22038 756.8094 621.4815 -0.6334127 4.501968 0.0474317 -0.008336545
accuracy(fs2)
##                    ME     RMSE      MAE        MPE     MAPE       MASE       ACF1
## Training set 4.356227 629.7068 461.4286 -0.2275234 3.523341 0.03521641 -0.2733203
accuracy(fs3)
##                    ME     RMSE      MAE        MPE     MAPE       MASE       ACF1
## Training set 11.02638 623.9516 448.0004 -0.1767838 3.425161 0.03419156 -0.3124748
accuracy(fs4)
##                    ME     RMSE      MAE        MPE     MAPE       MASE       ACF1
## Training set 8.823989 713.8361 601.2165 -0.2837735 4.611245 0.04588507 -0.2616706
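To compare the four models at a glance, the training-set rows of the accuracy() matrices can be bound into one table. A minimal sketch (acc is an illustrative name; the row labels record the hidden-layer size of each network):
acc <- rbind(
  "size=2" = accuracy(fs1)["Training set", ],
  "size=4" = accuracy(fs2)["Training set", ],
  "size=5" = accuracy(fs3)["Training set", ],
  "size=1" = accuracy(fs4)["Training set", ]
)
round(acc, 4)
On the training set the size=5 network (fs3) has the lowest RMSE and MAE, but training error tends to favour larger networks, so the hold-out comparison sketched earlier is the fairer basis for choosing among them.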