Get Data and Build Models

# !! Please run this section before calling the main function !!

library("quantmod")
## Loading required package: xts
## Loading required package: zoo
## 
## Attaching package: 'zoo'
## The following objects are masked from 'package:base':
## 
##     as.Date, as.Date.numeric
## Loading required package: TTR
## Version 0.4-0 included new data defaults. See ?getSymbols.
library("forecast")
## Loading required package: timeDate
## This is forecast 7.3
getSymbols(Symbols = c("DEXJPUS","DEXUSEU","DEXUSUK"),src="FRED")
##     As of 0.4-0, 'getSymbols' uses env=parent.frame() and
##  auto.assign=TRUE by default.
## 
##  This  behavior  will be  phased out in 0.5-0  when the call  will
##  default to use auto.assign=FALSE. getOption("getSymbols.env") and 
##  getOptions("getSymbols.auto.assign") are now checked for alternate defaults
## 
##  This message is shown once per session and may be disabled by setting 
##  options("getSymbols.warning4.0"=FALSE). See ?getSymbols for more details.
## [1] "DEXJPUS" "DEXUSEU" "DEXUSUK"
ex.data<-cbind(DEXJPUS,DEXUSEU,DEXUSUK)["2013-12-02/2016-12-02"]
# load the required packages and pull the three exchange-rate series from FRED for the chosen window.

ex.data<-na.omit(data.frame(date=index(ex.data), coredata(ex.data)))
ex.ts<-ts(data=ex.data,frequency=22)
# drop NA rows and convert the data to a time series with frequency 22, roughly the number of
# working days per month; the date column becomes column 1, so the rates are columns 2-4.
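
The start-up message above notes that getSymbols will eventually default to auto.assign=FALSE; an equivalent retrieval in that style would look like the sketch below (the variable names jp, eu and uk are my own).

# equivalent retrieval without auto.assign, per the quantmod start-up message
jp <- getSymbols("DEXJPUS", src = "FRED", auto.assign = FALSE)
eu <- getSymbols("DEXUSEU", src = "FRED", auto.assign = FALSE)
uk <- getSymbols("DEXUSUK", src = "FRED", auto.assign = FALSE)
ex.data2 <- cbind(jp, eu, uk)["2013-12-02/2016-12-02"]   # same window as above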

ex.jp<-arima(ex.ts[,2],order=c(7,2,1),seasonal = list(order=c(1,1,0),period=22))
ex.eu<-arima(ex.ts[,3],order=c(7,2,1),seasonal = list(order=c(1,1,0),period=22))
ex.uk<-arima(ex.ts[,4],order=c(7,2,1),seasonal = list(order=c(1,1,0),period=22))
# fit one SARIMA model each for the JP, EU, and UK series.
# the method used to choose these parameters is described in the Supplementary Explanation.
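
Not in the request, but a quick sanity check of these fits may be worthwhile; a sketch, assuming the three models above are in memory (the Ljung-Box test should show no strong residual autocorrelation):

AIC(ex.jp); AIC(ex.eu); AIC(ex.uk)                        # compare overall fit quality
Box.test(residuals(ex.jp), lag = 22, type = "Ljung-Box")  # residual autocorrelation test
tsdiag(ex.jp)                                             # standard residual diagnostic plots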

Main Function

# ! Please supply the start and end dates in "yyyy-mm-dd" format !
predex<-function(sd,ed){

sd<-as.Date(sd)
ed<-as.Date(ed)
l<-as.Date(tail(ex.data$date,1))
# convert to Date objects and take the last date in the data.

n1<-sum(!weekdays(seq(l,sd,"days")) %in% c("Saturday", "Sunday"))
n2<-sum(!weekdays(seq(sd,ed,"days")) %in% c("Saturday", "Sunday"))
n3<-sum(!weekdays(seq(l,ed,"days")) %in% c("Saturday", "Sunday"))
# count working days from the last data date to the start date and to the end date.
# *n1 is not actually used below; I keep it to draw the differences clearly.
# (a small helper for this repeated pattern is sketched after the function.)

x <- seq(sd, ed, by = 1)
x<- x[!grepl("S(at|un)", weekdays(x))]
# collect the working-day dates from start to end; they become the row names of the output table.

pr.jp<-forecast(ex.jp,n3-1)
pr.eu<-forecast(ex.eu,n3-1)
pr.uk<-forecast(ex.uk,n3-1)
# forecast each model over the n3 - 1 working days after the last data date.

pr.ex<-cbind(pr.jp$mean[(n3-n2):(n3-1)],pr.eu$mean[(n3-n2):(n3-1)],pr.uk$mean[(n3-n2):(n3-1)])
colnames(pr.ex)<-c("JP/US","US/EU","US/UK")
rownames(pr.ex)<-as.character(as.Date(x))
# build the table of forecast point estimates for the three exchange rates.
# steps (n3-n2) to (n3-1) are the working days from sd to ed; in the test below
# (l = 2016-12-02, sd = 2016-12-04, ed = 2016-12-12), n2 = 6 and n3 = 7, so steps 1:6 are kept.

pr.pa<-cbind(c("p","d","q","P","D","Q","Period"),c(7,2,1,1,1,0,22))
colnames(pr.pa)<-c("SARIMA parameters","Value")
pr<-list(pr.ex,pr.pa)

pr.cl<-cbind(pr.jp$lower[(n3-n2):(n3-1),],pr.jp$upper[(n3-n2):(n3-1),],
             pr.eu$lower[(n3-n2):(n3-1),],pr.eu$upper[(n3-n2):(n3-1),],
             pr.uk$lower[(n3-n2):(n3-1),],pr.uk$upper[(n3-n2):(n3-1),])
colnames(pr.cl)<-c("JP/US.lo80","JP/US.lo95","JP/US.hi80","JP/US.hi95",
                   "US/EU.lo80","US/EU.lo95","US/EU.hi80","US/EU.hi95",
                   "US/UK.lo80","US/UK.lo95","US/UK.hi80","US/UK.hi95")
rownames(pr.cl)<-as.character(as.Date(x))
# this part computes the 80% and 95% prediction intervals of the same estimates;
# it is not part of the function's output. ($lower and $upper are two-column
# matrices, hence the trailing comma when selecting rows.)

return(pr)

}
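
The three working-day counts inside predex repeat the same expression; a small helper would make the intent clearer. This is a sketch of my own, not part of the original function, and like the original it assumes English day names from weekdays():

# hypothetical helper: count Mon-Fri dates between two dates, inclusive
workdays <- function(from, to)
  sum(!weekdays(seq(from, to, by = "days")) %in% c("Saturday", "Sunday"))
workdays(as.Date("2016-12-02"), as.Date("2016-12-12"))   # 7, the n3 of the test below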

Supplementary Explanation

# function test from "2016-12-04" to "2016-12-12"
predex(sd = "2016-12-04",ed = "2016-12-12")
## [[1]]
##               JP/US    US/EU    US/UK
## 2016-12-05 113.7410 1.071500 1.245169
## 2016-12-06 112.8843 1.075270 1.252046
## 2016-12-07 113.0425 1.072548 1.247475
## 2016-12-08 113.5284 1.074042 1.247673
## 2016-12-09 114.7664 1.068723 1.240365
## 2016-12-12 115.2147 1.066741 1.234103
## 
## [[2]]
##      SARIMA parameters Value
## [1,] "p"               "7"  
## [2,] "d"               "2"  
## [3,] "q"               "1"  
## [4,] "P"               "1"  
## [5,] "D"               "1"  
## [6,] "Q"               "0"  
## [7,] "Period"          "22"
# although not part of the request, I think the prediction intervals should also be printed.
# here are the 80% and 95% intervals for the next 5 estimates.
pr.jp5<-forecast(ex.jp,5)
pr.eu5<-forecast(ex.eu,5)
pr.uk5<-forecast(ex.uk,5)
pr.cl<-cbind(pr.jp5$lower,pr.jp5$upper,pr.eu5$lower,pr.eu5$upper,pr.uk5$lower,pr.uk5$upper)
colnames(pr.cl)<-c("JP/US.lo80","JP/US.lo95","JP/US.hi80","JP/US.hi95","US/EU.lo80","US/EU.lo95","US/EU.hi80","US/EU.hi95","US/UK.lo80","US/UK.lo95","US/UK.hi80","US/UK.hi95")
pr.cl
##      JP/US.lo80 JP/US.lo95 JP/US.hi80 JP/US.hi95 US/EU.lo80 US/EU.lo95
## [1,]   112.1197   111.5407   114.3072   114.8862   1.052502   1.047053
## [2,]   111.8881   111.0455   115.0713   115.9138   1.049710   1.041948
## [3,]   111.5958   110.5618   115.5022   116.5361   1.047629   1.038125
## [4,]   111.2344   110.0280   115.7925   116.9989   1.043687   1.032838
## [5,]   110.9499   109.6051   116.0309   117.3757   1.042210   1.030067
##      US/EU.hi80 US/EU.hi95 US/UK.lo80 US/UK.lo95 US/UK.hi80 US/UK.hi95
## [1,]   1.073086   1.078535   1.238754   1.231299   1.266921   1.274376
## [2,]   1.079034   1.086795   1.221890   1.210732   1.264046   1.275204
## [3,]   1.083538   1.093043   1.218861   1.205036   1.271092   1.284917
## [4,]   1.084677   1.095526   1.213167   1.197396   1.272752   1.288524
## [5,]   1.088086   1.100229   1.213171   1.195616   1.279498   1.297053
# admittedly, 22 working days per period may not be a very good estimate of the season,
# but I keep it because it is a reasonable approximation of one month.
# *only with monthly data did auto.arima give me a clear 12-month period (sketched below).
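
For reference, a reconstruction of that monthly experiment (the exact call may have differed); to.monthly comes from xts, Cl from quantmod, and the January 1971 start is an assumption about FRED's first observation for this series:

jp.m <- Cl(to.monthly(na.omit(DEXJPUS)))             # month-end values of the daily series
jp.m.ts <- ts(as.numeric(jp.m), frequency = 12,
              start = c(1971, 1))                    # assumed start date; adjust if needed
auto.arima(jp.m.ts)                                  # this is where the 12-month period showed up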

ex.com<-lapply(ex.ts[,2:4], decompose)
plot(ex.com$DEXJPUS)

plot(ex.com$DEXUSEU)

plot(ex.com$DEXUSUK)

# decompose each series and plot its trend, seasonal, and random components.
# the three plots show how much each component contributes to the data.
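
One caveat on the lapply call above: it appears to work only because zoo (loaded with quantmod) provides an as.list method that splits a multivariate ts by column; in a plain stats session lapply would iterate over single elements. A version that does not rely on that, as a sketch (ex.com2 is my own name):

# column-wise decomposition without depending on zoo's as.list.ts
ex.com2 <- setNames(lapply(2:4, function(i) decompose(ex.ts[, i])),
                    colnames(ex.ts)[2:4])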


# the ACF and PACF plots guide the choice of the SARIMA parameters.
# by trying d = 2, I get fairly clean plots for both.

Acf(diff(ex.ts[,2],differences = 2),lag.max = 50)

Pacf(diff(ex.ts[,2],differences = 2),lag.max = 50)

# the ACF suggests q = 1, and the PACF suggests p = 7.
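
To double-check this reading of the plots, one can fit a few nearby candidate orders and compare AIC; a sketch with a candidate grid of my own choosing (it fits twelve seasonal models, so it can take a while):

# compare AIC over a small grid of non-seasonal orders around (7,2,1)
cand <- expand.grid(p = 5:8, q = 0:2)
cand$aic <- apply(cand, 1, function(o)
  tryCatch(AIC(arima(ex.ts[, 2], order = c(o["p"], 2, o["q"]),
                     seasonal = list(order = c(1, 1, 0), period = 22))),
           error = function(e) NA))                  # NA if a fit fails to converge
cand[which.min(cand$aic), ]                          # best candidate by AIC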

# to find the seasonal P, D, Q, I tried different combinations with the auto.arima function;
# P = 1, D = 1, Q = 0 came out, which made more sense than the alternatives in the plots.

# auto.arima can also pick p, d, q directly (see the sketch below);
# however, the resulting model is too flat and not realistic.
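
That auto.arima route might look like the following sketch; the d and D constraints are mine, to match the differencing used above, and the search can be slow with a period of 22:

# automatic order selection on the JP/US series, fixing the differencing
auto.arima(ex.ts[, 2], d = 2, D = 1, seasonal = TRUE)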

# in my opinion, the forecasts are only credible within one period, i.e. 22 working days
# (a holdout check is sketched below).
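
That claim can be checked with a simple holdout, which is not in the original: refit on all but the last 22 observations, forecast one period ahead, and score against the held-out values with forecast's accuracy function.

# holdout check on the JP/US series: train without the last period, score the 22-step forecast
n <- nrow(ex.ts)
train <- ts(ex.ts[1:(n - 22), 2], frequency = 22)
fit <- arima(train, order = c(7, 2, 1),
             seasonal = list(order = c(1, 1, 0), period = 22))
accuracy(forecast(fit, 22), ex.ts[(n - 21):n, 2])    # test-set RMSE, MAE, etc.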

par(mfrow=c(3,1))
plot(forecast(ex.jp,22),main="JP-US in next period (22 days)")
plot(forecast(ex.eu,22),main="US-EU in next period (22 days)")
plot(forecast(ex.uk,22),main="US-UK in next period (22 days)")

# point forecasts (with prediction intervals) for each rate over the next period.

Some Comments

Exchange rates cannot be predicted accurately because too many factors interact to drive them.

I have tried different sizes and types of data, with many possible parameters, to capture the overall trend. With monthly data I get fairly clean models using a 12-month period. However, the frequency that R's SARIMA implementation can handle is limited, so I cannot model the whole history from 1971 to 2016 at once (on the other hand, this is acceptable, since data from the distant past may be out of date).

Another reason is that exchange rates might be manipulated. In the US-EU series, for example, every time the rate fell to about 1.05 it rose again within the next few days; this may reflect government intervention to control the rate.

Similarly for US-UK: the rate has fallen significantly since Britain voted to leave the EU, and the models suggest it will probably keep falling. However, I think the rate behaves like a spring: it can be pushed lower, but it cannot keep falling without resistance.

Many political and economic rules would have to be built into a systematic algorithm, something an experienced analyst could do but which is well beyond my ability. The models I provide are inflexible, and I hope to improve them with further study in this field.