#================================================================================================
# https://systematicinvestor.wordpress.com/2011/12/13/backtesting-minimum-variance-portfolios/
#
# Load Systematic Investor Toolbox (SIT)
# setInternet2(TRUE)

rm(list=ls())
con = gzcon(url('https://github.com/systematicinvestor/SIT/raw/master/sit.gz', 'rb'))
source(con)
close(con)

#*****************************************************************
# Load historical data
#******************************************************************

library(pacman)
p_load(quantmod, quadprog, lpSolve)

# 1. S&P 500 (SPY)
# 2. Nasdaq 100 (QQQ)
# 3. Emerging Markets (EEM)
# 4. Russell 2000 (IWM)
# 5. MSCI EAFE (EFA)
# 6. Long-term Treasury Bonds (TLT)
# 7. Real Estate (IYR)
# 8. Gold (GLD)

tickers = spl('SPY,QQQ,EEM,IWM,EFA,TLT,IYR,GLD')
data <- new.env()
getSymbols(tickers, src = 'yahoo', from = '1980-01-01', env = data, auto.assign = T)
## 'getSymbols' currently uses auto.assign=TRUE by default, but will
## use auto.assign=FALSE in 0.5-0. You will still be able to use
## 'loadSymbols' to automatically load data. getOption("getSymbols.env")
## and getOption("getSymbols.auto.assign") will still be checked for
## alternate defaults.
## 
## This message is shown once per session and may be disabled by setting 
## options("getSymbols.warning4.0"=FALSE). See ?getSymbols for details.
## pausing 1 second between requests for more than 5 symbols
## pausing 1 second between requests for more than 5 symbols
## pausing 1 second between requests for more than 5 symbols
## pausing 1 second between requests for more than 5 symbols
## [1] "SPY" "QQQ" "EEM" "IWM" "EFA" "TLT" "IYR" "GLD"
for(i in ls(data)) data[[i]] = adjustOHLC(data[[i]], use.Adjusted=T)
data.weekly <- new.env()
for(i in tickers) data.weekly[[i]] = to.weekly(data[[i]], indexAt='endof')
bt.prep(data, align='remove.na', dates='1990::2011')
bt.prep(data.weekly, align='remove.na', dates='1990::2011')
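Note that align='remove.na' keeps only the dates on which every series has data, so even though dates='1990::2011' is requested, the aligned history effectively starts in late 2004 when the youngest ETF in the universe (GLD) begins trading, as the head(prices) output below confirms. A quick way to verify the effective date range (a minimal sketch, output not shown):

# Sanity check (illustrative): the aligned daily history should start in
# November 2004, when GLD, the youngest ETF in the universe, started trading
range(index(data$prices))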

#*****************************************************************
# Code Strategies
#******************************************************************

prices = data$prices   
n = ncol(prices)

Find week ends:

head(prices)
##                          EEM      EFA   GLD      IWM      IYR      QQQ      SPY
## 2004-11-18 08:00:00 15.56904 32.33311 44.38 49.51762 30.25960 34.16679 85.60189
## 2004-11-19 08:00:00 15.34645 32.12376 44.78 48.99015 29.84475 33.63632 84.65029
## 2004-11-22 08:00:00 15.40635 32.21796 44.95 49.50964 30.06772 33.95809 85.05399
## 2004-11-23 08:00:00 15.43305 32.28077 44.75 49.81733 30.38926 33.90590 85.18372
## 2004-11-24 08:00:00 15.59736 32.42732 45.05 50.11702 30.80413 34.20155 85.38561
## 2004-11-26 08:00:00 15.85637 32.61573 45.29 50.14898 30.62261 34.09720 85.32074
##                          TLT
## 2004-11-18 08:00:00 52.29156
## 2004-11-19 08:00:00 51.87427
## 2004-11-22 08:00:00 52.14464
## 2004-11-23 08:00:00 52.20925
## 2004-11-24 08:00:00 52.20925
## 2004-11-26 08:00:00 51.86834
# endpoints() prepends a zero, so keep only the positive row positions
week.ends = endpoints(prices, 'weeks')
week.ends = week.ends[week.ends > 0]

Equal Weight 1/N Benchmark

data$weight[] = NA
data$weight[week.ends,] = ntop(prices[week.ends,], n)       
capital = 100000
data$weight[] = (capital / prices) * data$weight
equal.weight = bt.run(data, type='share')
## Latest weights :
##                       EEM   EFA   GLD  IWM   IYR   QQQ   SPY   TLT
## 2011-12-30 08:00:00 12.38 12.51 12.08 12.5 12.56 12.56 12.54 12.88
## 
## Performance summary :
##  CAGR    Best    Worst   
##  8.7 10.2    -8.6    
head(equal.weight$ret)
##                             EEM
## 2004-11-18 08:00:00 0.000000000
## 2004-11-19 08:00:00 0.000000000
## 2004-11-22 08:00:00 0.006031753
## 2004-11-23 08:00:00 0.002175574
## 2004-11-24 08:00:00 0.006590953
## 2004-11-26 08:00:00 0.001506593
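The line data$weight[] = (capital / prices) * data$weight converts the percentage allocations into share counts, which bt.run(type='share') then trades at the share level. A minimal standalone sketch of that conversion, using illustrative numbers rather than values taken from the backtest:

# Illustrative example of the weight-to-shares conversion used above:
# with $100,000 of capital, a 1/8 allocation to an asset priced at $85.60
# works out to capital * weight / price shares of that asset
capital.ex = 100000
weight.ex  = 1/8
price.ex   = 85.60
shares.ex  = capital.ex * weight.ex / price.ex   # roughly 146 shares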

#*****************************************************************
# Create Constraints
#*****************************************************************

constraints = new.constraints(n, lb = -Inf, ub = +Inf)

Full-investment constraint: sum(x[i]) = 1

constraints = add.constraints(rep(1, n), 1, type = '=', constraints)        
ret = prices / mlag(prices) - 1
weight = coredata(prices)
weight[] = NA

data$weight[64:68,]
##                          EEM      EFA      GLD      IWM      IYR      QQQ
## 2005-02-18 08:00:00 715.9791 363.6785 292.3977 248.3657 402.5947 381.2043
## 2005-02-22 08:00:00       NA       NA       NA       NA       NA       NA
## 2005-02-23 08:00:00       NA       NA       NA       NA       NA       NA
## 2005-02-24 08:00:00       NA       NA       NA       NA       NA       NA
## 2005-02-25 08:00:00 691.6362 359.6088 287.3563 245.3949 406.8868 378.4682
##                          SPY      TLT
## 2005-02-18 08:00:00 143.3464 231.3586
## 2005-02-22 08:00:00       NA       NA
## 2005-02-23 08:00:00       NA       NA
## 2005-02-24 08:00:00       NA       NA
## 2005-02-25 08:00:00 142.1187 231.0026

for( i in week.ends[week.ends >= (63 + 1)] ) {
  # one quarter is 63 days
  hist = ret[ (i- 63 +1):i, ]
  
  # create historical input assumptions
  ia = create.historical.ia(hist, 252)
  s0 = apply(coredata(hist), 2, sd)
  # rebuild the covariance matrix from the correlation matrix (computed on
  # complete observations) and the per-asset standard deviations
  ia$cov = cor(coredata(hist), use='complete.obs', method='pearson') * (s0 %*% t(s0))
  
  weight[i,] = min.risk.portfolio(ia, constraints)
}
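For reference, min.risk.portfolio solves a minimum variance quadratic program: minimize w' Cov w subject to the constraints added above (here only sum(w) = 1, with no bounds, so short positions are allowed). Below is a minimal sketch of the same problem solved directly with solve.QP from the quadprog package loaded earlier; min.var.qp is just an illustrative helper, not part of SIT:

# Minimal sketch (not the SIT implementation): long/short minimum variance,
#   minimize w' Cov w   subject to   sum(w) = 1
# Requires a positive-definite covariance matrix.
min.var.qp = function(cov.mat) {
  n = ncol(cov.mat)
  solve.QP(Dmat = cov.mat,            # quadratic term: covariance matrix
           dvec = rep(0, n),          # no linear term in the objective
           Amat = matrix(1, n, 1),    # single constraint column: sum of weights
           bvec = 1,                  # ... must equal one
           meq  = 1)$solution         # the first constraint is an equality
}
# e.g. min.var.qp(ia$cov) uses the covariance left over from the last loop iteration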

head(weight, 70)
##               EEM        EFA       GLD        IWM         IYR        QQQ
##  [1,]          NA         NA        NA         NA          NA         NA
##  [2,]          NA         NA        NA         NA          NA         NA
##  [3,]          NA         NA        NA         NA          NA         NA
##  [4,]          NA         NA        NA         NA          NA         NA
##  [5,]          NA         NA        NA         NA          NA         NA
##  [6,]          NA         NA        NA         NA          NA         NA
##  [7,]          NA         NA        NA         NA          NA         NA
##  [8,]          NA         NA        NA         NA          NA         NA
##  [9,]          NA         NA        NA         NA          NA         NA
## [10,]          NA         NA        NA         NA          NA         NA
## [11,]          NA         NA        NA         NA          NA         NA
## [12,]          NA         NA        NA         NA          NA         NA
## [13,]          NA         NA        NA         NA          NA         NA
## [14,]          NA         NA        NA         NA          NA         NA
## [15,]          NA         NA        NA         NA          NA         NA
## [16,]          NA         NA        NA         NA          NA         NA
## [17,]          NA         NA        NA         NA          NA         NA
## [18,]          NA         NA        NA         NA          NA         NA
## [19,]          NA         NA        NA         NA          NA         NA
## [20,]          NA         NA        NA         NA          NA         NA
## [21,]          NA         NA        NA         NA          NA         NA
## [22,]          NA         NA        NA         NA          NA         NA
## [23,]          NA         NA        NA         NA          NA         NA
## [24,]          NA         NA        NA         NA          NA         NA
## [25,]          NA         NA        NA         NA          NA         NA
## [26,]          NA         NA        NA         NA          NA         NA
## [27,]          NA         NA        NA         NA          NA         NA
## [28,]          NA         NA        NA         NA          NA         NA
## [29,]          NA         NA        NA         NA          NA         NA
## [30,]          NA         NA        NA         NA          NA         NA
## [31,]          NA         NA        NA         NA          NA         NA
## [32,]          NA         NA        NA         NA          NA         NA
## [33,]          NA         NA        NA         NA          NA         NA
## [34,]          NA         NA        NA         NA          NA         NA
## [35,]          NA         NA        NA         NA          NA         NA
## [36,]          NA         NA        NA         NA          NA         NA
## [37,]          NA         NA        NA         NA          NA         NA
## [38,]          NA         NA        NA         NA          NA         NA
## [39,]          NA         NA        NA         NA          NA         NA
## [40,]          NA         NA        NA         NA          NA         NA
## [41,]          NA         NA        NA         NA          NA         NA
## [42,]          NA         NA        NA         NA          NA         NA
## [43,]          NA         NA        NA         NA          NA         NA
## [44,]          NA         NA        NA         NA          NA         NA
## [45,]          NA         NA        NA         NA          NA         NA
## [46,]          NA         NA        NA         NA          NA         NA
## [47,]          NA         NA        NA         NA          NA         NA
## [48,]          NA         NA        NA         NA          NA         NA
## [49,]          NA         NA        NA         NA          NA         NA
## [50,]          NA         NA        NA         NA          NA         NA
## [51,]          NA         NA        NA         NA          NA         NA
## [52,]          NA         NA        NA         NA          NA         NA
## [53,]          NA         NA        NA         NA          NA         NA
## [54,]          NA         NA        NA         NA          NA         NA
## [55,]          NA         NA        NA         NA          NA         NA
## [56,]          NA         NA        NA         NA          NA         NA
## [57,]          NA         NA        NA         NA          NA         NA
## [58,]          NA         NA        NA         NA          NA         NA
## [59,]          NA         NA        NA         NA          NA         NA
## [60,]          NA         NA        NA         NA          NA         NA
## [61,]          NA         NA        NA         NA          NA         NA
## [62,]          NA         NA        NA         NA          NA         NA
## [63,]          NA         NA        NA         NA          NA         NA
## [64,] -0.02270568 -0.1618535 0.3211152 -0.2284190 -0.05799197 -0.2286962
## [65,]          NA         NA        NA         NA          NA         NA
## [66,]          NA         NA        NA         NA          NA         NA
## [67,]          NA         NA        NA         NA          NA         NA
## [68,] -0.02462253 -0.1630153 0.3324164 -0.2562777 -0.06761583 -0.1685113
## [69,]          NA         NA        NA         NA          NA         NA
## [70,]          NA         NA        NA         NA          NA         NA
##            SPY       TLT
##  [1,]       NA        NA
##  [2,]       NA        NA
##  [3,]       NA        NA
##  [4,]       NA        NA
##  [5,]       NA        NA
##  [6,]       NA        NA
##  [7,]       NA        NA
##  [8,]       NA        NA
##  [9,]       NA        NA
## [10,]       NA        NA
## [11,]       NA        NA
## [12,]       NA        NA
## [13,]       NA        NA
## [14,]       NA        NA
## [15,]       NA        NA
## [16,]       NA        NA
## [17,]       NA        NA
## [18,]       NA        NA
## [19,]       NA        NA
## [20,]       NA        NA
## [21,]       NA        NA
## [22,]       NA        NA
## [23,]       NA        NA
## [24,]       NA        NA
## [25,]       NA        NA
## [26,]       NA        NA
## [27,]       NA        NA
## [28,]       NA        NA
## [29,]       NA        NA
## [30,]       NA        NA
## [31,]       NA        NA
## [32,]       NA        NA
## [33,]       NA        NA
## [34,]       NA        NA
## [35,]       NA        NA
## [36,]       NA        NA
## [37,]       NA        NA
## [38,]       NA        NA
## [39,]       NA        NA
## [40,]       NA        NA
## [41,]       NA        NA
## [42,]       NA        NA
## [43,]       NA        NA
## [44,]       NA        NA
## [45,]       NA        NA
## [46,]       NA        NA
## [47,]       NA        NA
## [48,]       NA        NA
## [49,]       NA        NA
## [50,]       NA        NA
## [51,]       NA        NA
## [52,]       NA        NA
## [53,]       NA        NA
## [54,]       NA        NA
## [55,]       NA        NA
## [56,]       NA        NA
## [57,]       NA        NA
## [58,]       NA        NA
## [59,]       NA        NA
## [60,]       NA        NA
## [61,]       NA        NA
## [62,]       NA        NA
## [63,]       NA        NA
## [64,] 1.140581 0.2379699
## [65,]       NA        NA
## [66,]       NA        NA
## [67,]       NA        NA
## [68,] 1.089748 0.2578782
## [69,]       NA        NA
## [70,]       NA        NA
# Minimum Variance
data$weight[] = weight      
capital = 100000
data$weight[] = (capital / prices) * data$weight
min.var.daily = bt.run(data, type='share', capital=capital)
## Latest weights :
##                        EEM   EFA   GLD    IWM   IYR    QQQ    SPY   TLT
## 2011-12-30 08:00:00 -14.04 -1.91 -0.77 -19.12 -10.8 -22.68 121.86 47.47
## 
## Performance summary :
##  CAGR    Best    Worst   
##  10.4    2.8 -2.6    

Next let’s create Minimum Variance portfolios using weekly data:

#*****************************************************************
# Code Strategies: Weekly
#******************************************************************

retw = data.weekly$prices / mlag(data.weekly$prices) - 1
weightw = coredata(prices)
weightw[] = NA

for( i in week.ends[week.ends >= (63 + 1)] ) {   
  # map the daily rebalancing date to its row in the weekly return series
  j = which(index(ret[i,]) == index(retw))
  
  # one quarter = 13 weeks
  hist = retw[ (j- 13 +1):j, ]
  
  # create historical input assumptions
  ia = create.historical.ia(hist, 52)
  s0 = apply(coredata(hist),2,sd)     
  ia$cov = cor(coredata(hist), use='complete.obs',method='pearson') * (s0 %*% t(s0))
  
  weightw[i,] = min.risk.portfolio(ia, constraints)
}   
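The mapping step relies on each daily week-end date also appearing in the weekly return series, which is the case here since the weekly bars are stamped with the end-of-week date via indexAt='endof' and both environments were prepared by bt.prep over the same dates. A quick sanity check along those lines (a sketch, not part of the original post):

# Illustrative check: every daily rebalancing date should match exactly one
# row of the weekly return series; a count of zero would signal a calendar
# mismatch between the daily and weekly data
stopifnot(all(sapply(week.ends[week.ends >= (63 + 1)],
                     function(i) sum(index(retw) == index(ret[i,])) == 1)))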

data$weight[] = weightw     
capital = 100000
data$weight[] = (capital / prices) * data$weight
min.var.weekly = bt.run(data, type='share', capital=capital)
## Latest weights :
##                        EEM   EFA   GLD    IWM   IYR   QQQ   SPY   TLT
## 2011-12-30 08:00:00 -29.45 47.89 15.09 -61.59 26.66 58.15 -4.27 47.51
## 
## Performance summary :
##  CAGR    Best    Worst   
##  14  4.3 -4.8    

#*****************************************************************
# Create Report
#******************************************************************

plotbt.custom.report.part1(min.var.weekly, min.var.daily, equal.weight)

# plot Daily and Weekly transition maps

layout(1:2)
plotbt.transition.map(min.var.daily$weight)
legend('topright', legend = 'min.var.daily', bty = 'n')
plotbt.transition.map(min.var.weekly$weight)
legend('topright', legend = 'min.var.weekly', bty = 'n')

#
# I find it very interesting that the Minimum Variance portfolios constructed
# using daily returns to create input assumptions are way different from the
# Minimum Variance portfolios constructed using weekly returns to create
# input assumptions. One possible explanation for this discrepancy was examined
# by Pat Burns in "The volatility mystery continues":
# https://www.portfolioprobe.com/2011/12/05/the-volatility-mystery-continues/
# https://www.portfolioprobe.com/2011/11/08/the-mystery-of-volatility-estimates-from-daily-versus-monthly-returns/

#-------------------------------------------------------------------------------
# To make the covariance matrix estimate more stable, use the Ledoit-Wolf
# covariance shrinkage estimator from the tawny package

ia$cov = tawny::cov.shrink(hist)

or

ia$cov = cor(coredata(hist), use='complete.obs',method='spearman') * (s0 %*% t(s0))

or

ia$cov = cor(coredata(hist), use='complete.obs',method='kendall') * (s0 %*% t(s0))
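Any of these estimators can be dropped into the rebalancing loop in place of the Pearson-based estimate. A minimal sketch of the daily loop with the Ledoit-Wolf shrinkage estimator swapped in (this assumes the tawny package is installed; everything else is unchanged from the daily loop above):

# Sketch: daily minimum variance loop with a shrunk covariance estimate;
# only the ia$cov line differs from the original loop
p_load(tawny)
for( i in week.ends[week.ends >= (63 + 1)] ) {
  hist = ret[ (i - 63 + 1):i, ]
  ia = create.historical.ia(hist, 252)
  ia$cov = tawny::cov.shrink(hist)   # Ledoit-Wolf shrinkage estimator
  weight[i,] = min.risk.portfolio(ia, constraints)
}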