# lab7.r script file for lab7 calculations
#
# author:  Eric Zivot
# created: October 20, 2003
# revised: July 17, 2012
#
# comments:
options(digits = 4, width = 70)
# make sure packages are installed prior to loading them
library(PerformanceAnalytics)
## Loading required package: zoo
## Attaching package: 'zoo'
## The following object(s) are masked from 'package:base':
##
## as.Date, as.Date.numeric
## Loading required package: xts
## Attaching package: 'PerformanceAnalytics'
## The following object(s) are masked from 'package:graphics':
##
## legend
library(zoo)
library(boot)
library(tseries)
## Loading required package: quadprog
#
# |------------------------------------------------------------------------------------------|
# | I N T E R N A L F U N C T I O N S |
# |------------------------------------------------------------------------------------------|
# function to compute Value-at-Risk. note: default values are supplied for
# the probability level (p) and the initial wealth (w); these can be
# changed when calling the function. To run, highlight the entire function,
# right-click and select "Run line or selection"
Value.at.Risk = function(x, p = 0.05, w = 1e+05) {
x = as.matrix(x)
# p-quantile of the cc return distribution under normality
q = apply(x, 2, mean) + apply(x, 2, sd) * qnorm(p)
# convert the cc return quantile to a simple return and scale by wealth
VaR = (exp(q) - 1) * w
VaR
}
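# usage sketch: the defaults can be overridden in the call, e.g. a 1% VaR
# on a $10,000 position (ret.mat is created in the main procedure below)
# Value.at.Risk(ret.mat, p = 0.01, w = 10000)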
# function to compute 5% Value-at-Risk on bootstrap-resampled data
ValueAtRisk.boot = function(x, idx, p = 0.05, w = 1e+05) {
# arguments:
# x    data to be resampled
# idx  vector of scrambled indices created by the boot() function
# p    probability level for the VaR calculation
# w    value of the initial investment
# value:
# VaR  Value-at-Risk computed using the resampled data
q = mean(x[idx]) + sd(x[idx]) * qnorm(p)
VaR = (exp(q) - 1) * w
VaR
}
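# sanity-check sketch: with the identity index the bootstrap statistic
# reproduces the plain estimate, e.g. once VBLTX is defined below
# ValueAtRisk.boot(VBLTX, idx = seq_along(VBLTX))  # equals Value.at.Risk(VBLTX)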
#
# |------------------------------------------------------------------------------------------|
# | M A I N P R O C E D U R E |
# |------------------------------------------------------------------------------------------|
# get monthly adjusted closing price data on VBLTX, FMAGX and SBUX from
# Yahoo! using the tseries function get.hist.quote(). Set the sample to
# Sep 2005 through Sep 2010. Note: if you are not careful with the start
# and end dates, or if you set retclass to 'ts', then the results may
# look weird
# get the last five years of monthly adjusted closing prices from Yahoo!
VBLTX.prices = get.hist.quote(instrument = "vbltx", start = "2005-09-01", end = "2010-09-30",
quote = "AdjClose", provider = "yahoo", origin = "1970-01-01", compression = "m",
retclass = "zoo")
## time series ends 2010-09-01
# change the class of the time index to yearmon, which is appropriate for
# monthly data. index() and as.yearmon() are functions in the zoo package
index(VBLTX.prices) = as.yearmon(index(VBLTX.prices))
class(VBLTX.prices)
## [1] "zoo"
colnames(VBLTX.prices)
## [1] "AdjClose"
start(VBLTX.prices)
## [1] "Sep 2005"
end(VBLTX.prices)
## [1] "Sep 2010"
FMAGX.prices = get.hist.quote(instrument = "fmagx", start = "2005-09-01", end = "2010-09-30",
quote = "AdjClose", provider = "yahoo", origin = "1970-01-01", compression = "m",
retclass = "zoo")
## time series ends 2010-09-01
index(FMAGX.prices) = as.yearmon(index(FMAGX.prices))
SBUX.prices = get.hist.quote(instrument = "sbux", start = "2005-09-01", end = "2010-09-30",
quote = "AdjClose", provider = "yahoo", origin = "1970-01-01", compression = "m",
retclass = "zoo")
## time series ends 2010-09-01
index(SBUX.prices) = as.yearmon(index(SBUX.prices))
# create merged price data
lab4Prices.z = merge(VBLTX.prices, FMAGX.prices, SBUX.prices)
# rename columns
colnames(lab4Prices.z) = c("VBLTX", "FMAGX", "SBUX")
# calculate cc returns as difference in log prices
lab4Returns.z = diff(log(lab4Prices.z))
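# optional check: simple returns can be recovered from the cc returns via
# R = exp(r) - 1
# head(exp(lab4Returns.z) - 1)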
# 3. Create time plots of the data
plot(lab4Returns.z, plot.type = "single", lty = 1:3, col = 1:3, lwd = 2)
legend(x = "bottomleft", legend = colnames(lab4Returns.z), lty = 1:3, col = 1:3,
lwd = 2)
abline(h = 0)
title("Monthly cc returns")
# 4. Create matrix of return data and compute pairwise scatterplots
ret.mat = coredata(lab4Returns.z)
colnames(ret.mat)
## [1] "VBLTX" "FMAGX" "SBUX"
head(ret.mat)
## VBLTX FMAGX SBUX
## [1,] -0.019729 -0.01300 0.12142
## [2,] 0.008680 0.03292 0.07369
## [3,] 0.021979 0.01968 -0.01445
## [4,] -0.009709 0.04653 0.05461
## [5,] 0.010916 -0.01499 0.13628
## [6,] -0.036859 0.02650 0.03544
VBLTX = ret.mat[, "VBLTX"]
FMAGX = ret.mat[, "FMAGX"]
SBUX = ret.mat[, "SBUX"]
pairs(ret.mat, col = "blue")
# 5. Compute estimates of CER model parameters
muhat.vals = apply(ret.mat, 2, mean)
muhat.vals
## VBLTX FMAGX SBUX
## 0.0059018 -0.0008077 0.0004841
sigma2hat.vals = apply(ret.mat, 2, var)
sigma2hat.vals
## VBLTX FMAGX SBUX
## 0.000871 0.004511 0.010731
sigmahat.vals = apply(ret.mat, 2, sd)
sigmahat.vals
## VBLTX FMAGX SBUX
## 0.02951 0.06716 0.10359
cov.mat = var(ret.mat)
cov.mat
## VBLTX FMAGX SBUX
## VBLTX 0.0008710 0.000370 -0.0004211
## FMAGX 0.0003700 0.004511 0.0042646
## SBUX -0.0004211 0.004265 0.0107314
cor.mat = cor(ret.mat)
cor.mat
## VBLTX FMAGX SBUX
## VBLTX 1.0000 0.1866 -0.1377
## FMAGX 0.1866 1.0000 0.6129
## SBUX -0.1377 0.6129 1.0000
covhat.vals = cov.mat[lower.tri(cov.mat)]
rhohat.vals = cor.mat[lower.tri(cor.mat)]
names(covhat.vals) <- names(rhohat.vals) <- c("VBLTX,FMAGX", "VBLTX,SBUX", "FMAGX,SBUX")
covhat.vals
## VBLTX,FMAGX VBLTX,SBUX FMAGX,SBUX
## 0.0003700 -0.0004211 0.0042646
rhohat.vals
## VBLTX,FMAGX VBLTX,SBUX FMAGX,SBUX
## 0.1866 -0.1377 0.6129
# summarize the CER model estimates
cbind(muhat.vals, sigma2hat.vals, sigmahat.vals)
## muhat.vals sigma2hat.vals sigmahat.vals
## VBLTX 0.0059018 0.000871 0.02951
## FMAGX -0.0008077 0.004511 0.06716
## SBUX 0.0004841 0.010731 0.10359
cbind(covhat.vals, rhohat.vals)
## covhat.vals rhohat.vals
## VBLTX,FMAGX 0.0003700 0.1866
## VBLTX,SBUX -0.0004211 -0.1377
## FMAGX,SBUX 0.0042646 0.6129
# plot mean vs. sd values
plot(sigmahat.vals, muhat.vals, pch = 1:3, cex = 2, col = 1:3, ylab = "mean",
xlab = "sd (risk)")
abline(h = 0)
legend(x = "topright", legend = names(muhat.vals), pch = 1:3, col = 1:3, cex = 1.5)
# 6. Compute standard errors for estimated parameters
# compute estimated standard error for the mean: SE(muhat) = sigmahat/sqrt(n)
nobs = nrow(ret.mat)
nobs
## [1] 60
se.muhat = sigmahat.vals/sqrt(nobs)
se.muhat
## VBLTX FMAGX SBUX
## 0.003810 0.008671 0.013374
# show estimates with SE values underneath
rbind(muhat.vals, se.muhat)
## VBLTX FMAGX SBUX
## muhat.vals 0.005902 -0.0008077 0.0004841
## se.muhat 0.003810 0.0086707 0.0133737
# compute approx 95% confidence intervals
mu.lower = muhat.vals - 2 * se.muhat
mu.upper = muhat.vals + 2 * se.muhat
cbind(mu.lower, mu.upper)
## mu.lower mu.upper
## VBLTX -0.001718 0.01352
## FMAGX -0.018149 0.01653
## SBUX -0.026263 0.02723
# compute estimated standard errors for the variance and sd:
# SE(sigma2hat) = sigma2hat/sqrt(n/2) and SE(sigmahat) = sigmahat/sqrt(2*n)
se.sigma2hat = sigma2hat.vals/sqrt(nobs/2)
se.sigma2hat
## VBLTX FMAGX SBUX
## 0.0001590 0.0008236 0.0019593
se.sigmahat = sigmahat.vals/sqrt(2 * nobs)
se.sigmahat
## VBLTX FMAGX SBUX
## 0.002694 0.006131 0.009457
rbind(sigma2hat.vals, se.sigma2hat)
## VBLTX FMAGX SBUX
## sigma2hat.vals 0.000871 0.0045108 0.010731
## se.sigma2hat 0.000159 0.0008236 0.001959
rbind(sigmahat.vals, se.sigmahat)
## VBLTX FMAGX SBUX
## sigmahat.vals 0.029512 0.067163 0.103592
## se.sigmahat 0.002694 0.006131 0.009457
# compute approx 95% confidence intervals
sigma2.lower = sigma2hat.vals - 2 * se.sigma2hat
sigma2.upper = sigma2hat.vals + 2 * se.sigma2hat
cbind(sigma2.lower, sigma2.upper)
## sigma2.lower sigma2.upper
## VBLTX 0.0005529 0.001189
## FMAGX 0.0028637 0.006158
## SBUX 0.0068128 0.014650
sigma.lower = sigmahat.vals - 2 * se.sigmahat
sigma.upper = sigmahat.vals + 2 * se.sigmahat
cbind(sigma.lower, sigma.upper)
## sigma.lower sigma.upper
## VBLTX 0.02412 0.03490
## FMAGX 0.05490 0.07942
## SBUX 0.08468 0.12251
# compute estimated standard error for the correlation:
# SE(rhohat) = (1 - rhohat^2)/sqrt(n)
se.rhohat = (1 - rhohat.vals^2)/sqrt(nobs)
se.rhohat
## VBLTX,FMAGX VBLTX,SBUX FMAGX,SBUX
## 0.1246 0.1267 0.0806
rbind(rhohat.vals, se.rhohat)
## VBLTX,FMAGX VBLTX,SBUX FMAGX,SBUX
## rhohat.vals 0.1866 -0.1377 0.6129
## se.rhohat 0.1246 0.1267 0.0806
# compute approx 95% confidence intervals
rho.lower = rhohat.vals - 2 * se.rhohat
rho.upper = rhohat.vals + 2 * se.rhohat
cbind(rho.lower, rho.upper)
## rho.lower rho.upper
## VBLTX,FMAGX -0.06255 0.4359
## VBLTX,SBUX -0.39104 0.1156
## FMAGX,SBUX 0.45175 0.7741
# 7. Compute 5% and 1% Value-at-Risk
# 5% and 1% VaR estimates based on W0 = 100000
Value.at.Risk(ret.mat, p = 0.05, w = 1e+05)
## VBLTX FMAGX SBUX
## -4175 -10531 -15626
Value.at.Risk(ret.mat, p = 0.01, w = 1e+05)
## VBLTX FMAGX SBUX
## -6083 -14534 -21377
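# alternative sketch: a nonparametric 5% VaR based on the empirical return
# quantile, which avoids the normality assumption built into Value.at.Risk()
# (exp(apply(ret.mat, 2, quantile, probs = 0.05)) - 1) * 1e+05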
################################################################################
# Hypothesis Testing
################################################################################
# 8. Test H0: mu = 0 vs. H1: mu != 0
#
# ?t.test
t.test(lab4Returns.z[, "VBLTX"])
##
## One Sample t-test
##
## data: lab4Returns.z[, "VBLTX"]
## t = 1.549, df = 59, p-value = 0.1267
## alternative hypothesis: true mean is not equal to 0
## 95 percent confidence interval:
## -0.001722 0.013526
## sample estimates:
## mean of x
## 0.005902
t.test(lab4Returns.z[, "FMAGX"])
##
## One Sample t-test
##
## data: lab4Returns.z[, "FMAGX"]
## t = -0.0931, df = 59, p-value = 0.9261
## alternative hypothesis: true mean is not equal to 0
## 95 percent confidence interval:
## -0.01816 0.01654
## sample estimates:
## mean of x
## -0.0008077
t.test(lab4Returns.z[, "SBUX"])
##
## One Sample t-test
##
## data: lab4Returns.z[, "SBUX"]
## t = 0.0362, df = 59, p-value = 0.9712
## alternative hypothesis: true mean is not equal to 0
## 95 percent confidence interval:
## -0.02628 0.02724
## sample estimates:
## mean of x
## 0.0004841
# 9. Test H0: rho_ij = 0 vs. H1: rho_ij != 0
#
# ?cor.test
# VBLTX,FMAGX
cor.test(x = lab4Returns.z[, "VBLTX"], y = lab4Returns.z[, "FMAGX"])
##
## Pearson's product-moment correlation
##
## data: lab4Returns.z[, "VBLTX"] and lab4Returns.z[, "FMAGX"]
## t = 1.447, df = 58, p-value = 0.1533
## alternative hypothesis: true correlation is not equal to 0
## 95 percent confidence interval:
## -0.07062 0.42064
## sample estimates:
## cor
## 0.1866
# VBLTX,SBUX
cor.test(x = lab4Returns.z[, "VBLTX"], y = lab4Returns.z[, "SBUX"])
##
## Pearson's product-moment correlation
##
## data: lab4Returns.z[, "VBLTX"] and lab4Returns.z[, "SBUX"]
## t = -1.059, df = 58, p-value = 0.294
## alternative hypothesis: true correlation is not equal to 0
## 95 percent confidence interval:
## -0.3784 0.1204
## sample estimates:
## cor
## -0.1377
# FMAGX,SBUX
cor.test(x = lab4Returns.z[, "FMAGX"], y = lab4Returns.z[, "SBUX"])
##
## Pearson's product-moment correlation
##
## data: lab4Returns.z[, "FMAGX"] and lab4Returns.z[, "SBUX"]
## t = 5.908, df = 58, p-value = 1.932e-07
## alternative hypothesis: true correlation is not equal to 0
## 95 percent confidence interval:
## 0.4252 0.7501
## sample estimates:
## cor
## 0.6129
# 10. Test H0: returns are normal vs. H1: returns are not normal
#
# ?jarque.bera.test
jarque.bera.test(lab4Returns.z[, "VBLTX"])
##
## Jarque Bera Test
##
## data: lab4Returns.z[, "VBLTX"]
## X-squared = 21.78, df = 2, p-value = 1.863e-05
jarque.bera.test(lab4Returns.z[, "FMAGX"])
##
## Jarque Bera Test
##
## data: lab4Returns.z[, "FMAGX"]
## X-squared = 26.09, df = 2, p-value = 2.158e-06
jarque.bera.test(lab4Returns.z[, "SBUX"])
##
## Jarque Bera Test
##
## data: lab4Returns.z[, "SBUX"]
## X-squared = 14.57, df = 2, p-value = 0.0006859
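# the JB statistic combines skewness and excess kurtosis:
# JB = (n/6)*(skew^2 + (excess kurtosis)^2/4)
# sketch using PerformanceAnalytics, whose kurtosis() returns excess
# kurtosis by default; this should roughly reproduce the X-squared value
# for VBLTX reported above
# S = skewness(ret.mat[, "VBLTX"])
# K = kurtosis(ret.mat[, "VBLTX"])
# (nobs/6) * (S^2 + K^2/4)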
# 11. 24-month rolling estimates of mu and sd
# rolling analysis for VBLTX
roll.mu.VBLTX = rollapply(lab4Returns.z[, "VBLTX"], FUN = mean, width = 24,
align = "right")
roll.sd.VBLTX = rollapply(lab4Returns.z[, "VBLTX"], FUN = sd, width = 24, align = "right")
plot(merge(roll.mu.VBLTX, roll.sd.VBLTX, lab4Returns.z[, "VBLTX"]), plot.type = "single",
main = "24-month rolling means and sds for VBLTX", ylab = "Percent per month",
col = c("blue", "red", "black"), lwd = 2)
abline(h = 0)
legend(x = "bottomleft", legend = c("Rolling means", "Rolling sds", "VBLTX returns"),
col = c("blue", "red", "black"), lwd = 2)
# rolling analysis for FMAGX
roll.mu.FMAGX = rollapply(lab4Returns.z[, "FMAGX"], FUN = mean, width = 24,
align = "right")
roll.sd.FMAGX = rollapply(lab4Returns.z[, "FMAGX"], FUN = sd, width = 24, align = "right")
plot(merge(roll.mu.FMAGX, roll.sd.FMAGX, lab4Returns.z[, "FMAGX"]), plot.type = "single",
main = "24-month rolling means and sds for FMAGX", ylab = "Percent per month",
col = c("blue", "red", "black"), lwd = 2)
abline(h = 0)
legend(x = "bottomleft", legend = c("Rolling means", "Rolling sds", "FMAGX returns"),
col = c("blue", "red", "black"), lwd = 2)
# rolling analysis for SBUX
roll.mu.SBUX = rollapply(lab4Returns.z[, "SBUX"], FUN = mean, width = 24, align = "right")
roll.sd.SBUX = rollapply(lab4Returns.z[, "SBUX"], FUN = sd, width = 24, align = "right")
plot(merge(roll.mu.SBUX, roll.sd.SBUX, lab4Returns.z[, "SBUX"]), plot.type = "single",
main = "24-month rolling means and sds for SBUX", ylab = "Percent per month",
col = c("blue", "red", "black"), lwd = 2)
abline(h = 0)
legend(x = "bottomleft", legend = c("Rolling means", "Rolling sds", "SBUX returns"),
col = c("blue", "red", "black"), lwd = 2)
# rolling correlation estimates
rhohat = function(x) {
cor(x)[1, 2]
}
# compute rolling estimates b/w VBLTX and FMAGX
roll.rhohat.VBLTX.FMAGX = rollapply(lab4Returns.z[, c("VBLTX", "FMAGX")], width = 24,
FUN = rhohat, by.column = FALSE, align = "right")
class(roll.rhohat.VBLTX.FMAGX)
## [1] "zoo"
roll.rhohat.VBLTX.FMAGX[1:5]
## Sep 2007 Oct 2007 Nov 2007 Dec 2007 Jan 2008
## -0.08616 -0.09022 -0.17255 -0.18540 -0.17295
plot(roll.rhohat.VBLTX.FMAGX, main = "Rolling Correlation b/w VBLTX and FMAGX",
lwd = 2, col = "blue", ylab = "rho.hat")
abline(h = 0)
# compute rolling estimates b/w VBLTX and SBUX
roll.rhohat.VBLTX.SBUX = rollapply(lab4Returns.z[, c("VBLTX", "SBUX")], width = 24,
FUN = rhohat, by.column = FALSE, align = "right")
class(roll.rhohat.VBLTX.SBUX)
## [1] "zoo"
roll.rhohat.VBLTX.SBUX[1:5]
## Sep 2007 Oct 2007 Nov 2007 Dec 2007 Jan 2008
## -0.2334 -0.1461 -0.2307 -0.1968 -0.1821
plot(roll.rhohat.VBLTX.SBUX, main = "Rolling Correlation b/w VBLTX and SBUX",
lwd = 2, col = "blue", ylab = "rho.hat")
abline(h = 0)
# compute rolling estimates b/w FMAGX and SBUX
roll.rhohat.FMAGX.SBUX = rollapply(lab4Returns.z[, c("FMAGX", "SBUX")], width = 24,
FUN = rhohat, by.column = FALSE, align = "right")
class(roll.rhohat.FMAGX.SBUX)
## [1] "zoo"
roll.rhohat.FMAGX.SBUX[1:5]
## Sep 2007 Oct 2007 Nov 2007 Dec 2007 Jan 2008
## 0.04224 0.12974 0.21424 0.21904 0.23660
plot(roll.rhohat.FMAGX.SBUX, main = "Rolling Correlation b/w FMAGX and SBUX",
lwd = 2, col = "blue", ylab = "rho.hat")
abline(h = 0)
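# the rolling machinery extends to other statistics; a sketch of a
# 24-month rolling 5% VaR for VBLTX using Value.at.Risk() defined above
# roll.VaR.VBLTX = rollapply(lab4Returns.z[, "VBLTX"], width = 24,
#     FUN = Value.at.Risk, align = "right")
# plot(roll.VaR.VBLTX, main = "Rolling 5% VaR for VBLTX", col = "blue", lwd = 2)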
# 12. Evaluate bias and SE formulas using Monte Carlo
# generate 1000 samples from CER and compute sample statistics
mu = muhat.vals["FMAGX"]
# note: this assignment shadows the name 'sd', but calls like sd(x) below
# still find the sd() function
sd = sigmahat.vals["FMAGX"]
n.obs = 60
set.seed(123)
n.sim = 1000
sim.means = rep(0, n.sim)
sim.vars = rep(0, n.sim)
sim.sds = rep(0, n.sim)
for (sim in 1:n.sim) {
sim.ret = rnorm(n.obs, mean = mu, sd = sd)
sim.means[sim] = mean(sim.ret)
sim.vars[sim] = var(sim.ret)
sim.sds[sim] = sqrt(sim.vars[sim])
}
par(mfrow = c(2, 2))
hist(sim.means, xlab = "mu hat", col = "slateblue1")
abline(v = mu, col = "white", lwd = 2)
hist(sim.vars, xlab = "sigma2 hat", col = "slateblue1")
abline(v = sd^2, col = "white", lwd = 2)
hist(sim.sds, xlab = "sigma hat", col = "slateblue1")
abline(v = sd, col = "white", lwd = 2)
par(mfrow = c(1, 1))
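# the simulation loop can also be written without explicit indexing; a
# sketch using replicate() (statistically equivalent, different draws)
# sim.rets = replicate(n.sim, rnorm(n.obs, mean = mu, sd = sd))
# sim.means.alt = colMeans(sim.rets)
# sim.vars.alt = apply(sim.rets, 2, var)
# sim.sds.alt = sqrt(sim.vars.alt)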
# 13. Compute MC estimates of bias and SE
c(mu, mean(sim.means))
## FMAGX
## -0.0008077 -0.0007786
mean(sim.means) - mu
## FMAGX
## 2.904e-05
c(sd^2, mean(sim.vars))
## FMAGX
## 0.004511 0.004534
mean(sim.vars) - sd^2
## FMAGX
## 2.328e-05
c(sd, mean(sim.sds))
## FMAGX
## 0.06716 0.06705
mean(sim.sds) - sd
## FMAGX
## -0.000117
# compare the analytic SE values to the MC SEs computed from the simulated data
c(se.muhat["FMAGX"], sd(sim.means))
## FMAGX
## 0.008671 0.008308
c(se.sigma2hat["FMAGX"], sd(sim.vars))
## FMAGX
## 0.0008236 0.0008426
c(se.sigmahat["FMAGX"], sd(sim.sds))
## FMAGX
## 0.006131 0.006246
# 14. Bootstrapping SEs for the mean, variance, sd and correlation
#
# ?boot
# note: boot() requires user-supplied functions that take two arguments:
# the data and an index vector. The index is created by the boot() function
# and represents random resampling with replacement
# function for bootstrapping sample mean
mean.boot = function(x, idx) {
# arguments:
# x    data to be resampled
# idx  vector of scrambled indices created by the boot() function
# value:
# ans  mean value computed using the resampled data
ans = mean(x[idx])
ans
}
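# minimal sketch of what boot() does with the idx argument: one bootstrap
# replicate resamples the observation indices with replacement
# idx = sample(1:nobs, nobs, replace = TRUE)
# mean.boot(VBLTX, idx)  # mean of one resampled data set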
VBLTX.mean.boot = boot(VBLTX, statistic = mean.boot, R = 999)
class(VBLTX.mean.boot)
## [1] "boot"
names(VBLTX.mean.boot)
## [1] "t0" "t" "R" "data" "seed"
## [6] "statistic" "sim" "call" "stype" "strata"
## [11] "weights"
# print, plot and qqnorm methods
VBLTX.mean.boot
##
## ORDINARY NONPARAMETRIC BOOTSTRAP
##
##
## Call:
## boot(data = VBLTX, statistic = mean.boot, R = 999)
##
##
## Bootstrap Statistics :
## original bias std. error
## t1* 0.005902 0.0002181 0.003724
se.muhat["VBLTX"]
## VBLTX
## 0.00381
# plot bootstrap distribution and qq-plot against normal
plot(VBLTX.mean.boot)
# compute bootstrap confidence intervals using the normal approximation
# and percentile methods
boot.ci(VBLTX.mean.boot, conf = 0.95, type = c("norm", "perc"))
## BOOTSTRAP CONFIDENCE INTERVAL CALCULATIONS
## Based on 999 bootstrap replicates
##
## CALL :
## boot.ci(boot.out = VBLTX.mean.boot, conf = 0.95, type = c("norm",
## "perc"))
##
## Intervals :
## Level Normal Percentile
## 95% (-0.0016, 0.0130 ) (-0.0011, 0.0135 )
## Calculations and Intervals on Original Scale
# bootstrap SD estimate
#
# function for bootstrapping sample standard deviation
sd.boot = function(x, idx) {
# arguments:
# x    data to be resampled
# idx  vector of scrambled indices created by the boot() function
# value:
# ans  sd value computed using the resampled data
ans = sd(x[idx])
ans
}
VBLTX.sd.boot = boot(VBLTX, statistic = sd.boot, R = 999)
VBLTX.sd.boot
##
## ORDINARY NONPARAMETRIC BOOTSTRAP
##
##
## Call:
## boot(data = VBLTX, statistic = sd.boot, R = 999)
##
##
## Bootstrap Statistics :
## original bias std. error
## t1* 0.02951 -0.0005905 0.004182
se.sigmahat["VBLTX"]
## VBLTX
## 0.002694
# plot bootstrap distribution
plot(VBLTX.sd.boot)
# compute confidence intervals
boot.ci(VBLTX.sd.boot, conf = 0.95, type = c("norm", "basic", "perc"))
## BOOTSTRAP CONFIDENCE INTERVAL CALCULATIONS
## Based on 999 bootstrap replicates
##
## CALL :
## boot.ci(boot.out = VBLTX.sd.boot, conf = 0.95, type = c("norm",
## "basic", "perc"))
##
## Intervals :
## Level Normal Basic Percentile
## 95% ( 0.0219, 0.0383 ) ( 0.0217, 0.0383 ) ( 0.0207, 0.0373 )
## Calculations and Intervals on Original Scale
# bootstrap correlation
# function to compute the correlation between the first two variables in a matrix
rho.boot = function(x.mat, idx) {
# arguments:
# x.mat  n x 2 data matrix to be resampled
# idx    vector of scrambled indices created by the boot() function
# value:
# ans    correlation value computed using the resampled data
ans = cor(x.mat[idx, ])[1, 2]
ans
}
VBLTX.FMAGX.cor.boot = boot(ret.mat[, c("VBLTX", "FMAGX")], statistic = rho.boot,
R = 999)
VBLTX.FMAGX.cor.boot
##
## ORDINARY NONPARAMETRIC BOOTSTRAP
##
##
## Call:
## boot(data = ret.mat[, c("VBLTX", "FMAGX")], statistic = rho.boot,
## R = 999)
##
##
## Bootstrap Statistics :
## original bias std. error
## t1* 0.1866 -0.01172 0.2053
se.rhohat[1]
## VBLTX,FMAGX
## 0.1246
# plot bootstrap distribution
plot(VBLTX.FMAGX.cor.boot)
# bootstrap confidence intervals
boot.ci(VBLTX.FMAGX.cor.boot, conf = 0.95, type = c("norm", "perc"))
## BOOTSTRAP CONFIDENCE INTERVAL CALCULATIONS
## Based on 999 bootstrap replicates
##
## CALL :
## boot.ci(boot.out = VBLTX.FMAGX.cor.boot, conf = 0.95, type = c("norm",
## "perc"))
##
## Intervals :
## Level Normal Percentile
## 95% (-0.2041, 0.6008 ) (-0.2683, 0.5251 )
## Calculations and Intervals on Original Scale
# 15. Bootstrap VaR
VBLTX.VaR.boot = boot(VBLTX, statistic = ValueAtRisk.boot, R = 999)
VBLTX.VaR.boot
##
## ORDINARY NONPARAMETRIC BOOTSTRAP
##
##
## Call:
## boot(data = VBLTX, statistic = ValueAtRisk.boot, R = 999)
##
##
## Bootstrap Statistics :
## original bias std. error
## t1* -4175 56.84 680.1
boot.ci(VBLTX.VaR.boot, conf = 0.95, type = c("norm", "perc"))
## BOOTSTRAP CONFIDENCE INTERVAL CALCULATIONS
## Based on 999 bootstrap replicates
##
## CALL :
## boot.ci(boot.out = VBLTX.VaR.boot, conf = 0.95, type = c("norm",
## "perc"))
##
## Intervals :
## Level Normal Percentile
## 95% (-5564, -2898 ) (-5411, -2818 )
## Calculations and Intervals on Original Scale
plot(VBLTX.VaR.boot)
#
# |------------------------------------------------------------------------------------------|
# | E N D O F S C R I P T |
# |------------------------------------------------------------------------------------------|