library(readxl)
library(ggplot2)
library(tidyr)
library(dplyr)
library(zoo)
library(TTR)
library(e1071)
library(sn)
library(moments)
FINM_8100_DATA_1_ <- read_excel("C:/Users/thech/Downloads/FINM 8100 DATA (1).xlsx")
#################Commonwealth Bank#########################
# Extract the Commonwealth Bank columns (date and daily change)
commonwealth <- FINM_8100_DATA_1_[, 2:3]
commonwealth[1, 1] <- "year"
# Promote the first data row to column names, then drop it
colnames(commonwealth) <- as.character(unlist(commonwealth[1, ]))
cm <- commonwealth[-1, ]
# Restrict to the dot-com period (1 March 2000 to 31 March 2003)
cmdotcom <- cm %>% filter(year >= 20000301 & year <= 20030331)
hist(as.numeric(cmdotcom$Change.cm), breaks = 100, freq = FALSE)
cmm <- mean(as.numeric(cmdotcom$Change.cm), na.rm = TRUE)
cms <- sd(as.numeric(cmdotcom$Change.cm), na.rm = TRUE)
# Add normal distribution curve
curve(
  dnorm(x, mean = cmm, sd = cms),
  col = "lightblue", lwd = 2,
  add = TRUE
)
# Add empirical (kernel) density curve
dens <- density(as.numeric(cmdotcom$Change.cm), bw = 0.1, na.rm = TRUE)
lines(dens, col = "blue", lwd = 2)

skewness(as.numeric(cmdotcom$Change.cm), na.rm = TRUE)
## [1] -0.2094623
kurtosis(as.numeric(cmdotcom$Change.cm), na.rm = TRUE)
## [1] 4.246894
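# The negative skewness and kurtosis above 3 point to fatter-than-normal tails.
# Optional check (a sketch, not part of the original analysis): the Jarque-Bera
# test from the moments package formalises the comparison with normality.
jarque.test(na.omit(as.numeric(cmdotcom$Change.cm)))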
##################Variance-Covariance Approach
cmchange <- as.numeric(cmdotcom$Change.cm)
##50 days
#----------EQUALLY WEIGHTED
win50 <- 50
alpha <- 0.01
z <- qnorm(alpha)
cmmu_roll50 <- rollapply(cmchange, width = win50, FUN = mean, align = "right", fill = NA, na.rm = TRUE)
cmsd_roll50 <- rollapply(cmchange, width = win50, FUN = sd, align = "right", fill = NA, na.rm = TRUE)
cmVaR50d_99 <- (cmmu_roll50 + z * cmsd_roll50)
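# For reference, the same mu + z * sigma formula applied once to the whole
# dot-com window (a static figure, using cmm and cms computed for the histogram above):
cmm + qnorm(0.01) * cms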
##120 days
win120 <- 120
alpha <- 0.01
z <- qnorm(alpha)
cmmu_roll120 <- rollapply(cmchange, width = win120, FUN = mean, align = "right", fill = NA, na.rm = TRUE)
cmsd_roll120 <- rollapply(cmchange, width = win120, FUN = sd, align = "right", fill = NA, na.rm = TRUE)
cmVaR120d_99 <- (cmmu_roll120 + z * cmsd_roll120)
##250 days
win250 <- 250
alpha <- 0.01
z <- qnorm(alpha)
cmmu_roll250 <- rollapply(cmchange, width = win250, FUN = mean, align = "right", fill = NA, na.rm = TRUE)
cmsd_roll250 <- rollapply(cmchange, width = win250, FUN = sd, align = "right", fill = NA, na.rm = TRUE)
cmVaR250d_99 <- (cmmu_roll250 + z * cmsd_roll250)
#plot cm
plot(cmchange, type = "l",col = "grey" )
lines(cmVaR50d_99, col = "black")
lines(cmVaR120d_99, col = "blue")
lines(cmVaR250d_99,col = "purple")
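# Optional: a legend makes the three window lengths easier to tell apart
# (the position "bottomright" is an arbitrary choice; the same call can be
# repeated after the later VaR plots)
legend("bottomright",
       legend = c("returns", "50-day VaR", "120-day VaR", "250-day VaR"),
       col = c("grey", "black", "blue", "purple"), lty = 1, cex = 0.7)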

#--------------exponentially weighted
r <- as.numeric(cmchange)
# --- Minimal, robust EWMA VaR (starts exactly at each window) -------------
ewma_var_exact <- function(r, mu, window, lambda, alpha = 0.01) {
  stopifnot(length(r) == length(mu))
  n <- length(r)
  z <- qnorm(alpha)
  # first index where the rolling mean exists (for right-aligned, this is 'window')
  t0 <- which(!is.na(mu))[1]
  if (is.na(t0) || t0 < window) t0 <- window  # safety
  # initialise the variance at t0 using the same window the mean used
  eps_win <- r[(t0 - window + 1):t0] - mu[t0]
  sigma2 <- rep(NA_real_, n)
  sigma2[t0] <- var(eps_win, na.rm = TRUE)
  # recursive EWMA on the variance using lagged residuals
  for (t in (t0 + 1):n) {
    eps_tm1 <- r[t - 1] - mu[t - 1]  # (r_{t-1} - mu_{t-1})
    if (is.na(eps_tm1) || is.na(sigma2[t - 1])) {
      sigma2[t] <- NA
    } else {
      sigma2[t] <- lambda * sigma2[t - 1] + (1 - lambda) * (eps_tm1^2)
    }
  }
  VaR <- mu + z * sqrt(sigma2)
  VaR
}
# Inputs
r <- as.numeric(cmchange)
# rolling means you already computed:
# cmmu_roll50, cmmu_roll120, cmmu_roll250
# lambdas chosen so the EWMA "effective window" (1 + lambda) / (1 - lambda) equals 50, 120 and 250
lam50 <- 49/51
lam120 <- 119/121
lam250 <- 249/251
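# Quick check of that relation (a sketch; "effective window" here means
# (1 + lambda) / (1 - lambda)):
sapply(c(lam50, lam120, lam250), function(l) (1 + l) / (1 - l))  # 50, 120, 250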
# compute EWMA VaR — these will start exactly at 50/120/250
cmVaR50x_99 <- ewma_var_exact(r, cmmu_roll50, 50, lam50, alpha = 0.01)
cmVaR120x_99 <- ewma_var_exact(r, cmmu_roll120, 120, lam120, alpha = 0.01)
cmVaR250x_99 <- ewma_var_exact(r, cmmu_roll250, 250, lam250, alpha = 0.01)
# sanity check counts (expect N - window + 1 finite values, i.e. each series starts exactly at day 50/120/250)
sapply(list(V50 = cmVaR50x_99, V120 = cmVaR120x_99, V250 = cmVaR250x_99),
       function(v) sum(is.finite(v)))
## V50 V120 V250
## 755 685 555
plot(cmchange, type = "l",col = "grey" )
lines(cmVaR50x_99, col = "black")
lines(cmVaR120x_99, col = "blue")
lines(cmVaR250x_99,col = "purple")

################## Historical Approach
#50days
win50 <- 50
alpha <- 0.01
cmVaR50h_99 <- rollapply(cmchange, width = win50, FUN = function(x) quantile(x, probs = alpha, na.rm = TRUE), align = "right", fill = NA)
#120 days
win120 <- 120
alpha <- 0.01
cmVaR120h_99 <- rollapply(cmchange, width = win120, FUN = function(x) quantile(x, probs = alpha, na.rm = TRUE), align = "right", fill = NA)
#250 days
win250 <- 250
alpha <- 0.01
cmVaR250h_99 <- rollapply(cmchange, width = win250, FUN = function(x) quantile(x, probs = alpha, na.rm = TRUE), align = "right", fill = NA)
plot(cmchange, type = "l",col = "grey" )
lines(cmVaR50h_99, col = "black")
lines(cmVaR120h_99,col = "blue")
lines(cmVaR250h_99, col = "purple")

####################Monte Carlo Approach
stackr<-FINM_8100_DATA_1_[,2:9]
stackr[1,1]<-"year"
colnames(stackr) <- as.character(unlist(stackr[1, ]))
stackr<-stackr[-1, ]
dotcom<-stackr%>%filter(year>=20000301 & year <=20030331)
coldotcom <- as.numeric(c(as.matrix(dotcom[2:8])))
skewness(coldotcom, na.rm = TRUE)
## [1] -1.379403
kurtosis(coldotcom, na.rm = TRUE)
## [1] 25.80484
sd(coldotcom, na.rm = TRUE)
## [1] 0.09205716
summary(coldotcom)
## Min. 1st Qu. Median Mean 3rd Qu. Max.
## -1.361745 -0.029295 0.000000 0.001972 0.033773 0.704701
# Full-sample Commonwealth Bank changes: used for the bank-specific mean and sd
stackr <- as.numeric(stackr$Change.cm)
stackr <- stackr[is.finite(stackr)]
# Pooled dot-com industry changes: used for the skewness and kurtosis targets
coldotcom <- as.numeric(coldotcom)
coldotcom <- coldotcom[is.finite(coldotcom)]
cmm <- mean(stackr, na.rm = TRUE)
cms <- sd(stackr, na.rm = TRUE)
cmsk <- skewness(coldotcom, na.rm = TRUE)
cmk <- kurtosis(coldotcom, na.rm = TRUE)
# 1) Helper: find skew-t (alpha, nu) whose simulated skewness and kurtosis match
#    the targets. Both sides use moments::kurtosis, which returns raw (not
#    excess) kurtosis, so the comparison is internally consistent.
find_skewt_params <- function(sk_target, kex_target, n = 30000) {
  obj <- function(par) {
    skew <- par[1]
    nu <- 4 + exp(par[2])  # ensure nu > 4 so the kurtosis is finite
    x <- rst(n, xi = 0, omega = 1, alpha = skew, nu = nu)
    (skewness(x) - sk_target)^2 + (kurtosis(x) - kex_target)^2
  }
  fit <- optim(c(0, log(6 - 4)), obj, method = "Nelder-Mead")
  list(skew = fit$par[1], nu = 4 + exp(fit$par[2]))
}
# 2) Industry-wide targets from the pooled dot-com changes
sk_tgt <- cmsk   # skewness(coldotcom)
kex_tgt <- cmk   # kurtosis(coldotcom), raw kurtosis as returned by the moments package
pars <- find_skewt_params(sk_tgt, kex_tgt)
skew_hat <- pars$skew
nu_hat <- pars$nu
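# Optional consistency check (a sketch; set.seed(42) and n = 30000 are
# illustrative choices, not part of the original analysis): simulate with the
# fitted parameters and compare the sample moments to the targets.
set.seed(42)
chk <- rst(30000, xi = 0, omega = 1, alpha = skew_hat, nu = nu_hat)
c(skewness(chk), kurtosis(chk))  # should be close to sk_tgt and kex_tgt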
# 3) Simulate for ONE bank, plugging its mean and sd in as the skew-t location
#    (xi) and scale (omega); for a skewed, heavy-tailed fit these are only
#    approximately the mean and sd of the simulated series
xi <- cmm      # mean of the full-sample CBA changes
omega <- cms   # sd of the full-sample CBA changes
set.seed(123)
x_sim <- rst(nrow(dotcom), xi = xi, omega = omega, alpha = skew_hat, nu = nu_hat)
# Quick visual check
hist(x_sim, breaks = 100, col = "lightblue", freq = FALSE,
     main = "Skew-t simulation (bank mean/sd + industry skew/tails)")
lines(density(x_sim, na.rm = FALSE, bw = 0.8), lwd = 2)
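# Optional check (a sketch): because xi and omega are location/scale rather
# than mean/sd, the simulated mean and sd only approximately match the inputs.
c(sim_mean = mean(x_sim), sim_sd = sd(x_sim))
c(target_mean = cmm, target_sd = cms)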

#50days
win50 <- 50
alpha <- 0.01
cmVaR50m_99 <- rollapply(x_sim, width = win50, FUN = function(x) quantile(x, probs = alpha, na.rm = TRUE), align = "right", fill = NA)
#120 days
win120 <- 120
alpha <- 0.01
cmVaR120m_99 <- rollapply(x_sim, width = win120, FUN = function(x) quantile(x, probs = alpha, na.rm = TRUE), align = "right", fill = NA)
#250 days
win250 <- 250
alpha <- 0.01
cmVaR250m_99 <- rollapply(x_sim, width = win250, FUN = function(x) quantile(x, probs = alpha, na.rm = TRUE), align = "right", fill = NA)
plot(x_sim, type = "l",col = "grey" )
lines(cmVaR50m_99, col = "black")
lines(cmVaR120m_99,col = "blue")
lines(cmVaR250m_99, col = "purple")

#--------------------------------------Test----------------------------------------------#
#####count
valid <- is.finite(cmVaR50d_99) & is.finite(cmchange)
exceptionse50 <- (cmchange < cmVaR50d_99) & valid
ce50<-sum(exceptionse50, na.rm = TRUE)
pe50<-ce50/sum(valid)
# z-Test at 99%
thres99<- 0.01
z<- (pe50-thres99)/sqrt(thres99*(1-thres99)/sum(valid))
cn<-qnorm(1-thres99)
z
## [1] 2.359221
cn
## [1] 2.326348
#--> reject null: exception rate above 0.01 -> more breaches than expected, i.e. the model slightly underestimates risk
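# The same count test can be applied to the other VaR series. A minimal helper
# sketch (the name exception_ztest is illustrative, not from the original code):
exception_ztest <- function(returns, var_series, p = 0.01) {
  valid <- is.finite(var_series) & is.finite(returns)
  exceptions <- (returns < var_series) & valid
  rate <- sum(exceptions, na.rm = TRUE) / sum(valid)
  z <- (rate - p) / sqrt(p * (1 - p) / sum(valid))
  c(n = sum(valid), exceptions = sum(exceptions, na.rm = TRUE), rate = rate, z = z)
}
# e.g. exception_ztest(cmchange, cmVaR50h_99)  # historical 50-day VaR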
#####magnitude
exceptionse50 <-cmchange < cmVaR50d_99
loss_exc <- cmchange[exceptionse50]
var_exc <- cmVaR50d_99[exceptionse50]
ok <- is.finite(loss_exc) & is.finite(var_exc)
d <- var_exc[ok] - loss_exc[ok] # differences
n <- length(d)
mean_d <- mean(d, na.rm = TRUE)
sd_d <- sd(d, na.rm = TRUE)
t <- mean_d / (sd_d / sqrt(n)) # test statistic
ct<-qt(1-thres99,n-1)
t
## [1] 3.17601
ct
## [1] 2.650309
#--> reject null: the mean shortfall beyond VaR is not 0 -> when VaR is breached, it is breached by a sizeable margin
#####loss function
y <- as.numeric(cmchange)
q <- cmVaR50d_99
thres99 <- 0.01
# Asymmetric (tick-style) loss comparing the returns with the 50-day VaR forecasts;
# the total can be used to rank competing VaR forecasts
s <- ((1 - thres99) - (y < q)) * (y - q)
loss <- sum(s, na.rm = TRUE)
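# Sketch of a model comparison using the same loss (the helper name tick_loss
# is illustrative, not from the original code; it reuses the formula above):
tick_loss <- function(y, q, p = 0.01) {
  s <- ((1 - p) - (y < q)) * (y - q)
  sum(s, na.rm = TRUE)
}
sapply(list(vc50 = cmVaR50d_99, ewma50 = cmVaR50x_99, hist50 = cmVaR50h_99),
       function(q) tick_loss(as.numeric(cmchange), q))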