R Markdown

This is an R Markdown document. Markdown is a simple formatting syntax for authoring HTML, PDF, and MS Word documents. For more details on using R Markdown see http://rmarkdown.rstudio.com.

When you click the Knit button, a document will be generated that includes both the content and the output of any embedded R code chunks within the document. You can embed an R code chunk like this:

#========================================
# FamaFrench_mon_69_98_3stocks
# One-factor (single-index) model
# FF three-factor model
#========================================
library(tidyverse)
## ── Attaching core tidyverse packages ──────────────────────── tidyverse 2.0.0 ──
## ✔ dplyr     1.1.4     ✔ readr     2.1.5
## ✔ forcats   1.0.0     ✔ stringr   1.5.1
## ✔ ggplot2   3.5.1     ✔ tibble    3.2.1
## ✔ lubridate 1.9.3     ✔ tidyr     1.3.1
## ✔ purrr     1.0.2     
## ── Conflicts ────────────────────────────────────────── tidyverse_conflicts() ──
## ✖ dplyr::filter() masks stats::filter()
## ✖ dplyr::lag()    masks stats::lag()
## ℹ Use the conflicted package (<http://conflicted.r-lib.org/>) to force all conflicts to become errors
library(reshape2)
## 
## Attaching package: 'reshape2'
## 
## The following object is masked from 'package:tidyr':
## 
##     smiths
retdata = read_csv('C:/Users/Administrator/Downloads/FamaFrench_mon_69_98_3stocks.csv')
## Rows: 360 Columns: 9
## ── Column specification ────────────────────────────────────────────────────────
## Delimiter: ","
## dbl (9): date, Mkt-RF, SMB, HML, RF, ge, ibm, mobil, CRSP
## 
## ℹ Use `spec()` to retrieve the full column specification for this data.
## ℹ Specify the column types or set `show_col_types = FALSE` to quiet this message.
head(retdata)
## # A tibble: 6 × 9
##     date `Mkt-RF`   SMB   HML    RF    ge    ibm  mobil   CRSP
##    <dbl>    <dbl> <dbl> <dbl> <dbl> <dbl>  <dbl>  <dbl>  <dbl>
## 1 196901    -1.2  -0.8   1.57  0.53 -1.20 -5.95   -1.40 -0.671
## 2 196902    -5.82 -3.9   0.93  0.46 -6.04 -0.700  -7.84 -5.36 
## 3 196903     2.59 -0.28 -0.45  0.46  6.65  7.03   21.5   3.05 
## 4 196904     1.52 -0.85  0.06  0.53  5.96  4.46    3.00  2.05 
## 5 196905     0.02 -0.27  0.74  0.48 -3.58 -2.5     2.67  0.504
## 6 196906    -7.25 -5.31 -1.15  0.51 -3.82  5.88  -13.0  -6.74
glimpse(retdata)
## Rows: 360
## Columns: 9
## $ date     <dbl> 196901, 196902, 196903, 196904, 196905, 196906, 196907, 19690…
## $ `Mkt-RF` <dbl> -1.20, -5.82, 2.59, 1.52, 0.02, -7.25, -7.05, 4.65, -2.88, 4.…
## $ SMB      <dbl> -0.80, -3.90, -0.28, -0.85, -0.27, -5.31, -3.27, 0.89, 1.20, …
## $ HML      <dbl> 1.57, 0.93, -0.45, 0.06, 0.74, -1.15, 1.36, -3.83, -3.24, -3.…
## $ RF       <dbl> 0.53, 0.46, 0.46, 0.53, 0.48, 0.51, 0.53, 0.50, 0.62, 0.60, 0…
## $ ge       <dbl> -1.1984, -6.0377, 6.6474, 5.9621, -3.5806, -3.8196, -4.3056, …
## $ ibm      <dbl> -5.9524, -0.7004, 7.0303, 4.4586, -2.5000, 5.8777, -3.9230, 6…
## $ mobil    <dbl> -1.4043, -7.8431, 21.5130, 2.9961, 2.6667, -12.9870, -6.0981,…
## $ CRSP     <dbl> -0.6714, -5.3641, 3.0505, 2.0528, 0.5038, -6.7388, -6.5173, 5…
colnames(retdata)[2] <- 'Mkt_RF'  # rename 'Mkt-RF' to 'Mkt_RF' so it is a valid name in model formulas
# attach(retdata)
# Below, each factor model's covariance matrix is estimated in two ways:
# via the lm() function (Method 1) and via the OLS formula inv(X'X)X'Y (Method 2).
#===========================================================
# Single index model to compute covariance matrix
#===========================================================
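# The single-index model assumed in this section is
#     R_i = alpha_i + beta_i * R_m + eps_i,   with Cov(eps_i, eps_j) = 0 for i != j,
# so the implied return covariance matrix is
#     Sigma = var(R_m) * beta %*% t(beta) + D,
# where beta is the vector of market betas and D is the diagonal matrix of
# residual variances; both methods below estimate beta and D and plug them in.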
# Method 1: by "lm" function
#===========================
stock.rets<-retdata %>% select(c(2,6,7,8))/100
glimpse(stock.rets)
## Rows: 360
## Columns: 4
## $ Mkt_RF <dbl> -0.0120, -0.0582, 0.0259, 0.0152, 0.0002, -0.0725, -0.0705, 0.0…
## $ ge     <dbl> -0.011984, -0.060377, 0.066474, 0.059621, -0.035806, -0.038196,…
## $ ibm    <dbl> -0.059524, -0.007004, 0.070303, 0.044586, -0.025000, 0.058777, …
## $ mobil  <dbl> -0.014043, -0.078431, 0.215130, 0.029961, 0.026667, -0.129870, …
N <- dim(stock.rets)[1]
#Mkt.RF<-retdata %>% select(2)/100
fit = lm(formula = cbind(ge,ibm,mobil)~Mkt_RF, data = stock.rets)
sigF = as.numeric(var(stock.rets$Mkt_RF))
bbeta = as.matrix(fit$coefficients)
bbeta = as.matrix(bbeta[-1,])
bbeta
##            [,1]
## ge    1.0580825
## ibm   0.8149949
## mobil 0.8158072
sigeps = crossprod(fit$residuals)/(N-2)
# sigeps = as.matrix(var(fit$residuals))  # similar, but var() divides by N-1 rather than N-2
sigeps = diag(diag(sigeps))
sigeps
##             [,1]        [,2]        [,3]
## [1,] 0.001702494 0.000000000 0.000000000
## [2,] 0.000000000 0.003225874 0.000000000
## [3,] 0.000000000 0.000000000 0.002913458
cov_1f = sigF*bbeta%*%t(bbeta)+sigeps
cov_1f
##                ge         ibm       mobil
## ge    0.004070402 0.001823896 0.001825714
## ibm   0.001823896 0.004630742 0.001406268
## mobil 0.001825714 0.001406268 0.004321127
#===================================
#Method 2: by formula "inv(X'X)*X'Y"
#===================================
ones = rep(1,N)
X = as.matrix(cbind(ones, stock.rets$Mkt_RF))
retdata1 = as.matrix(retdata[,c(6,7,8)]/100)
b_hat = solve(t(X)%*%X)%*%t(X)%*%retdata1
E_hat = retdata1 - X%*%b_hat
b_hat = as.matrix(b_hat[-1,])
diagD_hat = diag(t(E_hat)%*%E_hat)/(N-2)
cov_1f.1 = as.numeric(var(stock.rets$Mkt_RF))*b_hat%*%t(b_hat) + diag(diagD_hat); 
cov_1f.1
##                ge         ibm       mobil
## ge    0.004070402 0.001823896 0.001825714
## ibm   0.001823896 0.004630742 0.001406268
## mobil 0.001825714 0.001406268 0.004321127
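# Quick check: Methods 1 and 2 fit the same regression, so the two covariance
# estimates should agree up to numerical precision (silent if they do)
stopifnot(isTRUE(all.equal(cov_1f, cov_1f.1, check.attributes = FALSE)))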
#===================================================================
# Using FF 3 factor model to compute covariance matrix 
#===================================================================
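# The FF three-factor model assumed here is
#     R_i = alpha_i + b_i*Mkt_RF + s_i*SMB + h_i*HML + eps_i,
# with residuals uncorrelated across stocks, so the implied covariance matrix is
#     Sigma = t(B) %*% Sigma_F %*% B + D,
# where B (3 x 3) holds the factor loadings (factors in rows, stocks in columns),
# Sigma_F is the factor covariance matrix, and D the diagonal residual variances.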
# Method 1: by "lm" function
#============================
N <- dim(retdata)[1]
stock.rets<-retdata %>% select(c(2,3,4,6,7,8))/100
fit3 = lm(formula = cbind(ge, ibm, mobil)~Mkt_RF + SMB + HML, data=stock.rets)

sigF3 = as.matrix(var(cbind(stock.rets$Mkt_RF, 
                            stock.rets$SMB, 
                            stock.rets$HML)))
bbeta3 = as.matrix(fit3$coefficients)
bbeta3 = bbeta3[-1,]
bbeta3
##                  ge        ibm      mobil
## Mkt_RF  1.134331448  0.8050676  0.9803403
## SMB    -0.369412445 -0.3099907 -0.3727822
## HML     0.009630701 -0.2981744  0.3726475
sigeps3 = crossprod(fit3$residuals)/(N-4)
sigeps3 = diag(diag(sigeps3))
cov_3f = t(bbeta3) %*% sigF3 %*% (bbeta3) + sigeps3
cov_3f
##                ge         ibm       mobil
## ge    0.004079122 0.001905590 0.001929618
## ibm   0.001905590 0.004647834 0.001424956
## mobil 0.001929618 0.001424956 0.004335927
#===================================
#Method 2: by formula "inv(X'X)*X'Y"
#===================================
X.3 = cbind(ones, stock.rets$Mkt_RF, stock.rets$SMB, stock.rets$HML)
b_hat.3 = solve(t(X.3)%*%(X.3))%*%t(X.3)%*%retdata1
E_hat.3 = retdata1 - X.3%*%b_hat.3
b_hat.3 = as.matrix(b_hat.3[-1,])
diagD_hat.3 = diag(t(E_hat.3)%*%E_hat.3)/(N-4)
cov_3f.3 = t(b_hat.3) %*% sigF3 %*% b_hat.3 + diag(diagD_hat.3)   # same formula as cov_3f above
cov_3f.3
##                ge         ibm       mobil
## ge    0.004079122 0.001905590 0.001929618
## ibm   0.001905590 0.004647834 0.001424956
## mobil 0.001929618 0.001424956 0.004335927
cov_3f
##                ge         ibm       mobil
## ge    0.004079122 0.001905590 0.001929618
## ibm   0.001905590 0.004647834 0.001424956
## mobil 0.001929618 0.001424956 0.004335927
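# Quick check: with the matrix-product formula above, Method 2 should reproduce
# Method 1 up to numerical precision (silent if it does)
stopifnot(isTRUE(all.equal(cov_3f, cov_3f.3, check.attributes = FALSE)))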
#======================================================
# Create frontier function to plot efficient frontier
#======================================================
frontier <- function(return, Q) {
  # return: T x n matrix of asset returns; Q: covariance matrix used to evaluate risk
  n = ncol(return)
  # stack the first-order conditions of the mean-variance problem (see the note below)
  Ax <- rbind(2*cov(return), colMeans(return), rep(1, n))
  Ax <- cbind(Ax, rbind(t(tail(Ax, 2)), matrix(0, 2, 2)))
  r <- colMeans(return)
  rbase <- seq(min(r), max(r), length = 100)   # grid of target returns
  s <- sapply(rbase, function(x) {
    b0 <- c(rep(0, n), x, 1)
    y <- head(solve(Ax, b0), n)                # minimum-variance weights for target x
    sqrt(y %*% Q %*% y)                        # portfolio sd evaluated under Q
  })
  efficient.port <- list("er" = as.vector(rbase),
                         "sd" = as.vector(s))
  class(efficient.port) <- "portfolio"
  efficient.port
}
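
# What frontier() solves: for each target return x on the grid rbase, the weights
# come from the first-order conditions of
#     min  w' S w   subject to   mu' w = x   and   1' w = 1,
# with S = cov(return) and mu = colMeans(return). With Lagrange multipliers
# (l1, l2), these conditions are exactly the linear system solve(Ax, b0):
#     [ 2*S   mu   1 ] [ w  ]   [ 0 ]
#     [ mu'    0   0 ] [ l1 ] = [ x ]
#     [ 1'     0   0 ] [ l2 ]   [ 1 ]
# The reported standard deviation then evaluates those weights under the supplied
# covariance Q, i.e. sqrt(w' Q w), which is where the three frontiers below differ.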




#================================================================
# Use the different covariance matrices to plot efficient frontiers
#================================================================
#return = retdata1
Q.3f = cov_3f
Q.1f = cov_1f
# preview the stock return matrix used below
head(retdata1)
##              ge       ibm     mobil
## [1,] -0.011984 -0.059524 -0.014043
## [2,] -0.060377 -0.007004 -0.078431
## [3,]  0.066474  0.070303  0.215130
## [4,]  0.059621  0.044586  0.029961
## [5,] -0.035806 -0.025000  0.026667
## [6,] -0.038196  0.058777 -0.129870
Q = cov(retdata1)

#=========================================
# Overlay the three frontiers on one graph
#=========================================
xy.3f = frontier(retdata1, Q.3f)
xy.1f = frontier(retdata1, Q.1f)
xy    = frontier(retdata1, Q)

# collect the frontier coordinates into tibbles (tibble() avoids the deprecated
# as.tibble() call and names the columns directly)

xx <- tibble(s  = xy$sd,  s1 = xy.1f$sd, s3 = xy.3f$sd)
yy <- tibble(er = xy$er, er1 = xy.1f$er, er3 = xy.3f$er)

xy.all<-bind_cols(xx, yy)
xy.all
## # A tibble: 100 × 6
##         s     s1     s3      er     er1     er3
##     <dbl>  <dbl>  <dbl>   <dbl>   <dbl>   <dbl>
##  1 0.0645 0.0661 0.0660 0.00980 0.00980 0.00980
##  2 0.0641 0.0657 0.0655 0.00985 0.00985 0.00985
##  3 0.0637 0.0652 0.0651 0.00990 0.00990 0.00990
##  4 0.0632 0.0648 0.0647 0.00995 0.00995 0.00995
##  5 0.0628 0.0643 0.0642 0.0100  0.0100  0.0100 
##  6 0.0624 0.0639 0.0638 0.0101  0.0101  0.0101 
##  7 0.0620 0.0635 0.0634 0.0101  0.0101  0.0101 
##  8 0.0616 0.0631 0.0630 0.0102  0.0102  0.0102 
##  9 0.0612 0.0626 0.0626 0.0102  0.0102  0.0102 
## 10 0.0608 0.0622 0.0622 0.0102  0.0102  0.0102 
## # ℹ 90 more rows
class(xy.all) 
## [1] "tbl_df"     "tbl"        "data.frame"
# long format via reshape2::melt (kept for reference; the plots below use the
# wide xy.all directly)
xy.long <- melt(xy.all)
## No id variables; using all as measure variables
#type<-rep(c("hist", "1.factor", "3.factor"), c(100,100,100))
#xy.all<-data.frame(xx, yy)
#head(xy.all)
#write_csv(xy.all, "xy_all.csv")
#
#library(lattice)
#xyplot(yy ~ xx, xy.all, groups = xy.all$type, pch= 20)
#ggplot(xy.all, aes(x = xx, y = yy, colour = type))+
#  geom_line(type)

#plot(xx, yy)

# using plot 
plot(xy$sd, xy$er, type = 'l', col="red", xlim = c(0.03, 0.07))
lines(xy.1f$sd, xy.1f$er, col = "blue")
lines(xy.3f$sd, xy.3f$er, col = "black")
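# a legend helps tell the three overlaid frontiers apart (colours match the
# plot()/lines() calls above)
legend("bottomright", legend = c("historical covariance", "one-factor (CAPM)", "FF three-factor"),
       col = c("red", "blue", "black"), lty = 1, bty = "n")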

# using ggplot
ggplot(data = xy.all, aes(x = s, y = er)) +
  geom_point(color = "red", size = 0.3) +
  annotate(geom="text", x=0.045, y=0.0125, label="historical covariance",
           color="red", size= 4)+
  geom_point(aes(x = s1, y = er1), color = "blue", size = 0.3, shape = 2)+
  annotate(geom="text", x=0.058, y=0.013, label="capm covariance",
           color="blue", size= 4)+
  geom_point(aes(x = s3, y = er3), color = "black", size = 0.3, shape = 4)+
  annotate(geom="text", x=0.045, y=0.0145, label="FF3F covariance",
           color="black", size= 4)+
  labs(title = "Efficient Frontiers", x = "sd", y = "ret")
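
# An alternative sketch of the same figure in long format (what the commented-out
# lattice/ggplot code above was aiming at); `frontiers.long` is a helper object
# introduced here only for illustration.
frontiers.long <- bind_rows(
  tibble(model = "historical", sd = xy$sd,    er = xy$er),
  tibble(model = "one-factor", sd = xy.1f$sd, er = xy.1f$er),
  tibble(model = "FF3F",       sd = xy.3f$sd, er = xy.3f$er)
)
ggplot(frontiers.long, aes(x = sd, y = er, colour = model)) +
  geom_line() +
  labs(title = "Efficient Frontiers", x = "sd", y = "ret")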

Including Plots

You can also embed plots. Note that adding the echo = FALSE parameter to a code chunk prevents printing of the R code that generated the plot, so only the figure appears in the knitted document.
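
For example, a chunk written in the .Rmd source as

```{r pressure, echo=FALSE}
plot(pressure)
```

produces only the resulting figure in the knitted output (the chunk name and the built-in pressure dataset here are just for illustration).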