load("/home/boyazhang/repos/unifdist/code/beta_para_map_32_3.RData")
out <- as.data.frame(out)
colnames(out) <- c("n", "dim", "shape1", "shape2","ise")
# average the replicated ISE values at each (shape1, shape2) design point;
# the per-location mean and sd are stored as mse_mean and mse_sd
mse <- aggregate(out$ise, by = list(shape1 = out$shape1, shape2 = out$shape2), FUN = mean)
ise_sd <- aggregate(out$ise, by = list(shape1 = out$shape1, shape2 = out$shape2), FUN = sd)
mse <- cbind(mse, sd = ise_sd$x)
colnames(mse) <- c("shape1", "shape2", "mse_mean", "mse_sd")
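# (optional sanity check) the per-location means and sds are easier to interpret alongside
# the number of Monte Carlo replicates behind each (shape1, shape2) pair; a quick sketch
# using only the columns already in `out`:
reps <- aggregate(out$ise, by = list(shape1 = out$shape1, shape2 = out$shape2), FUN = length)
table(reps$x)  # replicate count per design location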
# output table sorted by mse_mean
head(mse[order(mse$mse_mean),], 20)
## shape1 shape2 mse_mean mse_sd
## 61 3.943567 4.632472 0.03559047 0.03338464
## 72 4.472485 5.555643 0.04091350 0.03824516
## 45 4.569969 3.448906 0.05494936 0.07961113
## 48 3.628172 3.882714 0.05674321 0.08840268
## 35 3.504561 2.541078 0.05856227 0.07916133
## 51 5.084801 4.034257 0.05863567 0.07013726
## 83 7.241970 6.472785 0.05956670 0.12137969
## 84 6.219199 6.535379 0.06578248 0.09052578
## 64 5.793940 4.910994 0.06637280 0.11064285
## 77 5.220332 5.862018 0.06644792 0.10545027
## 99 7.496164 7.567474 0.06994646 0.09890296
## 100 6.618014 7.609970 0.07031876 0.10391678
## 104 10.335462 7.959726 0.07535752 0.10912610
## 67 7.203422 5.058714 0.07665273 0.12085595
## 74 6.596990 5.699826 0.07987212 0.12369099
## 89 9.864269 6.809267 0.08082548 0.10553595
## 101 9.559792 7.635348 0.08299139 0.12214632
## 85 8.110467 6.561017 0.09180164 0.15367077
## 102 8.575584 7.738923 0.11205199 0.20393867
## 52 6.479930 4.071984 0.11477522 0.16276413
library(hetGP)
X <- as.matrix(out[, 3:4])  # design: (shape1, shape2)
Z <- out$ise                # response: ISE, modeled on the log scale below
nvar <- 2
## Model fitting
# settings <- list(return.hom = TRUE) # To keep homoskedastic model used for training
# model <- mleHetGP(X = X, Z = Z, lower = rep(10^(-4), nvar), upper = rep(50, nvar),
# covtype = "Gaussian", settings = settings)
# fit a heteroskedastic GP to log(ise)
model <- mleHetGP(X = X, Z = log(Z), lower = rep(10^(-4), nvar), upper = rep(50, nvar),
                  covtype = "Matern5_2", maxit = 1999)
## A quick view of the fit
summary(model)
## N = 20000 n = 200 d = 2
## Matern5_2 covariance lengthscale values of the main process: 1.767464 2.425737
## Variance/scale hyperparameter: 2.637736
## Summary of Lambda values:
## Min. 1st Qu. Median Mean 3rd Qu. Max.
## 0.01174 0.05617 0.13563 0.23565 0.42396 0.63336
## Estimated constant trend value: -0.7848893
## Matern5_2 covariance lengthscale values of the log-noise process: 2.115685 2.903651
## Nugget of the log-noise process: 1e-06
## Estimated constant trend value of the log-noise process: -2.042356
## MLE optimization:
## Log-likelihood = -18562.39 ; Nb of evaluations (obj, gradient) by L-BFGS-B: 129 129 ; message: CONVERGENCE: REL_REDUCTION_OF_F <= FACTR*EPSMCH
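# A homoskedastic baseline can help judge whether the input-dependent noise model is worth
# it; a sketch using hetGP's mleHomGP with the same design and search bounds (this
# comparison is an assumption, not part of the original fit):
# model_hom <- mleHomGP(X = X, Z = log(Z), lower = rep(10^(-4), nvar), upper = rep(50, nvar),
#                       covtype = "Matern5_2")
# summary(model_hom)  # compare its log-likelihood against the hetGP summary above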
xx <- seq(0, 15, length.out = 501)
xgrid <- as.matrix(expand.grid(xx,xx))
predictions <- predict(x = xgrid, object = model)
par(mfrow=c(1,1)); cols <- heat.colors(10)
## Display mean predictive surface
image(x = xx, y = xx, z = matrix(predictions$mean, ncol = length(xx)), xlab="shape1",ylab="shape2", col=cols,
main = "n=32,dim=3: mean surface with raw data ranks and lowest 20 prediction locs")
text(mse$shape1, mse$shape2, labels = rank(mse$mse_mean))
dat_pred <- data.frame(shape1 = xgrid[,1], shape2 = xgrid[,2], pred_mean = predictions$mean, pred_sd2 = predictions$sd2)
dat_pred_top <- head(dat_pred[order(dat_pred$pred_mean),], 20)
points(dat_pred_top[,1], dat_pred_top[,2])
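# Exact levels are hard to read off the heat map alone; a base-graphics sketch overlaying
# contours of the predicted log-mean surface (not in the original figure):
contour(x = xx, y = xx, z = matrix(predictions$mean, ncol = length(xx)), add = TRUE, nlevels = 8)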

head(dat_pred_top,1)
## shape1 shape2 pred_mean pred_sd2
## 73291 4.32 4.38 -3.815741 0.0171665
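# The model was fit to log(ise), so pred_mean is on the log scale (hence the -3.8 above).
# A sketch of a back-transform to the ISE scale, assuming a lognormal predictive
# distribution; predictions$nugs is hetGP's predicted noise variance at each grid point:
dat_pred$ise_scale <- exp(predictions$mean + (predictions$sd2 + predictions$nugs) / 2)
head(dat_pred[order(dat_pred$ise_scale), ], 5)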
image(x = xx, y = xx, z = matrix(predictions$sd2, ncol = length(xx)), xlab="shape1",ylab="shape2", col=cols, main = "sd2 surface")
text(mse$shape1, mse$shape2, labels = rank(mse$mse_sd))

#------------------------------------linear regression-------------------------------
# beta_m and beta_var are the mean and variance of a Beta(shape1, shape2) distribution
mse <- transform(mse, beta_m = shape1 / (shape1 + shape2),
                 beta_var = shape1 * shape2 / (shape1 + shape2 + 1) / (shape1 + shape2)^2,
                 y = log(mse_mean))
## plot beta mean vs. log(mse_mean)
# with raw data
plot(mse$beta_m, mse$y, xlab = "shape1/(shape1 + shape2)", ylab = "log(mse_mean)")

# with gp predictions
plot(xgrid[,1]/(xgrid[,1]+xgrid[,2]+.Machine$double.eps),predictions$mean,
xlab = "shape1/(shape1 + shape2)", ylab = "log(mse_mean)")

plot(mse$y, mse$mse_sd, xlab = "log(mse_mean)", ylab = "mse_sd")
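
# The header above promises a linear regression but only scatterplots appear; a sketch of
# the fit the transformed columns suggest (this exact specification is an assumption, not
# taken from the original analysis):
fit <- lm(y ~ beta_m + beta_var, data = mse)
summary(fit)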
