Random Forest and BRMS package

title: “R Notebook” output: html_notebook

Required Libraries

library(readr)
## Warning: package 'readr' was built under R version 4.4.3
library(dplyr) 
## 
## Attaching package: 'dplyr'
## The following objects are masked from 'package:stats':
## 
##     filter, lag
## The following objects are masked from 'package:base':
## 
##     intersect, setdiff, setequal, union
library(ggplot2) 
## Warning: package 'ggplot2' was built under R version 4.4.3
library(reshape2)
## Warning: package 'reshape2' was built under R version 4.4.2
library(gridExtra)
## Warning: package 'gridExtra' was built under R version 4.4.3
## 
## Attaching package: 'gridExtra'
## The following object is masked from 'package:dplyr':
## 
##     combine
library(inspectdf) 
## Warning: package 'inspectdf' was built under R version 4.4.3
library(cowplot)
## Warning: package 'cowplot' was built under R version 4.4.2
library(boot)
library(ISLR)
## Warning: package 'ISLR' was built under R version 4.4.3
library(gtools)
## Warning: package 'gtools' was built under R version 4.4.3
## 
## Attaching package: 'gtools'
## The following objects are masked from 'package:boot':
## 
##     inv.logit, logit
library(randomForest)
## Warning: package 'randomForest' was built under R version 4.4.3
## randomForest 4.7-1.2
## Type rfNews() to see new features/changes/bug fixes.
## 
## Attaching package: 'randomForest'
## The following object is masked from 'package:gridExtra':
## 
##     combine
## The following object is masked from 'package:ggplot2':
## 
##     margin
## The following object is masked from 'package:dplyr':
## 
##     combine
library(janitor)
## Warning: package 'janitor' was built under R version 4.4.3
## 
## Attaching package: 'janitor'
## The following objects are masked from 'package:stats':
## 
##     chisq.test, fisher.test
library(caTools)
## Warning: package 'caTools' was built under R version 4.4.3
library(rattle)
## Warning: package 'rattle' was built under R version 4.4.3
## Loading required package: tibble
## Loading required package: bitops
## Rattle: A free graphical interface for data science with R.
## Version 5.5.1 Copyright (c) 2006-2021 Togaware Pty Ltd.
## Type 'rattle()' to shake, rattle, and roll your data.
## 
## Attaching package: 'rattle'
## The following object is masked from 'package:randomForest':
## 
##     importance
library(GGally)
## Warning: package 'GGally' was built under R version 4.4.3
library(rpart.plot)
## Warning: package 'rpart.plot' was built under R version 4.4.3
## Loading required package: rpart
library(rpart)

# Interactive Plot packages; 
library(viridisLite)
#library(dp)
library(plotly)
## Warning: package 'plotly' was built under R version 4.4.3
## 
## Attaching package: 'plotly'
## The following object is masked from 'package:ggplot2':
## 
##     last_plot
## The following object is masked from 'package:stats':
## 
##     filter
## The following object is masked from 'package:graphics':
## 
##     layout
library(magrittr)


#Prediction Packages: 
library(pROC)
## Warning: package 'pROC' was built under R version 4.4.2
## Type 'citation("pROC")' for a citation.
## 
## Attaching package: 'pROC'
## The following objects are masked from 'package:stats':
## 
##     cov, smooth, var
library(caret)
## Warning: package 'caret' was built under R version 4.4.2
## Loading required package: lattice
## 
## Attaching package: 'lattice'
## The following object is masked from 'package:boot':
## 
##     melanoma
library(MLmetrics)
## Warning: package 'MLmetrics' was built under R version 4.4.3
## 
## Attaching package: 'MLmetrics'
## The following objects are masked from 'package:caret':
## 
##     MAE, RMSE
## The following object is masked from 'package:base':
## 
##     Recall
# brms related packages 
library(brms)
## Warning: package 'brms' was built under R version 4.4.3
## Loading required package: Rcpp
## Loading 'brms' package (version 2.23.0). Useful instructions
## can be found by typing help('brms'). A more detailed introduction
## to the package is available through vignette('brms_overview').
## 
## Attaching package: 'brms'
## The following objects are masked from 'package:gtools':
## 
##     ddirichlet, rdirichlet
## The following object is masked from 'package:stats':
## 
##     ar
library(cmdstanr)
## Warning: package 'cmdstanr' was built under R version 4.4.3
## This is cmdstanr version 0.9.0
## - CmdStanR documentation and vignettes: mc-stan.org/cmdstanr
## - CmdStan path: C:/Users/aidan/.cmdstan/cmdstan-2.37.0
## - CmdStan version: 2.37.0
library(pkgbuild)
## Warning: package 'pkgbuild' was built under R version 4.4.3
library(bayesplot)
## Warning: package 'bayesplot' was built under R version 4.4.3
## This is bayesplot version 1.14.0
## - Online documentation and vignettes at mc-stan.org/bayesplot
## - bayesplot theme set to bayesplot::theme_default()
##    * Does _not_ affect other ggplot2 plots
##    * See ?bayesplot_theme_set for details on theme setting
## 
## Attaching package: 'bayesplot'
## The following object is masked from 'package:brms':
## 
##     rhat
library(loo)
## Warning: package 'loo' was built under R version 4.4.3
## This is loo version 2.8.0
## - Online documentation and vignettes at mc-stan.org/loo
## - As of v2.0.0 loo defaults to 1 core but we recommend using as many as possible. Use the 'cores' argument or set options(mc.cores = NUM_CORES) for an entire session.
## - Windows 10 users: loo may be very slow if 'mc.cores' is set in your .Rprofile file (see https://github.com/stan-dev/loo/issues/94).

Load the Data

wine <- read_delim("winequality-red.csv", delim = ";", quote = "")
## Rows: 1599 Columns: 12
## ── Column specification ────────────────────────────────────────────────────────
## Delimiter: ";"
## dbl (12): "fixed acidity, ""volatile acidity"", ""citric acid"", ""residual ...
## 
## ℹ Use `spec()` to retrieve the full column specification for this data.
## ℹ Specify the column types or set `show_col_types = FALSE` to quiet this message.
names(wine) <- gsub('"', '', names(wine))   # drop all double quotes
View(wine)
str(wine)
## spc_tbl_ [1,599 × 12] (S3: spec_tbl_df/tbl_df/tbl/data.frame)
##  $ fixed acidity       : num [1:1599] 7.4 7.8 7.8 11.2 7.4 7.4 7.9 7.3 7.8 7.5 ...
##  $ volatile acidity    : num [1:1599] 0.7 0.88 0.76 0.28 0.7 0.66 0.6 0.65 0.58 0.5 ...
##  $ citric acid         : num [1:1599] 0 0 0.04 0.56 0 0 0.06 0 0.02 0.36 ...
##  $ residual sugar      : num [1:1599] 1.9 2.6 2.3 1.9 1.9 1.8 1.6 1.2 2 6.1 ...
##  $ chlorides           : num [1:1599] 0.076 0.098 0.092 0.075 0.076 0.075 0.069 0.065 0.073 0.071 ...
##  $ free sulfur dioxide : num [1:1599] 11 25 15 17 11 13 15 15 9 17 ...
##  $ total sulfur dioxide: num [1:1599] 34 67 54 60 34 40 59 21 18 102 ...
##  $ density             : num [1:1599] 0.998 0.997 0.997 0.998 0.998 ...
##  $ pH                  : num [1:1599] 3.51 3.2 3.26 3.16 3.51 3.51 3.3 3.39 3.36 3.35 ...
##  $ sulphates           : num [1:1599] 0.56 0.68 0.65 0.58 0.56 0.56 0.46 0.47 0.57 0.8 ...
##  $ alcohol             : num [1:1599] 9.4 9.8 9.8 9.8 9.4 9.4 9.4 10 9.5 10.5 ...
##  $ quality             : num [1:1599] 5 5 5 6 5 5 5 7 7 5 ...
##  - attr(*, "spec")=
##   .. cols(
##   ..   `"fixed acidity` = col_double(),
##   ..   `""volatile acidity""` = col_double(),
##   ..   `""citric acid""` = col_double(),
##   ..   `""residual sugar""` = col_double(),
##   ..   `""chlorides""` = col_double(),
##   ..   `""free sulfur dioxide""` = col_double(),
##   ..   `""total sulfur dioxide""` = col_double(),
##   ..   `""density""` = col_double(),
##   ..   `""pH""` = col_double(),
##   ..   `""sulphates""` = col_double(),
##   ..   `""alcohol""` = col_double(),
##   ..   `""quality"""` = col_double()
##   .. )
##  - attr(*, "problems")=<externalptr>
# Fixing the column names 

# Re naming Quality class to represent 1 of 3 categories: "Poor" "Normal " Excellent"
wine <- wine %>%
  mutate(quality = case_when(
    quality <= 4 ~ "Poor",
    quality <= 6 ~ "Normal",
    quality >= 7 ~ "Excellent"
  )) %>%
  mutate(quality = factor(quality, levels = c("Poor", "Normal", "Excellent"), ordered = TRUE))


# Fixing the variable names to stop error code when calling random forests
wine<- clean_names(wine)
str(wine) # Now variables have "_" instead of " " between words
## tibble [1,599 × 12] (S3: tbl_df/tbl/data.frame)
##  $ fixed_acidity       : num [1:1599] 7.4 7.8 7.8 11.2 7.4 7.4 7.9 7.3 7.8 7.5 ...
##  $ volatile_acidity    : num [1:1599] 0.7 0.88 0.76 0.28 0.7 0.66 0.6 0.65 0.58 0.5 ...
##  $ citric_acid         : num [1:1599] 0 0 0.04 0.56 0 0 0.06 0 0.02 0.36 ...
##  $ residual_sugar      : num [1:1599] 1.9 2.6 2.3 1.9 1.9 1.8 1.6 1.2 2 6.1 ...
##  $ chlorides           : num [1:1599] 0.076 0.098 0.092 0.075 0.076 0.075 0.069 0.065 0.073 0.071 ...
##  $ free_sulfur_dioxide : num [1:1599] 11 25 15 17 11 13 15 15 9 17 ...
##  $ total_sulfur_dioxide: num [1:1599] 34 67 54 60 34 40 59 21 18 102 ...
##  $ density             : num [1:1599] 0.998 0.997 0.997 0.998 0.998 ...
##  $ p_h                 : num [1:1599] 3.51 3.2 3.26 3.16 3.51 3.51 3.3 3.39 3.36 3.35 ...
##  $ sulphates           : num [1:1599] 0.56 0.68 0.65 0.58 0.56 0.56 0.46 0.47 0.57 0.8 ...
##  $ alcohol             : num [1:1599] 9.4 9.8 9.8 9.8 9.4 9.4 9.4 10 9.5 10.5 ...
##  $ quality             : Ord.factor w/ 3 levels "Poor"<"Normal"<..: 2 2 2 2 2 2 2 3 3 2 ...
#### Test/Train sets ### 70% split
set.seed(123)
# Splitting the data Train/Test set - For model prediction purposes
split <- sample.split(wine$quality, SplitRatio = 0.7) 
wine_train <- subset(wine, split == TRUE)
wine_test <- subset(wine, split == FALSE)

str(wine_train)
## tibble [1,119 × 12] (S3: tbl_df/tbl/data.frame)
##  $ fixed_acidity       : num [1:1119] 7.4 7.8 7.4 7.9 7.3 6.7 7.5 7.8 8.9 8.9 ...
##  $ volatile_acidity    : num [1:1119] 0.7 0.76 0.66 0.6 0.65 0.58 0.5 0.61 0.62 0.62 ...
##  $ citric_acid         : num [1:1119] 0 0.04 0 0.06 0 0.08 0.36 0.29 0.18 0.19 ...
##  $ residual_sugar      : num [1:1119] 1.9 2.3 1.8 1.6 1.2 1.8 6.1 1.6 3.8 3.9 ...
##  $ chlorides           : num [1:1119] 0.076 0.092 0.075 0.069 0.065 0.097 0.071 0.114 0.176 0.17 ...
##  $ free_sulfur_dioxide : num [1:1119] 11 15 13 15 15 15 17 9 52 51 ...
##  $ total_sulfur_dioxide: num [1:1119] 34 54 40 59 21 65 102 29 145 148 ...
##  $ density             : num [1:1119] 0.998 0.997 0.998 0.996 0.995 ...
##  $ p_h                 : num [1:1119] 3.51 3.26 3.51 3.3 3.39 3.28 3.35 3.26 3.16 3.17 ...
##  $ sulphates           : num [1:1119] 0.56 0.65 0.56 0.46 0.47 0.54 0.8 1.56 0.88 0.93 ...
##  $ alcohol             : num [1:1119] 9.4 9.8 9.4 9.4 10 9.2 10.5 9.1 9.2 9.2 ...
##  $ quality             : Ord.factor w/ 3 levels "Poor"<"Normal"<..: 2 2 2 2 3 2 2 2 2 2 ...
head(wine_train)
## # A tibble: 6 × 12
##   fixed_acidity volatile_acidity citric_acid residual_sugar chlorides
##           <dbl>            <dbl>       <dbl>          <dbl>     <dbl>
## 1           7.4             0.7         0               1.9     0.076
## 2           7.8             0.76        0.04            2.3     0.092
## 3           7.4             0.66        0               1.8     0.075
## 4           7.9             0.6         0.06            1.6     0.069
## 5           7.3             0.65        0               1.2     0.065
## 6           6.7             0.58        0.08            1.8     0.097
## # ℹ 7 more variables: free_sulfur_dioxide <dbl>, total_sulfur_dioxide <dbl>,
## #   density <dbl>, p_h <dbl>, sulphates <dbl>, alcohol <dbl>, quality <ord>

Random Forest

model <- randomForest(quality ~., data = wine_train, iter = 6)

model 
## 
## Call:
##  randomForest(formula = quality ~ ., data = wine_train, iter = 6) 
##                Type of random forest: classification
##                      Number of trees: 500
## No. of variables tried at each split: 3
## 
##         OOB estimate of  error rate: 13.32%
## Confusion matrix:
##           Poor Normal Excellent class.error
## Poor         0     43         1  1.00000000
## Normal       3    893        27  0.03250271
## Excellent    0     75        77  0.49342105
# Output of model under 3 class specification, unfortunately, due to the small size of class "poor" the model was only able to predict 1 in 44 wines correctly. Assigning 43 to class "normal" which is why there is an extremely large error rate of 97.7% for the "poor" class. A similar issue is occurring with the "Excellent" Class, however to a lesser degree, i.e. incorrectly classifies 73 or the (79+73) results, hence an error rate of 48%. 

  # for the above reason, we will consider a slight alternative way to modelling the issue, considering now 2 classes "Good" "Poor/Normal" -> Already with this type of method is we will see that the difference between a wine which falls into the lower end of the "good" class and a wine which is considered the upper limit of the "Normal" class, is indeed queit small. 

# another method, is re-running the same model, but with balanced sample sizing. this yeilds the following results. It effectively means that each model samples an equal number of observations from each class (Poor , Normal, Excellent) regardless of there weighting on the total model. 
model_resample <- randomForest(quality ~ ., data = wine_train, 
                      sampsize = rep(min(table(wine_train$quality)), 3))

model_resample # Overall accuracy has dropped, the model is no longer weighted towards the normal class. The model now correctly classifies 1 - Class error rate for all of the classes. The confusion matrix shows that there is a large amount of missclassification betweeen the normal and the excellent class. 
## 
## Call:
##  randomForest(formula = quality ~ ., data = wine_train, sampsize = rep(min(table(wine_train$quality)),      3)) 
##                Type of random forest: classification
##                      Number of trees: 500
## No. of variables tried at each split: 3
## 
##         OOB estimate of  error rate: 32.08%
## Confusion matrix:
##           Poor Normal Excellent class.error
## Poor        27     15         2   0.3863636
## Normal     111    604       208   0.3456121
## Excellent    6     17       129   0.1513158
    # Later to check if 500 trees is sufficient. 

# Comparison of model Prediction
par(mfrow=c(1,2))
plot(model_resample$predicted, main = "Predictive Model", ylim = c(0, 1000))
plot(wine_train$quality, main = "observed Model", ylim = c(0, 1000))

# Can see that there is less weight assigned to the normal class, and more spread from this class between "Poor" and "excellent" with more weighting put on normal values -> excellent (205 from normal -> Excellent), compared with 103 from normal to excellent 

#OOB => Out Of Bag error rate, 
#1 - 00B => how many correctly OOB samples where correctly classified 

# Visual plots 
varImpPlot(model_resample)
# Shows how much each variable decreases node impurity (Gini) across all trees. Higher the value the more important the variable is in classifying wine.

#Ranking: 
  #1). Sulphates (Most influential feature - often linked to wine presevation and flavor, higher sulphates can signal better quality wines (to a degree))
  #2). Alcohol: Strong positive relationship with wine quality, higher alcohol typically correlates with higher quality ratings
  #3). Volatile acidity: Very important negative factor, high volatile acidity usually worsens the taste and reduces quality. 

#4). Helps distrigush between mid and high quality wines. 

#5). Moderate importance, relates to oxidation and wine stability. 

#6-11 less influential but all of these combined help us fine tune the model. 

df_train <- as.data.frame(wine_train)
par(mfrow = c(1, 3))

partialPlot(model_resample, df_train, "alcohol", which.class = "Excellent", main = "Alcohol")
partialPlot(model_resample, df_train, "volatile_acidity", which.class = "Excellent", main = "Volatile Acidity")
partialPlot(model_resample, df_train, "sulphates", which.class = "Excellent", main = "Sulphates")

par(mfrow = c(1, 1))

pred <- predict(model, wine_test)
table(pred, wine_test$quality)
##            
## pred        Poor Normal Excellent
##   Poor         0      0         0
##   Normal      19    390        38
##   Excellent    0      6        27
lvls  <- levels(wine_test$quality)  # should be c("Poor","Normal","Excellent")
pred  <- factor(pred, levels = lvls, ordered = TRUE)
print("Accuracy of Model")
## [1] "Accuracy of Model"
mean(pred == wine_test$quality)
## [1] 0.86875
# Seeing if 500 Trees is enough for optimal classification we can plot the error rates, data frame which formats the error data for ggplot, based off err.rate matrix. Each row represents error rates when making the random Forrest. 
 # Shows the error rates with the variables, we see random forest error rates decreases when we add more trees, we recreate the error rates with a random forests of 1000 trees to see if adding more trees helps decrease OOB. 

## When 2000 Trees where used
set.seed(123)
model_resample_1000_trees <- randomForest(quality ~ ., data = wine_train, ntree=2000, 
                      sampsize = rep(min(table(wine_train$quality)), 3))
model_resample_1000_trees
## 
## Call:
##  randomForest(formula = quality ~ ., data = wine_train, ntree = 2000,      sampsize = rep(min(table(wine_train$quality)), 3)) 
##                Type of random forest: classification
##                      Number of trees: 2000
## No. of variables tried at each split: 3
## 
##         OOB estimate of  error rate: 31.37%
## Confusion matrix:
##           Poor Normal Excellent class.error
## Poor        27     15         2   0.3863636
## Normal     103    613       207   0.3358613
## Excellent    6     18       128   0.1578947
model_resample
## 
## Call:
##  randomForest(formula = quality ~ ., data = wine_train, sampsize = rep(min(table(wine_train$quality)),      3)) 
##                Type of random forest: classification
##                      Number of trees: 500
## No. of variables tried at each split: 3
## 
##         OOB estimate of  error rate: 32.08%
## Confusion matrix:
##           Poor Normal Excellent class.error
## Poor        27     15         2   0.3863636
## Normal     111    604       208   0.3456121
## Excellent    6     17       129   0.1513158
# We can see that OOB hasn't increased, now looking at the error rate plots 


oob.error.data <- data.frame(
  Trees = rep(1:nrow(model_resample_1000_trees$err.rate), times=4),
  Type = rep(c("OOB", "Poor", "Normal", "Excellent"), each=nrow(model_resample_1000_trees$err.rate)), 
  Error = c(model_resample_1000_trees$err.rate[,"OOB"], model_resample_1000_trees$err.rate[,"Poor"], model_resample_1000_trees$err.rate[,"Normal"], model_resample_1000_trees$err.rate[,"Excellent"])
)
head(oob.error.data)
##   Trees Type     Error
## 1     1  OOB 0.4805970
## 2     2  OOB 0.4622468
## 3     3  OOB 0.4606335
## 4     4  OOB 0.4487410
## 5     5  OOB 0.4354839
## 6     6  OOB 0.4109221
ggplot(data = oob.error.data, aes(x=Trees, y=Error)) +
  geom_line(aes(color=Type))

## Code to see the extreme, when 10,000 tree's where used
set.seed(123)
model_resample_1000_trees <- randomForest(quality ~ ., data = wine_train, ntree=10000, 
                      sampsize = rep(min(table(wine_train$quality)), 3))
model_resample_1000_trees
## 
## Call:
##  randomForest(formula = quality ~ ., data = wine_train, ntree = 10000,      sampsize = rep(min(table(wine_train$quality)), 3)) 
##                Type of random forest: classification
##                      Number of trees: 10000
## No. of variables tried at each split: 3
## 
##         OOB estimate of  error rate: 31.55%
## Confusion matrix:
##           Poor Normal Excellent class.error
## Poor        25     16         3   0.4318182
## Normal     101    612       210   0.3369447
## Excellent    6     17       129   0.1513158
model_resample
## 
## Call:
##  randomForest(formula = quality ~ ., data = wine_train, sampsize = rep(min(table(wine_train$quality)),      3)) 
##                Type of random forest: classification
##                      Number of trees: 500
## No. of variables tried at each split: 3
## 
##         OOB estimate of  error rate: 32.08%
## Confusion matrix:
##           Poor Normal Excellent class.error
## Poor        27     15         2   0.3863636
## Normal     111    604       208   0.3456121
## Excellent    6     17       129   0.1513158
# We can see that OOB hasn't increased, now looking at the error rate plots 

oob.error.data <- data.frame(
  Trees = rep(1:nrow(model_resample_1000_trees$err.rate), times=4),
  Type = rep(c("OOB", "Poor", "Normal", "Excellent"), each=nrow(model_resample_1000_trees$err.rate)), 
  Error = c(model_resample_1000_trees$err.rate[,"OOB"], model_resample_1000_trees$err.rate[,"Poor"], model_resample_1000_trees$err.rate[,"Normal"], model_resample_1000_trees$err.rate[,"Excellent"])
)
head(oob.error.data)
##   Trees Type     Error
## 1     1  OOB 0.4805970
## 2     2  OOB 0.4622468
## 3     3  OOB 0.4606335
## 4     4  OOB 0.4487410
## 5     5  OOB 0.4354839
## 6     6  OOB 0.4109221
ggplot(data = oob.error.data, aes(x=Trees, y=Error)) +
  geom_line(aes(color=Type))

model_resample_1000_trees <- randomForest(quality ~ ., data = wine_train, proximity = TRUE, importance = TRUE,ntree=2000, 
                      sampsize = rep(min(table(wine_train$quality)), 3))
model_resample_1000_trees
## 
## Call:
##  randomForest(formula = quality ~ ., data = wine_train, proximity = TRUE,      importance = TRUE, ntree = 2000, sampsize = rep(min(table(wine_train$quality)),          3)) 
##                Type of random forest: classification
##                      Number of trees: 2000
## No. of variables tried at each split: 3
## 
##         OOB estimate of  error rate: 31.9%
## Confusion matrix:
##           Poor Normal Excellent class.error
## Poor        25     17         2   0.4318182
## Normal     108    607       208   0.3423619
## Excellent    6     16       130   0.1447368
# Considering if the number of variables we tried at each step where sufficient
oob.values <- vector(length = 10)
for (i in 1:10){
  temp.model <- randomForest(quality~., data = wine_train, mtry=i, ntree=2000)
  oob.values[i]<-temp.model$err.rate[nrow(temp.model$err.rate),1]
}
head(oob.values)
## [1] 0.1295800 0.1313673 0.1304736 0.1322609 0.1304736 0.1340483
# 3rd value has the lowest out of bag error rate, so default value under the model was optimal. 
# 1. Compute the distance matrix
distance.matrix <- dist(1 - model_resample_1000_trees$proximity)

# 2. Classical MDS (multidimensional scaling)
mds.stuff <- cmdscale(distance.matrix, eig = TRUE, x.ret = TRUE)

# 3. Percentage of variance explained
mds.var.per <- round(mds.stuff$eig / sum(mds.stuff$eig) * 100, 1)

# 4. Extract MDS coordinates
mds.values <- mds.stuff$points
head(mds.values)
##         [,1]       [,2]
## 1 -1.4872462 -1.4696685
## 2 -1.4881697  0.4042023
## 3 -1.7587170 -1.0856461
## 4 -1.8965049 -0.2562595
## 5 -0.8869568 -1.7742413
## 6 -2.3455167  0.3867852
# 5. Create plotting data frame
mds.data <- data.frame(
  Sample = rownames(mds.values),
  X = mds.values[, 1],
  Y = mds.values[, 2]
 )

# 6. Plot in ggplot
library(ggplot2)
ggplot(data = mds.data, aes(x = X, y = Y, colour = "red")) +
  geom_point(size = 3, alpha = 0.8) +
  theme_bw() +
  xlab(paste("MDS1 -", mds.var.per[1], "%")) +
  ylab(paste("MDS2 -", mds.var.per[2], "%")) +
  ggtitle("MDS Plot using (1 - Random Forest Proximities)")

#Plotting the trees 
getTree(model_resample_1000_trees, k = 5, labelVar = TRUE)
##    left daughter right daughter            split var split point status
## 1              2              3          citric_acid     0.31500      1
## 2              4              5  free_sulfur_dioxide     6.50000      1
## 3              6              7            sulphates     0.57500      1
## 4              8              9              density     0.99368      1
## 5             10             11                  p_h     3.40000      1
## 6             12             13 total_sulfur_dioxide     8.00000      1
## 7             14             15            chlorides     0.11850      1
## 8              0              0                 <NA>     0.00000     -1
## 9             16             17              alcohol    11.25000      1
## 10            18             19          citric_acid     0.01500      1
## 11            20             21            sulphates     0.68000      1
## 12             0              0                 <NA>     0.00000     -1
## 13            22             23       residual_sugar     1.95000      1
## 14            24             25            chlorides     0.09150      1
## 15            26             27 total_sulfur_dioxide    18.00000      1
## 16             0              0                 <NA>     0.00000     -1
## 17            28             29 total_sulfur_dioxide    13.50000      1
## 18             0              0                 <NA>     0.00000     -1
## 19             0              0                 <NA>     0.00000     -1
## 20            30             31  free_sulfur_dioxide     7.50000      1
## 21            32             33              alcohol    10.05000      1
## 22             0              0                 <NA>     0.00000     -1
## 23             0              0                 <NA>     0.00000     -1
## 24            34             35              density     0.99538      1
## 25            36             37     volatile_acidity     0.45500      1
## 26             0              0                 <NA>     0.00000     -1
## 27            38             39                  p_h     2.88000      1
## 28             0              0                 <NA>     0.00000     -1
## 29             0              0                 <NA>     0.00000     -1
## 30            40             41        fixed_acidity     6.30000      1
## 31            42             43            sulphates     0.65500      1
## 32             0              0                 <NA>     0.00000     -1
## 33             0              0                 <NA>     0.00000     -1
## 34             0              0                 <NA>     0.00000     -1
## 35            44             45     volatile_acidity     0.34000      1
## 36             0              0                 <NA>     0.00000     -1
## 37             0              0                 <NA>     0.00000     -1
## 38             0              0                 <NA>     0.00000     -1
## 39             0              0                 <NA>     0.00000     -1
## 40             0              0                 <NA>     0.00000     -1
## 41             0              0                 <NA>     0.00000     -1
## 42            46             47  free_sulfur_dioxide    32.00000      1
## 43             0              0                 <NA>     0.00000     -1
## 44            48             49       residual_sugar     3.20000      1
## 45            50             51     volatile_acidity     0.42750      1
## 46            52             53 total_sulfur_dioxide    96.00000      1
## 47            54             55                  p_h     3.64000      1
## 48            56             57       residual_sugar     1.55000      1
## 49             0              0                 <NA>     0.00000     -1
## 50            58             59              density     0.99688      1
## 51            60             61              density     0.99905      1
## 52             0              0                 <NA>     0.00000     -1
## 53             0              0                 <NA>     0.00000     -1
## 54             0              0                 <NA>     0.00000     -1
## 55             0              0                 <NA>     0.00000     -1
## 56            62             63                  p_h     3.28500      1
## 57             0              0                 <NA>     0.00000     -1
## 58             0              0                 <NA>     0.00000     -1
## 59            64             65        fixed_acidity    11.05000      1
## 60             0              0                 <NA>     0.00000     -1
## 61            66             67              alcohol     9.70000      1
## 62             0              0                 <NA>     0.00000     -1
## 63             0              0                 <NA>     0.00000     -1
## 64            68             69 total_sulfur_dioxide    12.00000      1
## 65             0              0                 <NA>     0.00000     -1
## 66             0              0                 <NA>     0.00000     -1
## 67             0              0                 <NA>     0.00000     -1
## 68             0              0                 <NA>     0.00000     -1
## 69             0              0                 <NA>     0.00000     -1
##    prediction
## 1        <NA>
## 2        <NA>
## 3        <NA>
## 4        <NA>
## 5        <NA>
## 6        <NA>
## 7        <NA>
## 8   Excellent
## 9        <NA>
## 10       <NA>
## 11       <NA>
## 12  Excellent
## 13       <NA>
## 14       <NA>
## 15       <NA>
## 16       Poor
## 17       <NA>
## 18       Poor
## 19     Normal
## 20       <NA>
## 21       <NA>
## 22     Normal
## 23       Poor
## 24       <NA>
## 25       <NA>
## 26       Poor
## 27       <NA>
## 28       Poor
## 29     Normal
## 30       <NA>
## 31       <NA>
## 32     Normal
## 33  Excellent
## 34  Excellent
## 35       <NA>
## 36  Excellent
## 37     Normal
## 38       Poor
## 39     Normal
## 40       Poor
## 41     Normal
## 42       <NA>
## 43     Normal
## 44       <NA>
## 45       <NA>
## 46       <NA>
## 47       <NA>
## 48       <NA>
## 49     Normal
## 50       <NA>
## 51       <NA>
## 52       Poor
## 53  Excellent
## 54     Normal
## 55       Poor
## 56       <NA>
## 57  Excellent
## 58     Normal
## 59       <NA>
## 60  Excellent
## 61       <NA>
## 62  Excellent
## 63     Normal
## 64       <NA>
## 65     Normal
## 66     Normal
## 67  Excellent
## 68     Normal
## 69  Excellent
# Extraction of 1 tree and convertion to visual output:

# Build a single decision tree to visualize 
tree_model <- rpart(quality ~ ., data = wine_train, method = "class")

# Plot the tree
rpart.plot(tree_model, type = 3, extra = 104, fallen.leaves = TRUE)

# Better version of above plot
rpart.plot(tree_model,
           type = 3,           # Draw split labels above the branches
           extra = 104,        # Show predicted class + prob + % of obs
           fallen.leaves = TRUE,
           box.palette = "Blues", # Colour palette
           branch.lty = 3,        # Dotted branches
           shadow.col = "gray",   # Add slight shadow for clarity
           main = "Decision Tree for Wine Quality Classification")

# Presentation of the nodal tree splitting
fancyRpartPlot(tree_model)

head(model_resample_1000_trees$proximity)
##           1          2          3         4          5          6           7
## 1 1.0000000 0.11342466 0.77710177 0.1756018 0.16712707 0.13741722 0.009366391
## 2 0.1134247 1.00000000 0.09939594 0.1313077 0.03875433 0.19256018 0.045554336
## 3 0.7771018 0.09939594 1.00000000 0.2500000 0.21912073 0.19524336 0.012721239
## 4 0.1756018 0.13130765 0.25000000 1.0000000 0.30932785 0.35120350 0.017505470
## 5 0.1671271 0.03875433 0.21912073 0.3093278 1.00000000 0.09401114 0.006925208
## 6 0.1374172 0.19256018 0.19524336 0.3512035 0.09401114 1.00000000 0.020925110
##            8           9          10          11          12         13
## 1 0.04116356 0.009289617 0.008205689 0.003474635 0.022626932 0.17835366
## 2 0.18102508 0.153011394 0.149619151 0.029737206 0.210612691 0.02896341
## 3 0.04355017 0.027412281 0.025881057 0.004878049 0.040420819 0.21505376
## 4 0.05122616 0.043360434 0.043383948 0.013821700 0.080371788 0.16212121
## 5 0.01589496 0.004820937 0.004810997 0.001762115 0.009708738 0.12592593
## 6 0.09879254 0.116748500 0.117067834 0.019594122 0.137741047 0.15781487
##           14          15          16         17         18        19        20
## 1 0.06988353 0.056136489 0.029330382 0.14301491 0.04928018 0.7224986 0.1795154
## 2 0.01929438 0.144887917 0.178021978 0.02413604 0.07197802 0.1439560 0.1825613
## 3 0.08533185 0.082274986 0.042682927 0.15982242 0.06055556 0.6463415 0.1715389
## 4 0.10077093 0.044189853 0.066959385 0.12957900 0.09325288 0.1957354 0.1629428
## 5 0.02439024 0.008275862 0.009742519 0.11103400 0.02216066 0.1931897 0.1312155
## 6 0.10765816 0.068681319 0.099114064 0.19447514 0.12693157 0.2201987 0.3218202
##           21         22         23         24         25          26
## 1 0.26414058 0.04008786 0.12945694 0.09221425 0.11634757 0.012061404
## 2 0.14675396 0.47600872 0.04746318 0.05205479 0.05102041 0.034858388
## 3 0.23370166 0.07004964 0.18687982 0.08628319 0.07555556 0.015977961
## 4 0.07080610 0.12527233 0.19324986 0.04052574 0.07759883 0.010331702
## 5 0.09896194 0.02831492 0.07633588 0.05000000 0.21611722 0.004135079
## 6 0.14599341 0.18626374 0.17362637 0.03469163 0.07782673 0.018072289
##            27         28          29          30         31         32
## 1 0.007146784 0.26512651 0.032400589 0.017630854 0.03931340 0.06006674
## 2 0.106790800 0.02620087 0.007225434 0.237677985 0.04615385 0.00770077
## 3 0.007722008 0.21787093 0.034277198 0.018262313 0.04836020 0.07469342
## 4 0.022963368 0.14324619 0.040995608 0.032258065 0.06040637 0.09636564
## 5 0.005547850 0.23638870 0.043557169 0.001392758 0.01736111 0.08025122
## 6 0.042447630 0.08342481 0.046920821 0.082644628 0.10082645 0.10365854
##           33         34         35         36          37         38        39
## 1 0.05082873 0.03703704 0.11496150 0.07560706 0.023730684 0.02373068 0.2318681
## 2 0.02032967 0.17243273 0.07696507 0.05537281 0.069436851 0.21722436 0.1239083
## 3 0.06725959 0.05204873 0.12679162 0.08185841 0.036524626 0.04542936 0.3151815
## 4 0.08406593 0.05540318 0.08015267 0.05798687 0.073304158 0.09956236 0.2320261
## 5 0.03272981 0.01947149 0.10359116 0.08327550 0.006232687 0.02700831 0.1089655
## 6 0.13661504 0.08172281 0.08786381 0.05956977 0.124656782 0.20748487 0.2058178
##            40         41         42          43         44        45         46
## 1 0.002735230 0.18266594 0.13925439 0.028650138 0.23734610 0.4199016 0.13384956
## 2 0.029923830 0.17701525 0.03050109 0.222647702 0.16144414 0.2595212 0.03892544
## 3 0.005503577 0.25399449 0.17290749 0.032132964 0.28219178 0.3726674 0.06866002
## 4 0.014138119 0.16294278 0.11630435 0.050764192 0.16282225 0.1980467 0.06239737
## 5 0.006185567 0.07803867 0.06189821 0.007612457 0.06735751 0.1505155 0.15855355
## 6 0.010970927 0.16309720 0.11378556 0.112087912 0.20861833 0.2423414 0.03147432
##           47          48         49        50        51        52         53
## 1 0.13704735 0.001098901 0.11723010 0.3581267 0.1458680 0.1695008 0.31196581
## 2 0.03875969 0.020163488 0.16106291 0.1395731 0.1142384 0.1106267 0.08039492
## 3 0.06998880 0.001101928 0.07675439 0.4235880 0.1483547 0.1556291 0.26839827
## 4 0.06430155 0.008719346 0.04926909 0.4182613 0.3046832 0.3253536 0.20768137
## 5 0.16235129 0.000000000 0.05209047 0.2588235 0.1188325 0.1231834 0.06968641
## 6 0.03125000 0.009884679 0.03549973 0.3441451 0.2309828 0.2423246 0.22411348
##            54         55         56          57         58          59
## 1 0.020408163 0.09805014 0.07823691 0.006600660 0.12424740 0.012700166
## 2 0.039473684 0.01438053 0.04649891 0.020753687 0.11678436 0.148330597
## 3 0.027700831 0.11675978 0.09812569 0.010520487 0.18096810 0.022123894
## 4 0.003831418 0.10535025 0.09721464 0.018518519 0.11135253 0.041643836
## 5 0.002074689 0.09383754 0.02140884 0.006891799 0.08821502 0.005544006
## 6 0.007162534 0.11500000 0.21452145 0.019305019 0.19344262 0.083931530
##           60           61         62         63          64         65
## 1 0.09934138 0.0049806309 0.41818182 0.07828004 0.013751375 0.01043956
## 2 0.08660131 0.0621562156 0.02788409 0.14519651 0.154223433 0.14792576
## 3 0.12052834 0.0094496943 0.43235781 0.12251656 0.023704520 0.01821192
## 4 0.10941753 0.0164654226 0.16338798 0.22942779 0.045726728 0.04191617
## 5 0.07697642 0.0006934813 0.18892734 0.03407510 0.006882312 0.00621547
## 6 0.06571742 0.0420121614 0.21122112 0.32894737 0.086813187 0.08013172
##           66         67         68         69        70         71         72
## 1 0.10153677 0.04017857 0.26982379 0.05132450 0.1111111 0.14269912 0.12486127
## 2 0.08287895 0.03731343 0.11452055 0.11835616 0.0295082 0.11318681 0.09306167
## 3 0.12156216 0.02398801 0.17357656 0.06419480 0.1260365 0.19399666 0.15971063
## 4 0.10990207 0.10487445 0.04214559 0.10246575 0.1272529 0.18825467 0.36633663
## 5 0.07549760 0.16923077 0.12352533 0.03254848 0.1074896 0.03897008 0.08722959
## 6 0.06410959 0.05731523 0.08985667 0.16941694 0.1195592 0.24419890 0.37990061
##           73         74         75         76          77         78         79
## 1 0.10250696 0.12720264 0.01214798 0.06291391 0.014859659 0.04693539 0.05686167
## 2 0.02149945 0.08842795 0.07283680 0.18883415 0.209836066 0.26783754 0.38294405
## 3 0.12158394 0.16399779 0.01435671 0.08457711 0.019856591 0.06714761 0.06790800
## 4 0.24613687 0.36189956 0.02296337 0.07553366 0.034915439 0.09304871 0.11774281
## 5 0.13505948 0.08512111 0.00000000 0.02491349 0.004840941 0.01530967 0.02888583
## 6 0.10660744 0.37843784 0.05998899 0.13153550 0.085118067 0.15681944 0.16111415
##           80          81         82         83         84        85         86
## 1 0.03925845 0.010561423 0.04696133 0.16740331 0.19384953 0.1844444 0.04223807
## 2 0.42199350 0.072767365 0.27068493 0.07295666 0.14262384 0.1587912 0.23039216
## 3 0.07115490 0.013348165 0.06877427 0.18333333 0.18261826 0.1360355 0.07036833
## 4 0.12979989 0.013201320 0.09095890 0.08105148 0.18651441 0.1468647 0.12887439
## 5 0.02674897 0.003484321 0.01462396 0.06123869 0.07713499 0.0514246 0.02620690
## 6 0.20098039 0.018836565 0.15469613 0.06637168 0.45643836 0.2838137 0.21643836
##           87         88         89         90         91         92         93
## 1 0.62459016 0.10262131 0.18687982 0.13219027 0.01257518 0.01363141 0.11417540
## 2 0.11159263 0.04371887 0.05205479 0.02140505 0.01899078 0.02170374 0.05908096
## 3 0.45908841 0.15117581 0.11973392 0.15786548 0.02031851 0.02083333 0.14988938
## 4 0.16368564 0.29219701 0.07662835 0.08826754 0.01951220 0.02168022 0.10934937
## 5 0.16173434 0.12736179 0.16585704 0.09273356 0.03512397 0.03568977 0.30977131
## 6 0.09541985 0.20879733 0.02314050 0.11283186 0.03000546 0.03213508 0.10236654
##           94         95         96         97         98         99        100
## 1 0.17079890 0.04759270 0.08836443 0.10227904 0.14847162 0.04675468 0.08724462
## 2 0.32185792 0.43179318 0.38595536 0.07925151 0.10309278 0.42864954 0.38815789
## 3 0.13550885 0.04719600 0.06008820 0.14086860 0.20799124 0.04585635 0.06152993
## 4 0.13238512 0.08173341 0.07834603 0.04722680 0.29383117 0.08465319 0.08301475
## 5 0.09116022 0.01948504 0.01717033 0.01544944 0.06976744 0.01795580 0.01863354
## 6 0.21530837 0.14088398 0.09879254 0.10049972 0.34749455 0.14317060 0.09933775
##           101        102        103        104        105         106       107
## 1 0.001648352 0.09265351 0.05271920 0.02268954 0.12995595 0.003003003 0.1245158
## 2 0.002729258 0.03478261 0.15960374 0.15527611 0.06670312 0.062686567 0.1106243
## 3 0.003867403 0.10494505 0.05990017 0.02990033 0.16112957 0.004504505 0.1772853
## 4 0.003271538 0.11285947 0.08726674 0.06014215 0.08956854 0.014992504 0.1855104
## 5 0.005517241 0.10501030 0.01798064 0.00555170 0.11610228 0.000000000 0.1826389
## 6 0.004388371 0.10722101 0.15614618 0.11025358 0.13758943 0.037425150 0.2992257
##           108         109         110         111        112        113
## 1 0.005534034 0.008241758 0.006651885 0.007709251 0.22786885 0.18927790
## 2 0.026272578 0.030534351 0.026490066 0.029492081 0.09288430 0.25923913
## 3 0.008301051 0.011563877 0.009476031 0.011043622 0.29917808 0.12225877
## 4 0.009846827 0.012540894 0.011037528 0.009819967 0.22240260 0.09989142
## 5 0.004184100 0.004840941 0.003486750 0.004152249 0.07049966 0.08716541
## 6 0.016004415 0.021358160 0.017777778 0.018141836 0.34531590 0.18599562
##         114        115        116       117        118        119        120
## 1 0.2401771 0.07973422 0.09191584 0.1768202 0.25415282 0.05660377 0.21220930
## 2 0.1037891 0.03628367 0.08976464 0.0698690 0.07616438 0.31680441 0.05405405
## 3 0.2946726 0.11866295 0.13089296 0.2831595 0.28729895 0.05406912 0.16448326
## 4 0.2504110 0.23435785 0.18883415 0.4075145 0.13691128 0.08535242 0.18454936
## 5 0.1981982 0.09576683 0.16446912 0.2904412 0.21016006 0.01745810 0.16666667
## 6 0.2249173 0.10902048 0.29668508 0.2791728 0.24820739 0.15806988 0.05224964
##          121        122        123        124        125        126        127
## 1 0.07620614 0.07494530 0.17772902 0.12362031 0.06740331 0.12099448 0.08599779
## 2 0.07788671 0.07871878 0.07821836 0.02240437 0.17124040 0.02244116 0.05288986
## 3 0.09095921 0.08681319 0.24807903 0.14072022 0.09605775 0.13779745 0.09136213
## 4 0.11528004 0.11008677 0.19228680 0.20951860 0.09698630 0.21150685 0.02349727
## 5 0.03019904 0.02959394 0.18620690 0.10602911 0.04526462 0.10887656 0.01386963
## 6 0.12876712 0.12431544 0.12233752 0.14002205 0.11885019 0.14135837 0.03583241
##          128        129       130         131       132        133        134
## 1 0.40597345 0.10209713 0.1056687 0.005500550 0.4044077 0.31274131 0.01434087
## 2 0.12843030 0.19890411 0.1953082 0.119476268 0.1769525 0.08118486 0.04288070
## 3 0.29711752 0.12665929 0.1291391 0.012127894 0.3698782 0.34423503 0.01833333
## 4 0.09638554 0.20251779 0.2018549 0.034332425 0.2136612 0.19496166 0.05869446
## 5 0.19486824 0.07093185 0.0699446 0.004149378 0.1998612 0.06994460 0.00625000
## 6 0.06464088 0.15002758 0.1521978 0.080175728 0.1757576 0.26755113 0.12493090
##          135        136        137        138        139       140       141
## 1 0.10401761 0.32542747 0.06256921 0.09961262 0.07134956 0.1212789 0.1224263
## 2 0.02785363 0.08122942 0.11868132 0.06578947 0.27372463 0.1150685 0.1144279
## 3 0.13086692 0.22880886 0.09196676 0.14642263 0.11043285 0.1448314 0.1456473
## 4 0.11921611 0.14926189 0.16144975 0.08054795 0.17250821 0.2252597 0.2280508
## 5 0.02356202 0.21473245 0.02432245 0.02713987 0.03129346 0.1051176 0.1090014
## 6 0.20659341 0.06174201 0.27790055 0.10369553 0.37217007 0.2099174 0.2114317
##          142         143        144         145          146        147
## 1 0.12665929 0.013947001 0.12888889 0.002092050 0.0021990104 0.10088203
## 2 0.14747807 0.038700760 0.04654655 0.022774327 0.0556464812 0.03721949
## 3 0.17415419 0.004181185 0.08792846 0.002805049 0.0055096419 0.13875069
## 4 0.14238773 0.008959338 0.10164425 0.010395010 0.0130647795 0.13668671
## 5 0.09205021 0.018453427 0.21564885 0.007922535 0.0006915629 0.05597789
## 6 0.15270121 0.009084556 0.03303303 0.010423905 0.0389895662 0.25426527
##          148          149          150        151        152          153
## 1 0.11135491 0.0000000000 0.0000000000 0.06873977 0.02843084 0.0006973501
## 2 0.02995643 0.0069783671 0.0061643836 0.07305195 0.26072787 0.0041666667
## 3 0.14419373 0.0000000000 0.0000000000 0.10630137 0.04832510 0.0006978367
## 4 0.12717391 0.0000000000 0.0000000000 0.30648649 0.08676790 0.0020761246
## 5 0.05360825 0.0008695652 0.0008748906 0.06716929 0.01035197 0.0008904720
## 6 0.24205915 0.0014005602 0.0013783598 0.26851852 0.16338798 0.0000000000
##            154        155        156        157        158        159
## 1 0.0010946907 0.02797586 0.10181618 0.06125828 0.21904238 0.05846995
## 2 0.0065146580 0.05667575 0.51501912 0.15726027 0.04478427 0.12906725
## 3 0.0000000000 0.05347299 0.06791828 0.09126106 0.25981205 0.08812261
## 4 0.0027159153 0.33768352 0.05875952 0.13183807 0.17066521 0.14270270
## 5 0.0006868132 0.15923130 0.01588398 0.04350829 0.14740484 0.01917808
## 6 0.0021893815 0.15780822 0.09220637 0.22754821 0.19560440 0.17929155
##          160        161        162        163        164         165        166
## 1 0.05918142 0.07498632 0.12064195 0.12603648 0.11046512 0.004986150 0.14662273
## 2 0.06085526 0.17237629 0.03738318 0.18082192 0.12262774 0.115448048 0.19662309
## 3 0.08097615 0.10824176 0.13277778 0.12548584 0.15905744 0.011653718 0.08930540
## 4 0.10843373 0.18305269 0.10082192 0.09534247 0.18247126 0.038419319 0.02178649
## 5 0.01107266 0.04049417 0.19426974 0.06903766 0.06294964 0.008344924 0.03448276
## 6 0.13237728 0.42113910 0.07403315 0.12355212 0.25000000 0.080332410 0.02028509
##          167        168        169         170        171        172       173
## 1 0.05564738 0.02200825 0.18092105 0.026315789 0.10172318 0.25983380 0.3131868
## 2 0.08201203 0.01777170 0.05122616 0.150163221 0.02699725 0.14199229 0.1542234
## 3 0.09161148 0.03603604 0.23360882 0.044603524 0.11643454 0.18186874 0.4040794
## 4 0.12227074 0.03670972 0.27015251 0.041847826 0.08525853 0.16273973 0.1773667
## 5 0.04347826 0.04707934 0.16024759 0.007570544 0.10145935 0.08692629 0.1473829
## 6 0.11648352 0.03980782 0.19506849 0.054216867 0.08849558 0.10747922 0.3448276
##         174       175       176        177          178        179          180
## 1 0.3104972 0.3237251 0.3107811 0.25676422 0.0000000000 0.04933481 0.0049822064
## 2 0.1521620 0.1897408 0.1541826 0.13885840 0.0103768433 0.15705656 0.0226148410
## 3 0.4008859 0.4306407 0.4051962 0.18091010 0.0000000000 0.08055556 0.0064423765
## 4 0.1753425 0.2125551 0.1755725 0.16411379 0.0000000000 0.26688633 0.0035236082
## 5 0.1475296 0.1780726 0.1467128 0.08605135 0.0006954103 0.09444444 0.0008849558
## 6 0.3426997 0.3835920 0.3424959 0.10730088 0.0022002200 0.38197899 0.0113074205
##           181        182         183        184         185        186
## 1 0.004989309 0.08619749 0.001101322 0.16648230 0.006644518 0.10148108
## 2 0.021861777 0.12669193 0.026717557 0.21377802 0.018701870 0.20620239
## 3 0.006419401 0.10940919 0.001653804 0.21654636 0.007234279 0.07260726
## 4 0.003538570 0.37466161 0.009264305 0.08333333 0.005491488 0.09283388
## 5 0.000890472 0.09009629 0.008990318 0.08310249 0.008379888 0.02481048
## 6 0.011387900 0.28050109 0.010416667 0.09387079 0.015529673 0.18206670
##          187        188          189          190        191        192
## 1 0.16022099 0.27068493 0.0110011001 0.0035014006 0.05722222 0.23021978
## 2 0.21252059 0.30978261 0.0626361656 0.0152777778 0.19094923 0.06331878
## 3 0.21043865 0.23102310 0.0126931567 0.0014035088 0.09035137 0.25950413
## 4 0.08621636 0.08482871 0.0185185185 0.0006954103 0.13593836 0.23949809
## 5 0.08019526 0.05075446 0.0006944444 0.0017746229 0.02456140 0.22683264
## 6 0.09639889 0.13964951 0.0368131868 0.0020905923 0.15363283 0.14631463
##          193          194          195         196        197        198
## 1 0.04412576 0.0005503577 0.0007122507 0.004137931 0.10546659 0.04042082
## 2 0.20922570 0.0262151830 0.0099431818 0.013004791 0.10214168 0.19636964
## 3 0.07337410 0.0000000000 0.0042857143 0.003453039 0.11536328 0.06013363
## 4 0.12280702 0.0010911075 0.0035511364 0.002051984 0.03721949 0.07311710
## 5 0.01728907 0.0027624309 0.0017969452 0.000000000 0.01936376 0.01749475
## 6 0.18277195 0.0038419319 0.0071174377 0.003462604 0.03579295 0.10132890
##            199        200        201        202        203         204
## 1 0.0000000000 0.27027027 0.04294479 0.03869541 0.10468320 0.002765487
## 2 0.0274423710 0.23835616 0.17127072 0.19264544 0.09622745 0.025767544
## 3 0.0005537099 0.18469218 0.07170868 0.06048835 0.11491713 0.003880266
## 4 0.0087912088 0.11226725 0.04573003 0.07506849 0.03551913 0.004934211
## 5 0.0013917884 0.04158004 0.01127555 0.01806810 0.02004147 0.001383126
## 6 0.0099667774 0.05521811 0.07384786 0.10181618 0.03630363 0.006077348
##           205         206         207         208          209         210
## 1 0.009167842 0.001106195 0.022639426 0.021349862 0.0011086475 0.022299652
## 2 0.032212885 0.031301483 0.150219298 0.133561644 0.0391400221 0.132963989
## 3 0.009200283 0.002219756 0.038248337 0.040027605 0.0044518642 0.041316527
## 4 0.006289308 0.006578947 0.031815688 0.035446489 0.0077007701 0.036576950
## 5 0.001782531 0.001385042 0.008397481 0.006861063 0.0006968641 0.007819288
## 6 0.007027407 0.007747648 0.055893747 0.042465753 0.0288408209 0.042243767
##           211        212         213         214        215        216
## 1 0.002185792 0.02477974 0.001649258 0.013796909 0.02371760 0.10219780
## 2 0.008676790 0.21693989 0.041462084 0.057080132 0.13910186 0.13576881
## 3 0.001097695 0.04583103 0.004405286 0.017165006 0.02434975 0.15168229
## 4 0.001083424 0.07267760 0.005449591 0.013136289 0.04814004 0.11044614
## 5 0.001373626 0.01591696 0.000691085 0.007649513 0.01455301 0.05878285
## 6 0.003275109 0.11538462 0.013721186 0.019326339 0.08094714 0.14481624
##          217        218        219        220        221         222
## 1 0.10910088 0.26477146 0.04733076 0.01890990 0.19481236 0.006626173
## 2 0.18603382 0.06246545 0.07084469 0.03644395 0.06359649 0.038398245
## 3 0.07446222 0.22539150 0.06070640 0.02518187 0.20854132 0.008291874
## 4 0.01525886 0.15359116 0.09095861 0.02971932 0.25604396 0.009299781
## 5 0.02352941 0.18784916 0.02432245 0.01742160 0.10895212 0.002775850
## 6 0.01753425 0.16685206 0.19506849 0.02885683 0.17090708 0.020396913
##           223        224        225         226         227        228
## 1 0.104860732 0.01936912 0.09031199 0.015503876 0.019251925 0.06073992
## 2 0.009761388 0.03778751 0.14657980 0.094453597 0.166303162 0.25781679
## 3 0.125342091 0.02547065 0.14027397 0.022765130 0.034197463 0.05260244
## 4 0.106775068 0.02962150 0.33260988 0.019178082 0.069830878 0.07334428
## 5 0.093214531 0.01804303 0.06400551 0.004857738 0.006920415 0.01111883
## 6 0.055100927 0.02817680 0.37465866 0.043670536 0.091758242 0.08714837
##           229         230        231        232         233         234
## 1 0.008462623 0.008191126 0.03410341 0.17660044 0.008752735 0.015925316
## 2 0.064898814 0.063642519 0.21967213 0.21905805 0.153720804 0.126430518
## 3 0.012039660 0.011651816 0.05905077 0.18060942 0.017014270 0.034710744
## 4 0.007692308 0.007457627 0.10457516 0.16475096 0.036916395 0.048501362
## 5 0.004416961 0.003415884 0.01663202 0.07310345 0.004118051 0.004830918
## 6 0.014184397 0.013698630 0.17233809 0.15168229 0.079278294 0.070175439
##           235         236         237         238        239         240
## 1 0.015960374 0.000000000 0.000000000 0.002768549 0.02147577 0.001660210
## 2 0.124590164 0.011588275 0.012547736 0.033498078 0.03825137 0.022002200
## 3 0.033738938 0.000000000 0.000000000 0.002779322 0.02263943 0.002782415
## 4 0.049726776 0.000000000 0.000000000 0.002196595 0.02894593 0.001098901
## 5 0.005513439 0.000000000 0.000000000 0.000000000 0.01111883 0.000000000
## 6 0.069268829 0.002080444 0.003844042 0.004424779 0.02701213 0.003880266
##           241        242        243          244          245          246
## 1 0.001094092 0.05628415 0.03923706 0.0027797081 0.0021246459 0.0005534034
## 2 0.021668472 0.13572204 0.15097403 0.0400829302 0.0435087719 0.0126304228
## 3 0.002192982 0.09170785 0.06783370 0.0006963788 0.0042643923 0.0011129661
## 4 0.001624256 0.23915401 0.16657653 0.0006901311 0.0014025245 0.0000000000
## 5 0.000000000 0.05833905 0.07103825 0.0017406440 0.0008873114 0.0000000000
## 6 0.003818876 0.22088573 0.25231608 0.0048409405 0.0049504950 0.0033185841
##           247        248         249        250        251        252
## 1 0.002202643 0.14073667 0.035764376 0.10779436 0.03053859 0.10099889
## 2 0.037663755 0.17448201 0.079166667 0.24175824 0.23925028 0.24544953
## 3 0.003863135 0.13245033 0.019732206 0.06607440 0.03899721 0.06033520
## 4 0.005455537 0.06052345 0.003477051 0.02079912 0.03960396 0.01986755
## 5 0.004143646 0.03438790 0.006156552 0.01941748 0.01602787 0.01884159
## 6 0.008781559 0.05662452 0.008421053 0.01826231 0.06260388 0.01942286
##          253          254         255        256         257         258
## 1 0.10301370 0.0000000000 0.001658375 0.11105003 0.000000000 0.000000000
## 2 0.32680805 0.0070575461 0.009335530 0.08365227 0.023446020 0.009790210
## 3 0.09851403 0.0000000000 0.004441977 0.08282717 0.001104362 0.000000000
## 4 0.06029332 0.0005408329 0.022490400 0.01800327 0.001088732 0.000000000
## 5 0.04539202 0.0000000000 0.017409471 0.06698895 0.000000000 0.000000000
## 6 0.09523810 0.0038188762 0.035359116 0.04235424 0.003852504 0.001409443
##            259        260         261         262          263         264
## 1 0.0000000000 0.06239737 0.017089305 0.004975124 0.0005476451 0.004778157
## 2 0.0173852573 0.38131450 0.097374179 0.051663128 0.0114379085 0.053097345
## 3 0.0007007708 0.06479956 0.025470653 0.006437768 0.0005497526 0.006237006
## 4 0.0013802622 0.07646421 0.019715225 0.004915730 0.0000000000 0.004768392
## 5 0.0008779631 0.01379310 0.003472222 0.003530450 0.0000000000 0.002577320
## 6 0.0048814505 0.13442623 0.027041943 0.006369427 0.0016411379 0.006194081
##           265          266         267         268          269         270
## 1 0.009418283 0.0000000000 0.016022099 0.002192982 0.0000000000 0.000000000
## 2 0.073117097 0.0051282051 0.034482759 0.010899183 0.0103542234 0.009027778
## 3 0.011135857 0.0000000000 0.017699115 0.003305785 0.0000000000 0.000000000
## 4 0.008771930 0.0007256894 0.029572837 0.004354927 0.0000000000 0.000000000
## 5 0.002079002 0.0000000000 0.009059233 0.002068966 0.0006906077 0.000000000
## 6 0.022689541 0.0000000000 0.013215859 0.003289474 0.0005491488 0.001397624
##           271          272         273         274         275         276
## 1 0.000000000 0.0000000000 0.004955947 0.016958425 0.018201875 0.015401540
## 2 0.027487631 0.0063113604 0.062807209 0.069451980 0.072757112 0.072637903
## 3 0.001111111 0.0000000000 0.006084071 0.019769357 0.020453289 0.018221977
## 4 0.001649258 0.0007012623 0.010371179 0.018398268 0.018013100 0.018022938
## 5 0.000000000 0.0000000000 0.000000000 0.001371742 0.001384083 0.001381215
## 6 0.006073992 0.0000000000 0.024766098 0.026775956 0.028650138 0.026359143
##          277       278         279         280        281        282
## 1 0.06336088 0.2450331 0.010359116 0.000000000 0.03905391 0.04030922
## 2 0.15846995 0.1226725 0.146291209 0.024309392 0.02024070 0.02910489
## 3 0.09000552 0.3351801 0.016689847 0.001677852 0.03818484 0.04618809
## 4 0.04967249 0.3015873 0.020632737 0.002208724 0.04590164 0.04555434
## 5 0.00899654 0.1498959 0.006065858 0.001397624 0.01513067 0.03808864
## 6 0.09295930 0.2447514 0.052047189 0.006128134 0.05109890 0.02211166
##           283        284         285         286        287        288
## 1 0.002803083 0.14892443 0.009309967 0.009470752 0.12758997 0.15038419
## 2 0.007660167 0.12122874 0.129700272 0.123545706 0.20216802 0.12145969
## 3 0.002814919 0.14325375 0.012665198 0.012906846 0.09364732 0.14419373
## 4 0.005578801 0.07291667 0.013601741 0.014364641 0.15384615 0.07232191
## 5 0.001777778 0.05412908 0.003445899 0.004225352 0.06538197 0.05513439
## 6 0.002820874 0.13598673 0.024630542 0.023463687 0.12342982 0.13964951
##           289         290        291         292         293          294
## 1 0.003350084 0.025330396 0.19330038 0.004975124 0.004961021 0.0005479452
## 2 0.046987286 0.081554461 0.37022901 0.018052516 0.035211268 0.0059815117
## 3 0.008958567 0.024916944 0.16648291 0.006080708 0.007790368 0.0000000000
## 4 0.015486726 0.014721919 0.14790647 0.013143483 0.002105263 0.0000000000
## 5 0.002096436 0.008316008 0.07044199 0.017170330 0.002702703 0.0000000000
## 6 0.040066778 0.023089610 0.22453450 0.003863135 0.013446568 0.0021941854
##           295        296         297         298         299         300
## 1 0.009749304 0.04490690 0.048219178 0.000000000 0.026431718 0.027487631
## 2 0.057851240 0.06800871 0.071273123 0.010380623 0.134825328 0.151829601
## 3 0.015277778 0.06633772 0.067582418 0.000000000 0.044751381 0.028114664
## 4 0.042349727 0.06626833 0.067464635 0.000000000 0.074114441 0.027838428
## 5 0.017543860 0.01029513 0.008940853 0.000000000 0.008298755 0.002762431
## 6 0.045769764 0.08214677 0.087527352 0.001395673 0.134287287 0.045529347
##            301         302        303         304          305        306
## 1 0.0005503577 0.001654716 0.13656633 0.001426534 0.0007042254 0.08442982
## 2 0.0273373428 0.019726027 0.22923588 0.067844523 0.0048780488 0.06948969
## 3 0.0005534034 0.002214839 0.12723214 0.002867384 0.0000000000 0.06308283
## 4 0.0016384489 0.002186987 0.04803976 0.011971831 0.0000000000 0.19370591
## 5 0.0006925208 0.001389854 0.04225352 0.000907441 0.0000000000 0.20055325
## 6 0.0044004400 0.004412576 0.05116796 0.031960227 0.0007027407 0.06612022
##          307        308        309         310          311        312
## 1 0.10089841 0.09185919 0.06449835 0.030990592 0.0006915629 0.07688083
## 2 0.06133701 0.03393541 0.07506849 0.115384615 0.0041180508 0.10262009
## 3 0.08136300 0.09075816 0.07466814 0.023849140 0.0000000000 0.11810155
## 4 0.03693570 0.12663755 0.10278841 0.010433828 0.0000000000 0.17312944
## 5 0.09353147 0.23464458 0.03800968 0.004888268 0.0000000000 0.03181189
## 6 0.03974895 0.06652007 0.09135938 0.018805310 0.0006906077 0.27921009
##            313        314         315        316          317         318
## 1 0.0005509642 0.01758242 0.000000000 0.05320900 0.0011019284 0.002099370
## 2 0.0186199343 0.01363884 0.051602390 0.13195202 0.0141921397 0.028651293
## 3 0.0016648169 0.01817181 0.002741228 0.06219042 0.0005537099 0.004228330
## 4 0.0038314176 0.02446982 0.006504065 0.16521265 0.0021822149 0.002090592
## 5 0.0013917884 0.01381215 0.002060440 0.08977901 0.0027720028 0.000000000
## 6 0.0060472787 0.02414929 0.015821058 0.17708333 0.0016510732 0.001407460
##            319          320         321         322        323         324
## 1 0.0000000000 0.0055401662 0.006306938 0.004166667 0.19158361 0.002195390
## 2 0.0114566285 0.0591065292 0.034770515 0.009688581 0.17863014 0.048939641
## 3 0.0000000000 0.0020935101 0.008462623 0.006333568 0.13007226 0.002748763
## 4 0.0000000000 0.0034270048 0.011111111 0.002754821 0.08671789 0.007061380
## 5 0.0006901311 0.0008912656 0.007894737 0.016769638 0.05175983 0.000000000
## 6 0.0021941854 0.0048510049 0.022456140 0.003491620 0.09839690 0.023026316
##           325         326          327        328          329        330
## 1 0.004427227 0.006040637 0.0007087172 0.01276360 0.0021306818 0.07411504
## 2 0.024671053 0.038251366 0.0084925690 0.01431718 0.0259467041 0.04505495
## 3 0.008859358 0.007174393 0.0000000000 0.02056698 0.0014214641 0.07508343
## 4 0.005491488 0.007646095 0.0000000000 0.03076923 0.0007047216 0.03898957
## 5 0.004163775 0.000000000 0.0000000000 0.02996516 0.0000000000 0.04735376
## 6 0.021546961 0.023678414 0.0000000000 0.02931416 0.0007062147 0.04143646
##          331          332        333        334         335         336
## 1 0.06043956 0.0006993007 0.06446991 0.01645639 0.001651982 0.010005559
## 2 0.20787746 0.0055363322 0.04397163 0.02388708 0.012575178 0.033039648
## 3 0.09905921 0.0000000000 0.07703281 0.02417582 0.002760906 0.012256267
## 4 0.14394766 0.0000000000 0.06478873 0.01468189 0.002733734 0.001651982
## 5 0.02837370 0.0000000000 0.01805054 0.01310345 0.002084781 0.002087683
## 6 0.25453546 0.0000000000 0.13544669 0.01863014 0.004402862 0.005549390
##            337          338         339        340          341         342
## 1 0.0000000000 0.0000000000 0.006037322 0.08821918 0.0005534034 0.005530973
## 2 0.0306513410 0.0049207217 0.013646288 0.26579521 0.0274725275 0.037828947
## 3 0.0016592920 0.0000000000 0.007182320 0.07903403 0.0011098779 0.007194245
## 4 0.0060076461 0.0005461496 0.004899292 0.13010343 0.0044004400 0.008214677
## 5 0.0006901311 0.0048543689 0.002762431 0.12179931 0.0000000000 0.000000000
## 6 0.0088057237 0.0000000000 0.021965953 0.13676149 0.0077348066 0.016547159
##          343         344        345        346         347         348
## 1 0.08631116 0.007671233 0.08808864 0.05601318 0.005534034 0.000000000
## 2 0.17859093 0.061956522 0.17381738 0.22568306 0.041163557 0.006254343
## 3 0.05530973 0.008791209 0.05669817 0.03316750 0.006090808 0.000000000
## 4 0.03875546 0.009244154 0.04061471 0.03167668 0.010399562 0.000000000
## 5 0.03051318 0.002053388 0.02865129 0.01382170 0.002779708 0.000000000
## 6 0.05769231 0.024056862 0.05260244 0.03848268 0.010479868 0.000000000
##            349          350        351        352        353         354
## 1 0.0022014309 0.0000000000 0.06784335 0.06666667 0.04333516 0.000000000
## 2 0.0212302667 0.0136314068 0.04545455 0.04435926 0.03318825 0.009078212
## 3 0.0033094319 0.0000000000 0.05801105 0.05752212 0.05118327 0.000000000
## 4 0.0016348774 0.0016348774 0.08821918 0.08921730 0.08269859 0.002099370
## 5 0.0006920415 0.0006930007 0.09632710 0.09424809 0.08137931 0.000000000
## 6 0.0087623220 0.0022002200 0.04237755 0.04015402 0.04059243 0.004912281
##          355          356         357         358        359         360
## 1 0.03164908 0.0000000000 0.013311148 0.008250825 0.12186978 0.004942339
## 2 0.22589532 0.0075034106 0.059602649 0.004928806 0.16905286 0.032170120
## 3 0.03171953 0.0006925208 0.012820513 0.014933628 0.16415410 0.005509642
## 4 0.02965404 0.0006825939 0.007135016 0.033351558 0.05622933 0.007069059
## 5 0.00489853 0.0017406440 0.003486750 0.032571033 0.02029391 0.000000000
## 6 0.02990033 0.0000000000 0.010520487 0.033627343 0.09555556 0.017004937
##            361         362        363         364         365          366
## 1 0.0007137759 0.012609649 0.12698413 0.000000000 0.000000000 0.0013736264
## 2 0.0211565585 0.059142702 0.16720955 0.002828854 0.004098361 0.0047619048
## 3 0.0014316392 0.012081274 0.17161716 0.000000000 0.000000000 0.0006891799
## 4 0.0035260931 0.005971770 0.05757740 0.001405481 0.001369863 0.0000000000
## 5 0.0000000000 0.003455425 0.02136458 0.000000000 0.000000000 0.0008591065
## 6 0.0035511364 0.009304871 0.10000000 0.004264392 0.004101162 0.0027322404
##            367        368          369         370        371         372
## 1 0.0000000000 0.02279044 0.0006958942 0.010462555 0.02353585 0.003318584
## 2 0.0160502442 0.16354626 0.0075862069 0.040305011 0.16702938 0.080636314
## 3 0.0000000000 0.04296875 0.0006954103 0.020948181 0.04116356 0.007198228
## 4 0.0006958942 0.06985699 0.0006915629 0.022913257 0.07127312 0.005473454
## 5 0.0000000000 0.01603905 0.0000000000 0.005521049 0.01655172 0.002760524
## 6 0.0006978367 0.12901440 0.0020804438 0.031868132 0.13107591 0.025995575
##            373         374         375         376        377          378
## 1 0.0007183908 0.014771049 0.001100110 0.003852504 0.06622517 0.0060373216
## 2 0.0271234832 0.072058824 0.013631407 0.022002200 0.07447974 0.0588876772
## 3 0.0000000000 0.017804154 0.001100715 0.002777778 0.09579181 0.0088251517
## 4 0.0014255167 0.017467249 0.000000000 0.004945055 0.03440743 0.0098092643
## 5 0.0018281536 0.009157509 0.000000000 0.000000000 0.01176471 0.0006863418
## 6 0.0057678443 0.072485207 0.001643836 0.011608624 0.07607497 0.0252192982
##           379         380        381         382          383         384
## 1 0.022062879 0.015367728 0.02141680 0.003835616 0.0032894737 0.009330406
## 2 0.059178082 0.112017401 0.31202186 0.020663404 0.0201305767 0.069285325
## 3 0.034387133 0.026388125 0.04083885 0.002199010 0.0027457441 0.017070485
## 4 0.052660450 0.045676998 0.06601200 0.004340749 0.0027218291 0.013609145
## 5 0.006958942 0.004837595 0.01453287 0.000000000 0.0006887052 0.002074689
## 6 0.114301491 0.089413055 0.10720176 0.010946907 0.0131578947 0.025219298
##           385        386         387        388         389          390
## 1 0.021134594 0.06070640 0.019758507 0.14215418 0.034140969 0.0007163324
## 2 0.037486218 0.09857612 0.036085293 0.11997828 0.112876712 0.0128205128
## 3 0.024567281 0.08365651 0.023796348 0.14717188 0.011080332 0.0007220217
## 4 0.009911894 0.07217059 0.008738394 0.12236058 0.004934211 0.0014255167
## 5 0.007697691 0.02574809 0.006237006 0.03991741 0.012491325 0.0000000000
## 6 0.007210205 0.13939394 0.007142857 0.11857923 0.003854626 0.0021505376
##            391        392          393        394        395          396
## 1 0.0027593819 0.03400987 0.0005497526 0.09579181 0.11667584 0.0005494505
## 2 0.0148026316 0.32969432 0.0114754098 0.16473829 0.13121925 0.0076169750
## 3 0.0011092623 0.05905077 0.0005509642 0.06281267 0.12894300 0.0000000000
## 4 0.0021869874 0.08015267 0.0010934937 0.09340659 0.04860732 0.0005425936
## 5 0.0006958942 0.01998622 0.0000000000 0.06438069 0.01731302 0.0006915629
## 6 0.0093560815 0.13626374 0.0027457441 0.07534626 0.03962576 0.0000000000
##          397        398        399         400        401         402
## 1 0.04381586 0.25054705 0.01774820 0.019294377 0.02681992 0.001095890
## 2 0.06662996 0.05603917 0.17614578 0.178336980 0.03259098 0.038105607
## 3 0.04454343 0.26707048 0.03279600 0.033795014 0.02469813 0.001651982
## 4 0.04617922 0.15778020 0.03474903 0.033935413 0.02440347 0.002718869
## 5 0.01535241 0.16815989 0.00983837 0.009762901 0.03642612 0.004137931
## 6 0.08582503 0.19178082 0.06032097 0.057692308 0.02292576 0.005476451
##           403         404         405        406         407        408
## 1 0.001099505 0.011001100 0.011660189 0.03357182 0.007662835 0.01371366
## 2 0.039301310 0.010922993 0.012686156 0.02295082 0.083695652 0.06273868
## 3 0.001656543 0.011576626 0.012263099 0.03421634 0.020285088 0.02316602
## 4 0.002727769 0.003818876 0.003302146 0.01965066 0.039566396 0.03816794
## 5 0.004158004 0.003465003 0.003484321 0.01726519 0.006177076 0.00621118
## 6 0.006043956 0.016456391 0.019444444 0.01704233 0.122270742 0.06040637
##            409        410        411         412          413        414
## 1 0.0011013216 0.03252481 0.10641799 0.001651982 0.0011055832 0.03359031
## 2 0.0131219245 0.02297593 0.07516340 0.015855659 0.0049342105 0.01643836
## 3 0.0005534034 0.03377630 0.05646930 0.001106195 0.0005555556 0.05709534
## 4 0.0010922993 0.01858939 0.03262643 0.001092896 0.0005485464 0.01368363
## 5 0.0000000000 0.01803051 0.02060440 0.002060440 0.0000000000 0.02912621
## 6 0.0021978022 0.01872247 0.04545455 0.002202643 0.0000000000 0.06346578
##          415          416         417        418         419         420
## 1 0.07757405 0.0011123471 0.000000000 0.05007364 0.027731559 0.013888889
## 2 0.05617978 0.0501377410 0.026129559 0.02023121 0.064462810 0.088754135
## 3 0.11412268 0.0033444816 0.001103144 0.06537890 0.041248606 0.022777778
## 4 0.18627451 0.0049586777 0.001089325 0.06578947 0.062122045 0.045079714
## 5 0.05882353 0.0006939625 0.000000000 0.06148282 0.007660167 0.007724719
## 6 0.10608204 0.0188574598 0.004378763 0.05726872 0.076454294 0.079312257
##          421         422         423          424        425         426
## 1 0.01433297 0.018201875 0.009265859 0.0000000000 0.11368653 0.002115656
## 2 0.01586433 0.009315068 0.046511628 0.0219546742 0.29589041 0.004184100
## 3 0.01495845 0.019922524 0.015647226 0.0000000000 0.11129568 0.002127660
## 4 0.02735230 0.030021834 0.009838370 0.0000000000 0.14887794 0.002799160
## 5 0.01940402 0.015883978 0.003606853 0.0009033424 0.03203343 0.003527337
## 6 0.02913689 0.047487576 0.017793594 0.0028368794 0.32341598 0.002115656
##            427         428        429         430         431         432
## 1 0.0000000000 0.005997819 0.03503010 0.025041736 0.029041096 0.009444444
## 2 0.0083275503 0.013008130 0.03367735 0.028145695 0.083832335 0.014892443
## 3 0.0000000000 0.011513158 0.05202629 0.032348020 0.047461369 0.011111111
## 4 0.0006939625 0.050270270 0.04991861 0.024202420 0.075163399 0.017630854
## 5 0.0000000000 0.031550069 0.01579670 0.004901961 0.008971705 0.012526096
## 6 0.0000000000 0.060490463 0.07717570 0.067590028 0.157346491 0.014404432
##          433        434         435        436        437        438        439
## 1 0.02949360 0.09080842 0.016456391 0.03028634 0.23629386 0.03042035 0.14482379
## 2 0.04517906 0.12046205 0.098637602 0.01146288 0.13071895 0.01206802 0.08702791
## 3 0.03285078 0.09888889 0.015986770 0.04470199 0.15719801 0.04500000 0.15329275
## 4 0.03742433 0.21569704 0.012002182 0.03266195 0.14029364 0.03347969 0.11220580
## 5 0.03140265 0.10724234 0.006224066 0.01521438 0.11249137 0.01456311 0.04293629
## 6 0.03664631 0.16335541 0.019220209 0.05057724 0.07232877 0.05088496 0.13304013
##            440        441        442        443       444        445        446
## 1 0.0007097232 0.01973684 0.04331140 0.04607789 0.1973612 0.01211454 0.09719934
## 2 0.0056377731 0.02664492 0.07792916 0.02011963 0.2146368 0.08246860 0.04959128
## 3 0.0007127584 0.03638368 0.04065934 0.05998899 0.2421401 0.01382743 0.12017641
## 4 0.0000000000 0.04836957 0.03588907 0.05223069 0.1509537 0.03061782 0.07123437
## 5 0.0000000000 0.01925722 0.01309442 0.01028807 0.1988912 0.01251739 0.02617080
## 6 0.0000000000 0.05701754 0.04107338 0.08008777 0.1441144 0.02866593 0.08891328
##           447         448          449        450        451        452
## 1 0.002197802 0.002203857 0.0011025358 0.06068890 0.08004447 0.16933260
## 2 0.044189853 0.032240437 0.0460021906 0.20585049 0.20176893 0.10557987
## 3 0.004947774 0.001106807 0.0038631347 0.09688013 0.11682504 0.18580144
## 4 0.001086957 0.001634877 0.0021822149 0.15455531 0.15995587 0.17923497
## 5 0.001381215 0.002077562 0.0006882312 0.02129121 0.02097902 0.05963939
## 6 0.008228195 0.007150715 0.0104338276 0.20699072 0.20944444 0.20518478
##          453        454        455        456         457         458
## 1 0.08550805 0.09100938 0.05797902 0.08774834 0.010462555 0.076700434
## 2 0.17880795 0.01640241 0.16111415 0.34153005 0.032240437 0.121167883
## 3 0.07688022 0.09457965 0.06806862 0.09442297 0.009392265 0.046989721
## 4 0.03028634 0.11796832 0.07435757 0.13537118 0.008174387 0.018950437
## 5 0.01885475 0.08695652 0.01517241 0.02491349 0.003469813 0.007155635
## 6 0.05869324 0.09725275 0.15456546 0.27367841 0.021475771 0.023494860
##          459        460        461        462        463        464        465
## 1 0.05406912 0.04835165 0.05228398 0.06152993 0.06274292 0.02264685 0.16971714
## 2 0.11683278 0.17775354 0.18712493 0.05646930 0.05469613 0.10764873 0.16061606
## 3 0.09080717 0.05068871 0.05176211 0.07966574 0.07928532 0.02771855 0.12208657
## 4 0.19258439 0.08174387 0.08346972 0.06432106 0.06508549 0.01199718 0.03577325
## 5 0.02096436 0.01245675 0.01315789 0.02580195 0.02642559 0.01861702 0.09223301
## 6 0.22456921 0.13322368 0.13681319 0.04099723 0.03997779 0.01703336 0.04875346
##          466         467         468         469         470        471
## 1 0.14225352 0.009810792 0.016939891 0.000000000 0.010928962 0.04600219
## 2 0.09476662 0.044692737 0.124321390 0.005455537 0.008156607 0.03747963
## 3 0.08477011 0.014820042 0.013706140 0.000000000 0.011557512 0.05390539
## 4 0.05782793 0.023693380 0.012472885 0.000000000 0.006507592 0.08147746
## 5 0.11858407 0.015831135 0.001368925 0.000000000 0.003441156 0.03586207
## 6 0.02127660 0.013258897 0.020174482 0.002194185 0.010917031 0.04814004
##          472         473        474        475        476        477
## 1 0.02726767 0.003521127 0.06681391 0.06135987 0.06806862 0.12061404
## 2 0.02267699 0.041143654 0.11196487 0.11386139 0.09290819 0.17292007
## 3 0.02901786 0.004249292 0.08033241 0.07675195 0.10450250 0.13461538
## 4 0.03528115 0.011813760 0.06692265 0.06747120 0.09868421 0.09880565
## 5 0.01530967 0.007067138 0.05080028 0.04940849 0.02789400 0.06073154
## 6 0.04997224 0.011299435 0.04743519 0.04746137 0.13440265 0.08419902
##            478         479          480        481          482        483
## 1 0.0005506608 0.002189381 0.0060874377 0.04352557 0.0055401662 0.04182719
## 2 0.0114628821 0.078804348 0.0406593407 0.09832523 0.0417582418 0.16912166
## 3 0.0011049724 0.006589786 0.0072182121 0.03551913 0.0061179088 0.07123136
## 4 0.0032768979 0.009766685 0.0049342105 0.07235421 0.0060273973 0.09661572
## 5 0.0020703934 0.002057613 0.0006973501 0.05324232 0.0006915629 0.03603604
## 6 0.0038546256 0.024056862 0.0165471594 0.05764002 0.0182623132 0.21763085
##           484         485         486        487         488        489
## 1 0.047356828 0.007700770 0.007667032 0.24220886 0.023849140 0.03152655
## 2 0.106301370 0.074822501 0.073449402 0.11624117 0.009879254 0.12768299
## 3 0.048780488 0.011037528 0.010989011 0.17527473 0.037222222 0.04679666
## 4 0.067213115 0.009264305 0.009787928 0.13882863 0.086121777 0.15769231
## 5 0.009009009 0.002063274 0.002749141 0.16655196 0.030598053 0.07885555
## 6 0.104510451 0.023026316 0.023001095 0.05076419 0.060354374 0.19778393
##          490        491        492         493        494         495
## 1 0.07142857 0.13236930 0.09762824 0.015546918 0.16785911 0.004393191
## 2 0.04907306 0.06696182 0.21886999 0.046754675 0.09781421 0.084921067
## 3 0.10546659 0.07988827 0.14127424 0.017847183 0.13591160 0.007713499
## 4 0.11364872 0.06408840 0.10690789 0.018701870 0.05834242 0.017934783
## 5 0.02841303 0.08887334 0.04085873 0.002808989 0.03531856 0.000000000
## 6 0.11848601 0.01505017 0.16914601 0.021690768 0.05772402 0.057597367
##          496         497        498        499         500        501
## 1 0.04168952 0.018743109 0.20785597 0.06881486 0.005008347 0.21052632
## 2 0.07801418 0.006014215 0.20379404 0.08184282 0.011049724 0.20184984
## 3 0.06828194 0.026548673 0.22070099 0.10733844 0.002794857 0.22167217
## 4 0.08650707 0.017429194 0.08761493 0.16729832 0.004415011 0.08655416
## 5 0.01100413 0.053287197 0.12732278 0.05906593 0.004198740 0.13148789
## 6 0.10312671 0.011544805 0.06702997 0.16366612 0.005011136 0.06743421
##          502         503        504        505        506         507
## 1 0.21290678 0.016877637 0.18056322 0.09779006 0.06791828 0.005485464
## 2 0.21061269 0.110955056 0.09489852 0.05908096 0.06140351 0.043169399
## 3 0.22228381 0.025280899 0.10654828 0.14206437 0.07202216 0.008291874
## 4 0.09672131 0.039270687 0.13081554 0.15927750 0.08911974 0.015812432
## 5 0.12724758 0.005145798 0.31824513 0.13888889 0.05890506 0.013831259
## 6 0.06902264 0.043723554 0.11147903 0.12231405 0.05408389 0.015925316
##           508        509        510        511        512        513       514
## 1 0.025683060 0.05338470 0.10825588 0.07832322 0.10779436 0.09723757 0.1155973
## 2 0.111895709 0.08493151 0.01957586 0.06492089 0.02028509 0.18739726 0.1165476
## 3 0.024175824 0.07154742 0.13743815 0.11829740 0.13518006 0.13033833 0.1338889
## 4 0.032030402 0.07381083 0.16576381 0.30475150 0.16538883 0.15287671 0.2306425
## 5 0.006206897 0.01178918 0.05802048 0.12240664 0.05894591 0.04375000 0.1375000
## 6 0.061235648 0.10743802 0.06400438 0.21518987 0.06460519 0.21334068 0.1705426
##         515        516        517        518         519        520        521
## 1 0.3469163 0.01758242 0.07635830 0.06439185 0.096385542 0.25730028 0.10445300
## 2 0.1143952 0.08605664 0.06588580 0.08246860 0.036512262 0.11700383 0.07255865
## 3 0.4615810 0.04452996 0.03524229 0.07913669 0.052805281 0.17331118 0.13701657
## 4 0.1538041 0.10391730 0.01010101 0.04428650 0.035345296 0.15191257 0.07415485
## 5 0.1352288 0.01304049 0.03903346 0.04155125 0.057201930 0.27727589 0.01581843
## 6 0.2075991 0.22958904 0.01313869 0.03357182 0.008767123 0.08250825 0.12678375
##          522        523        524         525        526         527
## 1 0.26533997 0.13281682 0.14745011 0.034710744 0.21551247 0.004424779
## 2 0.08292147 0.22447859 0.30286344 0.088476242 0.16538037 0.031250000
## 3 0.30044346 0.07995558 0.21702838 0.017689331 0.23119777 0.009983361
## 4 0.10818232 0.16355653 0.11251372 0.006550218 0.09785596 0.022453450
## 5 0.04100069 0.09556787 0.03417015 0.010423905 0.13626834 0.006949270
## 6 0.10755654 0.19779006 0.13732004 0.003298516 0.11184939 0.063395810
##          528        529        530        531        532        533        534
## 1 0.04059243 0.09571508 0.19268559 0.12306843 0.19337017 0.12349067 0.12969095
## 2 0.03322440 0.10526316 0.19272530 0.16201423 0.19241341 0.06492089 0.04331140
## 3 0.05347299 0.12073784 0.27442371 0.17865044 0.27404113 0.10451045 0.09761509
## 4 0.14106754 0.04362231 0.23685637 0.12924425 0.23642348 0.16032609 0.10546448
## 5 0.02142364 0.02112676 0.07211538 0.02011096 0.07256393 0.24311295 0.27750865
## 6 0.10159253 0.03234802 0.27103825 0.21247241 0.27031509 0.04340659 0.04465270
##         535        536        537        538        539        540       541
## 1 0.1934066 0.09655938 0.10148924 0.26433643 0.09215579 0.12705367 0.2086093
## 2 0.2716320 0.13618891 0.11391019 0.12438892 0.16675749 0.11364872 0.1089814
## 3 0.1250000 0.11735261 0.11055832 0.24835526 0.12403528 0.16675839 0.2846877
## 4 0.1772083 0.14623419 0.16803503 0.16983180 0.19356598 0.08138904 0.1742217
## 5 0.1472011 0.03275261 0.07847222 0.06168609 0.03314917 0.02064694 0.1089655
## 6 0.1453744 0.17044826 0.16639118 0.14246725 0.27796053 0.12869660 0.2353914
##          542         543        544        545        546        547        548
## 1 0.06828194 0.034387133 0.15881709 0.12896175 0.07802198 0.06094183 0.16022099
## 2 0.13832695 0.095604396 0.07947741 0.17362995 0.07084469 0.04028698 0.13713659
## 3 0.10840708 0.036687048 0.09350935 0.07785088 0.08112583 0.06291759 0.22535991
## 4 0.15783725 0.042400881 0.07967480 0.04234528 0.04362050 0.03138767 0.11542670
## 5 0.02008310 0.004854369 0.23747426 0.04545455 0.09889350 0.01608392 0.05153203
## 6 0.22912088 0.047645429 0.05194095 0.08074195 0.09429825 0.02933038 0.16105902
##          549        550        551        552        553        554        555
## 1 0.20883978 0.08467072 0.13189845 0.08497807 0.03302146 0.03336980 0.06122449
## 2 0.16639299 0.15835616 0.15270936 0.15640327 0.04438356 0.04298150 0.13432018
## 3 0.25665188 0.09706045 0.11911357 0.09515952 0.03530061 0.03581267 0.09706045
## 4 0.16757941 0.03073546 0.09682713 0.03313417 0.07603939 0.07721588 0.14833060
## 5 0.06592644 0.01109570 0.06471816 0.01032347 0.01178101 0.01103448 0.03254848
## 6 0.25234160 0.06456954 0.08875413 0.06739726 0.07331863 0.07119387 0.17760618
##          556         557         558         559         560        561
## 1 0.13666301 0.001103144 0.010958904 0.009413068 0.000000000 0.02260198
## 2 0.02885139 0.002186987 0.153094463 0.144426139 0.016129032 0.01259584
## 3 0.14553473 0.003320421 0.005500550 0.017777778 0.000000000 0.02323009
## 4 0.11050626 0.013668671 0.009777295 0.030769231 0.004871260 0.03549973
## 5 0.19419489 0.011797363 0.002074689 0.002785515 0.004436557 0.02698962
## 6 0.04000000 0.007162534 0.020218579 0.071349558 0.007741027 0.02420242
##          562        563        564         565        566        567
## 1 0.02302632 0.06961326 0.04883463 0.008438819 0.05937328 0.05983380
## 2 0.01251360 0.18859649 0.09922822 0.020920502 0.46946565 0.13671444
## 3 0.02365237 0.11308204 0.06295265 0.013352073 0.09487038 0.06733445
## 4 0.03422053 0.28070175 0.19262521 0.014644351 0.12050164 0.08640616
## 5 0.02611684 0.06963788 0.14245810 0.015971606 0.01786942 0.04033380
## 6 0.02185792 0.35891773 0.10171365 0.015482055 0.20977485 0.13130194
##            568         569         570         571        572        573
## 1 0.0000000000 0.000000000 0.000000000 0.022866704 0.02835333 0.02907296
## 2 0.0028169014 0.003556188 0.002074689 0.121951220 0.06063887 0.02625821
## 3 0.0000000000 0.000000000 0.000000000 0.023529412 0.03831418 0.03484513
## 4 0.0000000000 0.000000000 0.000000000 0.037078030 0.06327745 0.13810044
## 5 0.0008952551 0.001831502 0.001748252 0.008450704 0.04030055 0.14809689
## 6 0.0000000000 0.000000000 0.000000000 0.097601785 0.06372549 0.05953693
##           574         575         576         577        578       579
## 1 0.000000000 0.001665741 0.012893983 0.001655629 0.04305177 0.2113687
## 2 0.017032967 0.058370044 0.004291845 0.059016393 0.09745533 0.1767926
## 3 0.000000000 0.002788622 0.010115607 0.003867403 0.05361050 0.1851441
## 4 0.001096491 0.003854626 0.002877698 0.003284072 0.05729730 0.3959474
## 5 0.002084781 0.000000000 0.014466546 0.000000000 0.01647220 0.2147838
## 6 0.002765487 0.016666667 0.031791908 0.018191841 0.07403375 0.2022039
##          580        581         582        583        584        585        586
## 1 0.11700383 0.31692478 0.003453039 0.17634052 0.17475193 0.02802198 0.08213892
## 2 0.15062534 0.19505495 0.006830601 0.14591333 0.14340449 0.07037643 0.04273973
## 3 0.13439386 0.21705426 0.005547850 0.22364040 0.22277501 0.03752759 0.11993337
## 4 0.15518177 0.09170785 0.001369863 0.09215579 0.09006550 0.08283379 0.16081229
## 5 0.03701165 0.16342142 0.006092254 0.04996530 0.05085911 0.07438017 0.13948647
## 6 0.21631982 0.15422886 0.002747253 0.14917127 0.14615385 0.09125893 0.13031474
##          587        588        589        590         591        592        593
## 1 0.08283050 0.01125968 0.02200220 0.14243759 0.005847953 0.17098731 0.19616438
## 2 0.04359673 0.03786816 0.04426230 0.03953148 0.046309696 0.07662835 0.07181719
## 3 0.12092766 0.01480959 0.03143960 0.09523810 0.008759124 0.15099558 0.25852585
## 4 0.16412214 0.01256983 0.01853871 0.08575581 0.010189229 0.30870279 0.43641304
## 5 0.14226232 0.02380952 0.04689655 0.15135135 0.007259528 0.21591696 0.29484536
## 6 0.13285949 0.01060071 0.01640241 0.04258443 0.014534884 0.13876652 0.18719212
##          594        595        596        597         598        599        600
## 1 0.00139470 0.12770683 0.13734142 0.12788145 0.001104972 0.14165733 0.07260726
## 2 0.02437326 0.03748622 0.16511755 0.19112815 0.009868421 0.20900500 0.04369197
## 3 0.00280112 0.16266070 0.17679558 0.08618785 0.001662971 0.18605958 0.07628524
## 4 0.05609418 0.15104741 0.05622271 0.10060142 0.003287671 0.06892718 0.08633880
## 5 0.03475936 0.23467967 0.10934256 0.02271163 0.001387925 0.06755806 0.03223594
## 6 0.05567154 0.14674819 0.13084112 0.19790518 0.004969630 0.10067114 0.13454146
##           601         602         603         604        605        606
## 1 0.004422333 0.004402862 0.003536068 0.002139800 0.01709873 0.03397260
## 2 0.016429354 0.018579235 0.020322355 0.024630542 0.04153005 0.03099511
## 3 0.004986150 0.004966887 0.004255319 0.003594536 0.02651934 0.02528862
## 4 0.010393873 0.008738394 0.002118644 0.009915014 0.01588171 0.01794454
## 5 0.006963788 0.006232687 0.001771479 0.008130081 0.04350829 0.03443526
## 6 0.006073992 0.006060606 0.001416431 0.014967926 0.02253986 0.02949208
##          607       608         609        610        611        612        613
## 1 0.02043070 0.3076075 0.012147985 0.01822198 0.13249038 0.12910284 0.13060109
## 2 0.04717499 0.1329322 0.028540066 0.04448105 0.16266376 0.06800871 0.05914270
## 3 0.02722222 0.4034311 0.017738359 0.02608213 0.18557269 0.11641955 0.18605928
## 4 0.09983544 0.2133479 0.007667032 0.01646542 0.09036473 0.07425474 0.09095831
## 5 0.07409972 0.1398892 0.026261230 0.04166667 0.10082873 0.17125172 0.13674033
## 6 0.06253459 0.2665198 0.011043622 0.02266446 0.11287671 0.04597701 0.15258856
##          614         615          616        617        618         619
## 1 0.06744868 0.003486750 0.0000000000 0.12714207 0.29911210 0.000000000
## 2 0.03918723 0.006211180 0.0083391244 0.06732348 0.12383049 0.023691460
## 3 0.12740741 0.006284916 0.0000000000 0.11387507 0.39052925 0.001114206
## 4 0.12973761 0.023464458 0.0020979021 0.07510965 0.22979659 0.006057269
## 5 0.10439560 0.015859031 0.0008795075 0.17210271 0.08768267 0.001396648
## 6 0.11773256 0.013937282 0.0014064698 0.04527885 0.27232390 0.013311148
##          620        621        622        623          624        625
## 1 0.29817780 0.03467254 0.14058210 0.10142544 0.0000000000 0.09015487
## 2 0.12239297 0.15700219 0.19553377 0.27010870 0.0310515173 0.05090312
## 3 0.39000000 0.06032097 0.08324146 0.05503577 0.0007142857 0.13108407
## 4 0.22782037 0.16295492 0.03159041 0.02340773 0.0049435028 0.09912377
## 5 0.08541667 0.04507628 0.03308063 0.02542955 0.0000000000 0.12874043
## 6 0.27477974 0.21067107 0.02138158 0.01421542 0.0063965885 0.13373693
##           626       627        628       629       630       631       632
## 1 0.007756233 0.3598234 0.04809287 0.3658129 0.2803279 0.1457424 0.1456628
## 2 0.147252747 0.1650219 0.16199890 0.1669431 0.1665762 0.1176790 0.1173913
## 3 0.010005559 0.4625624 0.05546312 0.4642058 0.3684789 0.2064800 0.2064800
## 4 0.015393073 0.1823658 0.06140351 0.1851852 0.1649485 0.0915493 0.0899729
## 5 0.000000000 0.1245651 0.01724138 0.1250000 0.0997249 0.1431521 0.1411846
## 6 0.039800995 0.2253444 0.09005525 0.2314969 0.2224044 0.1533006 0.1509847
##            633        634         635        636        637        638
## 1 0.0014114326 0.14082504 0.001643836 0.10295176 0.11412268 0.07502738
## 2 0.0112123336 0.05524862 0.005443658 0.16142857 0.06002825 0.21576087
## 3 0.0007087172 0.08707865 0.001100110 0.12889211 0.11453114 0.11606161
## 4 0.0014035088 0.04861111 0.001629549 0.04500000 0.09097370 0.17525773
## 5 0.0008936550 0.11209964 0.003460208 0.03870387 0.16636364 0.02758621
## 6 0.0028188865 0.01131542 0.001096491 0.05319914 0.08428571 0.24303659
##           639         640        641         642         643          644
## 1 0.030269675 0.079889807 0.06142778 0.005537099 0.001644737 0.0006954103
## 2 0.097320940 0.002186987 0.07291667 0.001097695 0.004357298 0.0145328720
## 3 0.037569061 0.070835639 0.09539656 0.008869180 0.002754821 0.0006978367
## 4 0.030584380 0.022416621 0.06849315 0.023013699 0.005440696 0.0013793103
## 5 0.007654836 0.120689655 0.05062413 0.053287197 0.004135079 0.0000000000
## 6 0.057237204 0.042284459 0.11900826 0.014892443 0.000000000 0.0034843206
##            645        646        647         648        649         650
## 1 0.0011154490 0.10607735 0.03514552 0.004434590 0.01596916 0.004928806
## 2 0.0127635960 0.04994512 0.07213115 0.041758242 0.04800873 0.039695487
## 3 0.0005602241 0.06873614 0.05791506 0.007230256 0.02649007 0.007154651
## 4 0.0000000000 0.04004388 0.02340773 0.014867841 0.02292576 0.014099783
## 5 0.0020964361 0.06999307 0.01522491 0.000000000 0.03170227 0.000000000
## 6 0.0005567929 0.08314978 0.06479956 0.017718715 0.01483516 0.016967707
##          651         652         653        654         655         656
## 1 0.03362734 0.002733734 0.002822865 0.10703364 0.003302146 0.000000000
## 2 0.07283680 0.062940857 0.008391608 0.33834586 0.062910284 0.004861111
## 3 0.05598670 0.007131103 0.004264392 0.11297710 0.008830022 0.000000000
## 4 0.02408320 0.018448182 0.009142053 0.14692654 0.019639935 0.002074689
## 5 0.01525659 0.014393420 0.004484305 0.01727447 0.015193370 0.006156552
## 6 0.06435644 0.023484435 0.011371713 0.34298780 0.023652365 0.000000000
##          657        658        659          660          661          662
## 1 0.10337203 0.31002203 0.11661166 0.0005503577 0.0005509642 0.0497512438
## 2 0.09350935 0.29672131 0.08069793 0.0043525571 0.0032804811 0.0005485464
## 3 0.10931400 0.40364440 0.16390728 0.0011025358 0.0011049724 0.0537396122
## 4 0.02860286 0.11395856 0.12609170 0.0027247956 0.0016393443 0.0340472268
## 5 0.04027778 0.11211073 0.02700831 0.0041465100 0.0041782730 0.0917303683
## 6 0.03149171 0.09845985 0.16446645 0.0032912781 0.0027487631 0.0370165746
##            663        664          665         666         667         668
## 1 0.0006954103 0.04267425 0.0000000000 0.000000000 0.002103787 0.002120141
## 2 0.0013850416 0.13875263 0.0090027701 0.007812500 0.001383126 0.001397624
## 3 0.0020876827 0.05003524 0.0000000000 0.000000000 0.002783577 0.002826855
## 4 0.0055172414 0.04140351 0.0000000000 0.002123142 0.001382170 0.002093510
## 5 0.0114335972 0.01257862 0.0000000000 0.001803427 0.002643172 0.002652520
## 6 0.0132588974 0.08879493 0.0006920415 0.006373938 0.001387925 0.001407460
##           669          670         671        672         673         674
## 1 0.000000000 0.0013947001 0.000000000 0.01267218 0.001637555 0.017495899
## 2 0.006882312 0.0048644892 0.002851033 0.03778751 0.005414185 0.023913043
## 3 0.000000000 0.0007022472 0.000000000 0.01216142 0.001096491 0.020925110
## 4 0.002056203 0.0041436464 0.000000000 0.01256144 0.007571660 0.036896365
## 5 0.000877193 0.0017574692 0.002757353 0.01249133 0.003429355 0.005486968
## 6 0.006232687 0.0006988120 0.000000000 0.01762115 0.007072905 0.060142154
##           675       676         677        678        679          680
## 1 0.017717931 0.2360350 0.004961411 0.10110497 0.06419480 0.0021941854
## 2 0.061181435 0.1995662 0.059080963 0.15030170 0.13260274 0.0217627856
## 3 0.029223093 0.2635914 0.011627907 0.11784325 0.07491676 0.0016501650
## 4 0.014154282 0.1544318 0.022938285 0.15131579 0.03778751 0.0021750952
## 5 0.009883199 0.1238816 0.011764706 0.06675939 0.03254848 0.0006872852
## 6 0.050569801 0.1601968 0.031335899 0.24724062 0.07237569 0.0109589041
##           681          682         683         684        685          686
## 1 0.004427227 0.0005518764 0.001435750 0.001646542 0.12575508 0.0005488474
## 2 0.058888277 0.0142309797 0.012048193 0.021810251 0.09145346 0.0027218291
## 3 0.012235818 0.0011055832 0.002153625 0.004955947 0.13270925 0.0005506608
## 4 0.021953897 0.0038188762 0.005693950 0.015786609 0.06474429 0.0000000000
## 5 0.011871508 0.0048375950 0.006294964 0.010366275 0.08096886 0.0000000000
## 6 0.032614704 0.0011013216 0.012134190 0.018671060 0.10526316 0.0005488474
##           687        688         689        690        691         692
## 1 0.020487265 0.09160724 0.007032349 0.01552106 0.10088203 0.001651073
## 2 0.004398021 0.04243743 0.011929825 0.03579295 0.27945205 0.008757526
## 3 0.020011117 0.12665198 0.009852217 0.01838440 0.08019912 0.003316750
## 4 0.010976948 0.13104948 0.043871866 0.01815182 0.06666667 0.005470460
## 5 0.009769714 0.04258242 0.075704225 0.01180556 0.04152249 0.006906077
## 6 0.009961262 0.18705431 0.042657343 0.01107420 0.09641873 0.008796042
##          693         694        695         696        697        698
## 1 0.01603982 0.004945055 0.12807882 0.002087683 0.08226793 0.05353201
## 2 0.03956044 0.027777778 0.02060738 0.002076125 0.02208724 0.09836066
## 3 0.01890990 0.006615215 0.11916529 0.004210526 0.11135857 0.07982262
## 4 0.01923077 0.012520414 0.06890939 0.018055556 0.15198238 0.20894708
## 5 0.01115760 0.004824259 0.19021365 0.027336861 0.05505226 0.09065744
## 6 0.01165372 0.017534247 0.03335156 0.010445682 0.15235457 0.25785124
##          699        700        701         702         703         704
## 1 0.07700831 0.08195820 0.07845304 0.011647255 0.039106145 0.039133473
## 2 0.02583837 0.03224044 0.02519168 0.041276830 0.006206897 0.006237006
## 3 0.10789766 0.11319713 0.10537992 0.021702838 0.036933798 0.037245257
## 4 0.14692982 0.19564033 0.14857456 0.062671798 0.004824259 0.004874652
## 5 0.04586518 0.11703601 0.04586518 0.006289308 0.052038161 0.052447552
## 6 0.16519337 0.19243421 0.16162727 0.085920177 0.009735744 0.009762901
##          705         706          707          708        709          710
## 1 0.03109656 0.002785515 0.0000000000 0.0048644892 0.04759270 0.0056377731
## 2 0.04446855 0.008298755 0.0070372977 0.0006839945 0.10043908 0.0007032349
## 3 0.02300110 0.002803083 0.0000000000 0.0069881202 0.07615342 0.0085106383
## 4 0.06442880 0.001386963 0.0000000000 0.0103448276 0.13406593 0.0104384134
## 5 0.07305307 0.005258545 0.0017761989 0.0295652174 0.05237430 0.0291777188
## 6 0.05891980 0.001399580 0.0007072136 0.0166551006 0.19545958 0.0168657765
##            711        712        713        714         715          716
## 1 0.0000000000 0.02036324 0.09801762 0.01660210 0.001103144 0.0007037298
## 2 0.0057061341 0.03768433 0.22727273 0.06787083 0.014778325 0.0097629010
## 3 0.0000000000 0.02596685 0.05530973 0.01552967 0.001662971 0.0007077141
## 4 0.0000000000 0.03440743 0.13395298 0.04221491 0.001093494 0.0000000000
## 5 0.0018018018 0.02835408 0.08996540 0.04450626 0.001390821 0.0026619343
## 6 0.0007158196 0.08425110 0.17142857 0.09607951 0.002206288 0.0007072136
##           717         718         719         720         721         722
## 1 0.001095290 0.002205072 0.002177463 0.046703297 0.003346347 0.023140496
## 2 0.003259098 0.003833516 0.003243243 0.007629428 0.004424779 0.004373975
## 3 0.002195390 0.001662050 0.001640241 0.049532196 0.002239642 0.034254144
## 4 0.028184282 0.003823048 0.003241491 0.073529412 0.004980631 0.035499727
## 5 0.028236915 0.003467406 0.003431709 0.130103806 0.004213483 0.059187887
## 6 0.018062397 0.006063947 0.005440696 0.056438356 0.012827663 0.023102310
##          723        724        725        726         727         728
## 1 0.09186497 0.05788313 0.11889597 0.19845645 0.002849003 0.012107870
## 2 0.05396476 0.04806117 0.04574243 0.09434997 0.064039409 0.075027382
## 3 0.07405345 0.09850581 0.12847966 0.12334071 0.002117149 0.023255814
## 4 0.07472527 0.10049153 0.07087719 0.05695509 0.004234298 0.024017467
## 5 0.17618384 0.05882353 0.19874101 0.10562891 0.003555556 0.007591442
## 6 0.05703212 0.15659341 0.11016949 0.03799559 0.010653409 0.024215740
##          729         730         731         732        733        734
## 1 0.16876712 0.014262205 0.001430615 0.002765487 0.16971714 0.08094714
## 2 0.29885683 0.075625680 0.019067797 0.012074643 0.23584387 0.11323851
## 3 0.13901099 0.023691460 0.001429593 0.003882418 0.15200445 0.12908587
## 4 0.02610114 0.025027203 0.005661713 0.009857612 0.11544805 0.08192245
## 5 0.04679972 0.006177076 0.002705140 0.009027778 0.09179416 0.03678001
## 6 0.03665208 0.024657534 0.007112376 0.013812155 0.11221669 0.12892562
##            735         736        737         738        739          740
## 1 0.0038631347 0.004986150 0.08000000 0.005580357 0.05283648 0.0000000000
## 2 0.0388615216 0.051648352 0.11406844 0.069137168 0.01542700 0.0041350793
## 3 0.0033185841 0.005002779 0.12959912 0.007262570 0.06867672 0.0000000000
## 4 0.0059945504 0.008223684 0.07871878 0.018743109 0.03577325 0.0006868132
## 5 0.0006930007 0.003467406 0.03490760 0.004895105 0.11406578 0.0034965035
## 6 0.0104683196 0.017718715 0.12957900 0.042292710 0.04386452 0.0000000000
##           741         742          743         744         745          746
## 1 0.056986301 0.055741360 0.0000000000 0.031649084 0.000000000 0.0000000000
## 2 0.146579805 0.150776053 0.0112123336 0.094818082 0.052891396 0.0116999312
## 3 0.067656766 0.064913262 0.0007137759 0.060133630 0.000000000 0.0006944444
## 4 0.076045627 0.076923077 0.0028109628 0.091859186 0.005617978 0.0027453672
## 5 0.008965517 0.009097271 0.0017857143 0.008356546 0.000000000 0.0017391304
## 6 0.197382770 0.196002221 0.0105857445 0.238888889 0.010653409 0.0103734440
##           747         748        749         750         751        752
## 1 0.001654716 0.009350935 0.07538803 0.006915629 0.000000000 0.03818484
## 2 0.018052516 0.020240700 0.08131868 0.029593944 0.005505850 0.12403952
## 3 0.003871681 0.009356081 0.03833333 0.009749304 0.000000000 0.05604883
## 4 0.002735230 0.019629226 0.07688083 0.005505850 0.000000000 0.07021393
## 5 0.000000000 0.009015257 0.10160056 0.022827041 0.000000000 0.02090592
## 6 0.006607930 0.030803080 0.06921373 0.006219765 0.001389854 0.08453039
##           753         754         755          756          757         758
## 1 0.000000000 0.061157025 0.062534588 0.0000000000 0.0038652678 0.001100715
## 2 0.006206897 0.223741794 0.221365639 0.0027382256 0.0136761488 0.000000000
## 3 0.000000000 0.066334992 0.065108514 0.0005534034 0.0044370494 0.002219756
## 4 0.001374570 0.073224044 0.073666850 0.0021893815 0.0038335159 0.012048193
## 5 0.005263158 0.008356546 0.008350731 0.0000000000 0.0006954103 0.020804438
## 6 0.002777778 0.212672176 0.210643016 0.0011007155 0.0077177508 0.004961411
##          759        760         761         762        763        764
## 1 0.08769994 0.04848485 0.000000000 0.003561254 0.01490066 0.07289928
## 2 0.05464481 0.11111111 0.007633588 0.055084746 0.01423877 0.02537231
## 3 0.13171002 0.08093126 0.001101928 0.006423983 0.01438053 0.06844741
## 4 0.12862616 0.25465498 0.006525285 0.008426966 0.01420765 0.02585259
## 5 0.03331020 0.10242215 0.009595613 0.006156552 0.02018093 0.10793872
## 6 0.13781698 0.31201764 0.004939627 0.020743920 0.02206288 0.04550499
##          765        766          767         768        769        770
## 1 0.01098298 0.11787281 0.0000000000 0.000000000 0.01817181 0.07836645
## 2 0.02673213 0.05872757 0.0028129395 0.009836066 0.03659203 0.01638449
## 3 0.01321586 0.06343078 0.0000000000 0.000000000 0.02980132 0.09961262
## 4 0.01856909 0.08600980 0.0000000000 0.000000000 0.03167668 0.04759300
## 5 0.01718213 0.15934066 0.0008865248 0.002073255 0.06241331 0.14683368
## 6 0.02744237 0.07835616 0.0014114326 0.001101928 0.03247111 0.11019284
##          771        772          773        774        775        776
## 1 0.01603096 0.17279210 0.0005530973 0.04751381 0.01522491 0.13325930
## 2 0.02199010 0.07411444 0.0357142857 0.14945055 0.03583735 0.16105902
## 3 0.02720711 0.10616062 0.0005561735 0.06107718 0.02902557 0.09090909
## 4 0.02689352 0.09847661 0.0043931906 0.10928062 0.01441318 0.05019305
## 5 0.04643105 0.09592823 0.0000000000 0.09847434 0.01139351 0.14644351
## 6 0.03373894 0.03671233 0.0182320442 0.10668878 0.05521049 0.09317804
##          777        778         779         780        781         782
## 1 0.13098514 0.13241758 0.063736264 0.004219409 0.04860732 0.004410143
## 2 0.16129032 0.16294278 0.008178844 0.030769231 0.01029810 0.054674686
## 3 0.08854455 0.09040794 0.065600882 0.004261364 0.06794521 0.007190265
## 4 0.05455537 0.05558583 0.018478261 0.004189944 0.03796095 0.010952903
## 5 0.14196676 0.14432990 0.097510373 0.007985803 0.16438356 0.007654836
## 6 0.09796368 0.09873834 0.014262205 0.010578279 0.04741144 0.022038567
##          783         784        785         786        787        788
## 1 0.04704097 0.008771930 0.10824176 0.003858875 0.06298343 0.02091359
## 2 0.01047904 0.000000000 0.10722101 0.111899563 0.10928062 0.01311475
## 3 0.05135952 0.010491441 0.09618574 0.007747648 0.07095344 0.02868174
## 4 0.01355422 0.008719346 0.03442623 0.018599562 0.07447974 0.15111839
## 5 0.04247104 0.037215713 0.07501721 0.002764340 0.07033426 0.10261708
## 6 0.03625378 0.009868421 0.05619835 0.048431480 0.06522941 0.09845985
##            789        790          791         792       793        794
## 1 0.0007168459 0.02072909 0.0022050717 0.028176796 0.1018826 0.06326304
## 2 0.0042613636 0.07097681 0.0241360395 0.107025247 0.1482094 0.12686156
## 3 0.0021582734 0.01783167 0.0005543237 0.059312639 0.1444444 0.08862876
## 4 0.0007087172 0.01198872 0.0021941854 0.080043860 0.1463146 0.07920792
## 5 0.0035971223 0.01342883 0.0006963788 0.006915629 0.0312500 0.01947149
## 6 0.0014388489 0.02048023 0.0038546256 0.227071823 0.1653761 0.11123409
##          795         796         797         798          799         800
## 1 0.01377410 0.004969630 0.009381898 0.002732240 0.0000000000 0.003293085
## 2 0.02189381 0.033406353 0.026301370 0.000000000 0.0034891835 0.088427948
## 3 0.01935841 0.005543237 0.007734807 0.004934211 0.0000000000 0.008810573
## 4 0.01367615 0.005467469 0.005476451 0.017876490 0.0006934813 0.014705882
## 5 0.01722950 0.002775850 0.015256588 0.017869416 0.0026525199 0.004140787
## 6 0.01100715 0.006607930 0.011037528 0.008178844 0.0000000000 0.034596376
##          801         802         803          804         805          806
## 1 0.07692308 0.008893830 0.002750275 0.0007117438 0.007131103 0.0007067138
## 2 0.15228147 0.037465565 0.059530311 0.0084685956 0.061202186 0.0112280702
## 3 0.09489456 0.010608599 0.002768549 0.0000000000 0.008834898 0.0014224751
## 4 0.16153846 0.008264463 0.003816794 0.0007052186 0.016902944 0.0006993007
## 5 0.06827586 0.005582694 0.001386963 0.0000000000 0.015916955 0.0017574692
## 6 0.24958495 0.022172949 0.012672176 0.0021337127 0.031885651 0.0021246459
##          807        808         809         810          811        812
## 1 0.07982504 0.07984581 0.008228195 0.013698630 0.0000000000 0.01046832
## 2 0.27539381 0.27595628 0.083242655 0.147362697 0.0104384134 0.03065134
## 3 0.04843148 0.04759270 0.014325069 0.018121911 0.0007007708 0.01272828
## 4 0.05863192 0.06010929 0.019586507 0.041259501 0.0000000000 0.03823048
## 5 0.04287690 0.04203997 0.004126547 0.004810997 0.0000000000 0.02708333
## 6 0.05147864 0.05225523 0.049890351 0.061269147 0.0000000000 0.04958678
##           813         814         815        816        817        818
## 1 0.012141280 0.003307607 0.000551572 0.08902239 0.02145215 0.11079545
## 2 0.017534247 0.024563319 0.009863014 0.06666667 0.08788210 0.06993007
## 3 0.009413068 0.003869541 0.001106195 0.12719298 0.03585218 0.09000000
## 4 0.016420361 0.010958904 0.001091703 0.09761388 0.02617230 0.08077994
## 5 0.022869023 0.002772003 0.000000000 0.01793103 0.01386963 0.10017575
## 6 0.013774105 0.011043622 0.001102536 0.12404372 0.05659341 0.07323944
##           819         820         821         822         823       824
## 1 0.027609056 0.005506608 0.005443658 0.002747253 0.001644737 0.1473799
## 2 0.073585942 0.044238121 0.043266631 0.004354927 0.020697168 0.2283080
## 3 0.014884234 0.003869541 0.003280481 0.005512679 0.004402862 0.2149123
## 4 0.007119387 0.006014215 0.005939525 0.033242507 0.008156607 0.3859079
## 5 0.017361111 0.000000000 0.000000000 0.033149171 0.004149378 0.1537406
## 6 0.011049724 0.009895547 0.010881393 0.015925316 0.004388371 0.4044760
##          825        826         827        828        829        830        831
## 1 0.04367054 0.10806363 0.021253406 0.04598338 0.12219178 0.14238593 0.08251366
## 2 0.11587040 0.03050109 0.004867496 0.12004405 0.04622077 0.37335526 0.23902439
## 3 0.04656319 0.06043956 0.022988506 0.04621381 0.06875688 0.13976705 0.12904997
## 4 0.04057018 0.03320631 0.039978390 0.04002193 0.03590860 0.19441402 0.22414727
## 5 0.02437326 0.09441764 0.065753425 0.02226862 0.13764625 0.05736006 0.03650138
## 6 0.07446222 0.02079912 0.041870582 0.07403315 0.04157549 0.41997792 0.30588877
##          832         833        834         835          836         837
## 1 0.03673246 0.004407713 0.03782895 0.007057163 0.0000000000 0.062465753
## 2 0.07294502 0.052430366 0.07403375 0.006997901 0.0014245014 0.182906913
## 3 0.04515419 0.008844666 0.04405286 0.010615711 0.0007194245 0.066225166
## 4 0.12384574 0.032258065 0.12363834 0.032100488 0.0021428571 0.091304348
## 5 0.12834592 0.010395010 0.12552011 0.032461677 0.0018132366 0.008990318
## 6 0.12041598 0.035773253 0.11890411 0.023206751 0.0000000000 0.162280702
##           838         839         840         841         842         843
## 1 0.002857143 0.002816901 0.002735978 0.020262870 0.002840909 0.003551136
## 2 0.009866103 0.011879804 0.012244898 0.093275488 0.012676056 0.007037298
## 3 0.004273504 0.003526093 0.004112406 0.015934066 0.003548616 0.004264392
## 4 0.002120141 0.002094972 0.002035278 0.016260163 0.002108222 0.001403509
## 5 0.002695418 0.002680965 0.003407155 0.008928571 0.002671416 0.008912656
## 6 0.003551136 0.003516174 0.003419973 0.016921397 0.003543586 0.003526093
##          844         845        846          847         848          849
## 1 0.13669065 0.003861004 0.05671806 0.0000000000 0.012735327 0.0000000000
## 2 0.08941306 0.016967707 0.20349154 0.0071389347 0.070136986 0.0196399345
## 3 0.13548029 0.004983389 0.06453392 0.0005546312 0.013865779 0.0005521811
## 4 0.06095552 0.006560962 0.09929078 0.0010964912 0.019715225 0.0065466448
## 5 0.22648084 0.000691085 0.01106501 0.0041695622 0.004861111 0.0089717046
## 6 0.08563536 0.007713499 0.12006579 0.0027670172 0.038695412 0.0076880835
##            850          851        852         853          854        855
## 1 0.0005549390 0.0005506608 0.04542936 0.000000000 0.0005546312 0.07039911
## 2 0.0099282956 0.0109409190 0.13908741 0.007625272 0.0220264317 0.14955850
## 3 0.0005561735 0.0011049724 0.07786429 0.000000000 0.0011142061 0.11328125
## 4 0.0005506608 0.0010922993 0.26673985 0.000000000 0.0016492578 0.27603306
## 5 0.0000000000 0.0006915629 0.06054280 0.000000000 0.0013956734 0.10385965
## 6 0.0038802661 0.0044052863 0.24806202 0.000000000 0.0033259424 0.32000000
##          856         857         858        859        860        861
## 1 0.04180418 0.002073255 0.001653804 0.12887168 0.08924612 0.02199413
## 2 0.22131148 0.005486968 0.009309967 0.20780649 0.12904997 0.02456647
## 3 0.04864566 0.004844291 0.001660210 0.11679644 0.13871866 0.02662722
## 4 0.05558583 0.004115226 0.004371585 0.07515085 0.11618943 0.09941520
## 5 0.02005533 0.006890612 0.005517241 0.04596100 0.02659202 0.04324324
## 6 0.09290819 0.008977901 0.002202643 0.08849558 0.14317425 0.07391304
##          862         863        864        865        866         867
## 1 0.08925620 0.007173601 0.35302594 0.11772853 0.06604293 0.003325942
## 2 0.02188184 0.007163324 0.11695906 0.02602740 0.11864407 0.020385675
## 3 0.05423354 0.010101010 0.27034884 0.10896552 0.09889503 0.007234279
## 4 0.06560962 0.035919540 0.19247467 0.03804348 0.31515812 0.018151815
## 5 0.23958333 0.010771993 0.28462998 0.18378378 0.13604972 0.016794962
## 6 0.01706109 0.038904899 0.06029412 0.03703704 0.32070291 0.020509978
##          868        869        870       871        872         873        874
## 1 0.13520971 0.02040816 0.07611693 0.1180364 0.07633169 0.008214677 0.11973756
## 2 0.11433260 0.02409639 0.13910186 0.2624385 0.14207650 0.022270505 0.11340206
## 3 0.15605977 0.03652463 0.09718388 0.1112341 0.09668508 0.010468320 0.16703297
## 4 0.11986864 0.08433735 0.05237316 0.2571116 0.05443658 0.001627781 0.07542051
## 5 0.02506964 0.03414634 0.02781641 0.2234559 0.02768166 0.004827586 0.14630780
## 6 0.10639471 0.07891832 0.07213656 0.2706725 0.07291667 0.014238773 0.11584699
##          875        876        877        878        879        880        881
## 1 0.10573178 0.27105410 0.22068584 0.11964874 0.06194690 0.02149945 0.10970232
## 2 0.27413127 0.08462389 0.04333516 0.20586638 0.03294893 0.11445783 0.12280702
## 3 0.15848214 0.18344519 0.23375902 0.09521189 0.09983361 0.02603878 0.13565891
## 4 0.26214128 0.05472637 0.11287671 0.05000000 0.14701042 0.03938731 0.04385965
## 5 0.06451613 0.14516129 0.19252078 0.04135079 0.28333333 0.01319444 0.11450382
## 6 0.37208009 0.08004447 0.10325787 0.05966065 0.06736610 0.07272727 0.11816676
##          882         883       884         885       886       887         888
## 1 0.00872886 0.015350877 0.3263889 0.001102536 0.1360132 0.1338843 0.001105583
## 2 0.12744035 0.099074578 0.1349862 0.006027397 0.1808743 0.1772429 0.016420361
## 3 0.01587302 0.026417171 0.2835616 0.001661130 0.1823204 0.1820188 0.002767017
## 4 0.03841991 0.054921153 0.2129121 0.002738226 0.1131766 0.1124454 0.003289474
## 5 0.01022495 0.009661836 0.2661996 0.005563282 0.1348548 0.1326883 0.002079002
## 6 0.07528642 0.111354909 0.1006993 0.001655629 0.1864686 0.1839207 0.004966887
##          889         890         891        892        893        894
## 1 0.14230343 0.003508772 0.001099505 0.02918502 0.10160133 0.05801105
## 2 0.05159166 0.007697691 0.003810561 0.01314348 0.02249040 0.11690450
## 3 0.19055556 0.004908836 0.002201431 0.04643449 0.14008859 0.08738938
## 4 0.11696870 0.007665505 0.005452563 0.08264915 0.05808219 0.27013699
## 5 0.04111498 0.007042254 0.006896552 0.10704420 0.13365651 0.14909847
## 6 0.19105467 0.009148487 0.004939627 0.05405405 0.10479868 0.26062949
##          895        896         897        898        899        900        901
## 1 0.04678041 0.18839779 0.001522070 0.18818985 0.18725762 0.09873834 0.03065134
## 2 0.14043716 0.27607025 0.005952381 0.09857612 0.28059537 0.21405229 0.28804348
## 3 0.06464088 0.19071310 0.001524390 0.11289430 0.18715084 0.04961411 0.05674931
## 4 0.12111293 0.23194748 0.017937220 0.05090312 0.23068433 0.02934783 0.10234077
## 5 0.10650069 0.05197505 0.007648184 0.10672211 0.05303559 0.05175983 0.02346446
## 6 0.25041186 0.26417171 0.004573171 0.07056229 0.26570317 0.02631579 0.19232877
##          902         903         904         905        906        907
## 1 0.06181619 0.019188596 0.005494505 0.009381898 0.23651452 0.02854007
## 2 0.02880435 0.025585193 0.003816794 0.044931507 0.08659218 0.18389554
## 3 0.09145674 0.029735683 0.011019284 0.010537992 0.17342657 0.04780220
## 4 0.06778742 0.026673925 0.021218716 0.056892779 0.15426997 0.09249184
## 5 0.11881868 0.008258775 0.036526533 0.073509015 0.22431507 0.02351314
## 6 0.07897603 0.063969382 0.019178082 0.044101433 0.07544582 0.08852459
##          908        909        910         911        912         913
## 1 0.05681191 0.05872667 0.02273988 0.088705234 0.06592798 0.008820287
## 2 0.16475096 0.16539301 0.03353491 0.005476451 0.03143960 0.030617824
## 3 0.08517699 0.08940397 0.03053859 0.094579646 0.08240535 0.012714207
## 4 0.22896175 0.22863364 0.04507971 0.056192035 0.02308961 0.014192140
## 5 0.09661836 0.09903047 0.11172797 0.221067221 0.10552061 0.007575758
## 6 0.33590308 0.33187295 0.04585635 0.041254125 0.04547976 0.026967529
##          914       915        916        917        918       919        920
## 1 0.05193370 0.1058374 0.04743519 0.11934156 0.09301046 0.1084932 0.12952799
## 2 0.20842451 0.1134021 0.17608338 0.05882353 0.19594964 0.1126224 0.08156607
## 3 0.04148230 0.1610077 0.04648589 0.09766162 0.14847645 0.1644664 0.10291690
## 4 0.16538883 0.3596966 0.05679956 0.13513514 0.30508475 0.3606735 0.16303162
## 5 0.05416667 0.1218344 0.01448276 0.28161889 0.10194175 0.1217331 0.19931034
## 6 0.27367841 0.3087834 0.08890116 0.11050477 0.43234323 0.3062227 0.08131868
##           921        922         923         924        925         926
## 1 0.001100110 0.08724462 0.007150715 0.018722467 0.06670342 0.007688083
## 2 0.034991799 0.24986271 0.143558952 0.246036085 0.03056769 0.002724796
## 3 0.001660210 0.12645591 0.006057269 0.027101770 0.05755396 0.009376724
## 4 0.001643836 0.20746432 0.023458811 0.030054645 0.02677596 0.017973856
## 5 0.000000000 0.05732044 0.004135079 0.007570544 0.09172414 0.030513176
## 6 0.007166483 0.34123484 0.064285714 0.065456546 0.01925193 0.014810752
##          927        928        929        930        931        932        933
## 1 0.06655574 0.06911684 0.06854616 0.06942149 0.11117226 0.11589404 0.04132231
## 2 0.05335534 0.05234460 0.05274725 0.05291871 0.17085153 0.17122538 0.20678337
## 3 0.09510567 0.09416300 0.09560867 0.09618574 0.15818584 0.16187845 0.03257869
## 4 0.05002749 0.05160239 0.04945055 0.05019094 0.26037118 0.26284153 0.14847162
## 5 0.08901252 0.08896552 0.08907446 0.08611111 0.08188758 0.08200139 0.03321799
## 6 0.10736027 0.10825588 0.10871965 0.11019284 0.37541164 0.37328202 0.22734761
##          934        935        936        937        938        939        940
## 1 0.13533001 0.02782324 0.08769994 0.08698031 0.18041804 0.14749587 0.18332413
## 2 0.20549451 0.07675676 0.01256831 0.01308615 0.05145047 0.05507088 0.05245902
## 3 0.11006115 0.04045927 0.10779436 0.10523416 0.22571744 0.19171271 0.22744881
## 4 0.14881933 0.03082747 0.23375205 0.23369565 0.14590164 0.22628135 0.14668856
## 5 0.05277778 0.05910653 0.45925414 0.45784784 0.22245322 0.29647547 0.22122053
## 6 0.11566132 0.04080522 0.11074380 0.11452055 0.18422504 0.16684962 0.18622590
##           941        942        943       944        945         946        947
## 1 0.090010977 0.09917808 0.06673961 0.2379121 0.21210469 0.037465565 0.06239647
## 2 0.002723312 0.04725693 0.05971770 0.1178396 0.05952381 0.006553796 0.05095890
## 3 0.086596801 0.14560440 0.09688013 0.3060773 0.20940405 0.036403751 0.09307479
## 4 0.038776625 0.09994568 0.23289902 0.4122471 0.19967532 0.058406114 0.06123565
## 5 0.169082126 0.20547945 0.09710744 0.3347251 0.34065934 0.138812155 0.01951220
## 6 0.035714286 0.14160744 0.15051998 0.2352617 0.14978214 0.073585942 0.09641873
##          948         949        950         951        952        953
## 1 0.05937328 0.007747648 0.03728436 0.008830022 0.14961832 0.17469880
## 2 0.13591703 0.091108672 0.07849641 0.092653509 0.21568627 0.02661597
## 3 0.09641873 0.008314856 0.04355109 0.009439200 0.11384615 0.17279210
## 4 0.19039825 0.016429354 0.05126792 0.016429354 0.31762918 0.09609121
## 5 0.04979253 0.013212796 0.01610644 0.013879251 0.09197652 0.24395301
## 6 0.25618472 0.028761062 0.10371603 0.027578599 0.25339367 0.13646288
##          954       955        956         957         958         959
## 1 0.08411732 0.1739371 0.08823529 0.067867036 0.010514665 0.002189381
## 2 0.24546952 0.2624385 0.22889610 0.005502063 0.161538462 0.027233115
## 3 0.11074012 0.1400000 0.07831325 0.087078652 0.008328706 0.001097695
## 4 0.13541667 0.2489035 0.20583468 0.167352538 0.020890599 0.003255562
## 5 0.02367688 0.1598616 0.08710562 0.352631579 0.006263048 0.006872852
## 6 0.21700718 0.2990603 0.20663404 0.094313454 0.069213732 0.011487965
##          960        961        962        963         964        965        966
## 1 0.04328767 0.12759644 0.06759003 0.05577029 0.014277869 0.15516289 0.06318083
## 2 0.19630234 0.12170088 0.14513469 0.14215418 0.026215183 0.11135491 0.10523475
## 3 0.03799559 0.09748892 0.10339077 0.03100775 0.028145695 0.15354767 0.10759148
## 4 0.15878195 0.12992701 0.21612726 0.10727969 0.020719738 0.08278509 0.35208221
## 5 0.03310345 0.33519553 0.06958942 0.15620642 0.008971705 0.09318498 0.11833105
## 6 0.20865279 0.18842730 0.36675900 0.11239669 0.061403509 0.09437086 0.26278564
##          967        968       969        970        971        972        973
## 1 0.08508287 0.05054945 0.2147172 0.03926991 0.05647841 0.10503040 0.05312673
## 2 0.22971491 0.17294053 0.1566921 0.15422613 0.12857143 0.06907895 0.08585581
## 3 0.08448371 0.04196576 0.2322900 0.04386452 0.08888889 0.10266371 0.06329817
## 4 0.20197044 0.15393013 0.2367991 0.02687877 0.14622125 0.07502738 0.18595721
## 5 0.06241331 0.02551724 0.1844460 0.02006920 0.06458333 0.07023644 0.26315789
## 6 0.33572216 0.18481848 0.2929901 0.04628099 0.28752759 0.08604523 0.13473219
##          974        975        976        977       978        979         980
## 1 0.06063947 0.04210526 0.11907387 0.10905125 0.2065142 0.09060773 0.002757860
## 2 0.08310552 0.04455446 0.19693654 0.19003790 0.1523679 0.11293860 0.004928806
## 3 0.09297178 0.07670928 0.07249585 0.07021393 0.2681564 0.06038781 0.003314917
## 4 0.33496999 0.28516484 0.06062261 0.06013001 0.1125691 0.02844639 0.002186987
## 5 0.11126469 0.10013908 0.03867403 0.03964457 0.1200000 0.07538036 0.001385042
## 6 0.22369146 0.24254144 0.04182719 0.03873432 0.1449275 0.01931567 0.000000000
##           981        982         983          984         985        986
## 1 0.003508772 0.05664488 0.001420455 0.0005467469 0.000000000 0.01952036
## 2 0.030598053 0.04805616 0.018284107 0.0152173913 0.011142061 0.04653740
## 3 0.001415428 0.07209175 0.001425517 0.0000000000 0.000000000 0.03419283
## 4 0.009790210 0.02321814 0.007007708 0.0016286645 0.000698812 0.02374379
## 5 0.007943513 0.01163587 0.002678571 0.0006877579 0.000890472 0.03792135
## 6 0.017556180 0.03534530 0.012091038 0.0027352298 0.001408451 0.02675585
##          987         988          989        990         991        992
## 1 0.10804851 0.010503040 0.0005500550 0.01147541 0.003331483 0.11056106
## 2 0.02791461 0.018141836 0.0152755046 0.16440586 0.026937878 0.10103768
## 3 0.06515737 0.014452474 0.0000000000 0.01974767 0.004449388 0.12548369
## 4 0.04316940 0.007127193 0.0016348774 0.05579632 0.002199010 0.07906216
## 5 0.17614424 0.006925208 0.0006901311 0.01033770 0.000000000 0.08115543
## 6 0.03082003 0.007186291 0.0027472527 0.09557619 0.007190265 0.04607789
##           993          994        995        996        997        998
## 1 0.003852504 0.0006910850 0.06765677 0.04832510 0.07024336 0.02908891
## 2 0.025697102 0.0055096419 0.09651036 0.15720524 0.09599561 0.08006536
## 3 0.004977876 0.0000000000 0.08314978 0.08103638 0.08430394 0.04242424
## 4 0.002183406 0.0006868132 0.18770403 0.26608506 0.19123288 0.02830702
## 5 0.000000000 0.0043478261 0.21724138 0.11203320 0.22176022 0.02076125
## 6 0.007142857 0.0000000000 0.15161467 0.27252747 0.15359116 0.04052574
##           999        1000       1001       1002      1003       1004       1005
## 1 0.033755274 0.020441989 0.02091359 0.02467105 0.1253436 0.05494505 0.04292790
## 2 0.111420613 0.074643249 0.07494530 0.02066340 0.1805783 0.07103825 0.09066084
## 3 0.043356643 0.023849140 0.02434975 0.01976936 0.1743929 0.08669244 0.06949807
## 4 0.018181818 0.024109589 0.02679060 0.03369565 0.1891008 0.09700272 0.17993457
## 5 0.005328597 0.005571031 0.00625000 0.02739726 0.1137147 0.09681881 0.05244997
## 6 0.054016620 0.032578686 0.03195592 0.03608529 0.1539307 0.07734504 0.19989017
##          1006       1007         1008        1009        1010       1011
## 1 0.002178649 0.03257790 0.0005534034 0.001646542 0.004975124 0.05500550
## 2 0.004317323 0.05939902 0.0592755214 0.058791508 0.069741900 0.17130387
## 3 0.002735230 0.04240283 0.0044419767 0.004964148 0.012208657 0.06198118
## 4 0.002695418 0.03006993 0.0109649123 0.011976048 0.030153509 0.06205770
## 5 0.018455229 0.03815439 0.0000000000 0.000000000 0.005536332 0.03583735
## 6 0.001086957 0.03354298 0.0413907285 0.041028446 0.071152785 0.06930693
##           1012       1013       1014       1015       1016       1017
## 1 0.0006988120 0.06263858 0.05665567 0.06050605 0.12431544 0.15659341
## 2 0.0213204952 0.25344353 0.17550574 0.25068418 0.22512235 0.25942108
## 3 0.0007007708 0.05951057 0.06412383 0.05761773 0.15786579 0.16777042
## 4 0.0048275862 0.12252747 0.06222707 0.11542670 0.06902174 0.06434024
## 5 0.0077922078 0.02725367 0.03596127 0.02781641 0.07766323 0.05809129
## 6 0.0069541029 0.19911259 0.06820682 0.19470491 0.08643326 0.08831596
##           1018         1019       1020       1021         1022       1023
## 1 0.0006925208 0.0007147963 0.01380262 0.07326408 0.0000000000 0.02077638
## 2 0.0269337017 0.0226950355 0.06694272 0.05591748 0.0158643326 0.04180239
## 3 0.0013917884 0.0007178751 0.02236198 0.11403509 0.0005527916 0.03793293
## 4 0.0048242591 0.0042462845 0.03161512 0.12086721 0.0005458515 0.20358306
## 5 0.0060922541 0.0044762757 0.03836094 0.02957359 0.0020790021 0.15840220
## 6 0.0131578947 0.0071022727 0.03726708 0.16557377 0.0033039648 0.15191257
##         1024       1025       1026       1027       1028       1029        1030
## 1 0.01918860 0.19150110 0.07806487 0.02142857 0.02079002 0.07472527 0.003325942
## 2 0.01307190 0.07881773 0.19455041 0.13793103 0.14523975 0.01147541 0.014293568
## 3 0.02310231 0.23682751 0.11178414 0.03214286 0.03146853 0.04136790 0.002782415
## 4 0.03695652 0.17194323 0.12948857 0.06524823 0.06409373 0.04577657 0.003291278
## 5 0.03880804 0.28196268 0.08425414 0.03451327 0.03174603 0.15379310 0.000694927
## 6 0.01972603 0.19084391 0.13344316 0.06241135 0.06197772 0.02414929 0.001107420
##           1031        1032         1033        1034       1035       1036
## 1 0.0005540166 0.001413428 0.0005527916 0.001420455 0.16740741 0.09156977
## 2 0.0445544554 0.004915730 0.0455543359 0.004219409 0.05934718 0.03524229
## 3 0.0027793218 0.001408451 0.0033351862 0.001428571 0.10432191 0.11722142
## 4 0.0054945055 0.004210526 0.0076796489 0.003521127 0.04733728 0.03235294
## 5 0.0000000000 0.002650177 0.0000000000 0.001795332 0.11371841 0.14673913
## 6 0.0287292818 0.006346968 0.0309392265 0.004996431 0.02980626 0.04499274
##          1037       1038        1039        1040       1041       1042
## 1 0.004415011 0.05240175 0.003333333 0.079594790 0.25742574 0.04203540
## 2 0.020833333 0.01308140 0.004412576 0.073170732 0.06174863 0.05656233
## 3 0.004434590 0.05029586 0.004997224 0.042335766 0.28468767 0.05219323
## 4 0.001094691 0.04184704 0.015401540 0.008547009 0.18439716 0.02083333
## 5 0.002785515 0.03717472 0.027291812 0.055133080 0.24068966 0.08113731
## 6 0.003305785 0.06432749 0.004988914 0.007194245 0.17621145 0.01767956
##          1043        1044        1045       1046       1047       1048
## 1 0.003320421 0.037239869 0.078657866 0.06045480 0.01795580 0.11919081
## 2 0.021978022 0.002176279 0.006560962 0.21734358 0.05349794 0.07654723
## 3 0.006107718 0.035281147 0.083517699 0.09761509 0.02837370 0.06410959
## 4 0.004388371 0.015786609 0.054068815 0.18465753 0.02804378 0.02222222
## 5 0.020905923 0.084195997 0.197779320 0.02855153 0.08836395 0.12396694
## 6 0.006640841 0.016456391 0.037953795 0.32577434 0.02349689 0.01693989
##         1049       1050       1051      1052        1053        1054       1055
## 1 0.16095133 0.12017641 0.06873614 0.1678591 0.004953220 0.017117615 0.20498615
## 2 0.06622879 0.07326408 0.22124381 0.1776927 0.011494253 0.003831418 0.13755158
## 3 0.09151414 0.06467662 0.05552471 0.2341248 0.006087438 0.013858093 0.13988920
## 4 0.03501094 0.02185792 0.12795167 0.3549618 0.002738226 0.013129103 0.03611111
## 5 0.10607118 0.12655602 0.01873699 0.1512431 0.002766252 0.016701461 0.09041591
## 6 0.02976847 0.01596916 0.18995030 0.3880351 0.007705008 0.018753447 0.06077348
##         1056        1057        1058         1059       1060       1061
## 1 0.09156095 0.016930639 0.006060606 0.0005491488 0.13861386 0.05423354
## 2 0.11561644 0.003252033 0.013668671 0.0092643052 0.18951393 0.08786381
## 3 0.04878049 0.013721186 0.007182320 0.0016556291 0.08890116 0.08555556
## 4 0.01750547 0.013001083 0.005461496 0.0032644178 0.03431373 0.09989023
## 5 0.05601660 0.016438356 0.003460208 0.0055286800 0.09695291 0.08027682
## 6 0.01709873 0.019115238 0.004947774 0.0060373216 0.03626374 0.20741150
##          1062        1063        1064       1065       1066       1067
## 1 0.079670330 0.083516484 0.012568306 0.02535832 0.23861766 0.02640264
## 2 0.197052402 0.196399345 0.039587852 0.04266958 0.16539717 0.07962658
## 3 0.074421169 0.076033058 0.019758507 0.02490315 0.17721519 0.03722222
## 4 0.029989095 0.029427793 0.026044493 0.02240437 0.06844106 0.02905702
## 5 0.006194081 0.006185567 0.005479452 0.02006920 0.11395028 0.05551700
## 6 0.064250412 0.065384615 0.027277687 0.04068169 0.14731654 0.04022039
##         1068       1069       1070       1071        1072        1073
## 1 0.01368363 0.07241570 0.05365044 0.03984505 0.007700770 0.008282717
## 2 0.04073873 0.12438356 0.20581778 0.14772103 0.087834152 0.022478070
## 3 0.02145215 0.09756098 0.08398220 0.05336298 0.012134584 0.009983361
## 4 0.02497286 0.11439518 0.12287438 0.09682713 0.023433243 0.006020799
## 5 0.00550585 0.09993060 0.06838800 0.10138889 0.004143646 0.036136206
## 6 0.02408320 0.13072256 0.13709232 0.10325787 0.036732456 0.009933775
##         1074       1075      1076       1077       1078       1079       1080
## 1 0.13002755 0.05528188 0.1613611 0.04908991 0.08185841 0.06027397 0.06810631
## 2 0.20218579 0.05655247 0.1456628 0.09409190 0.07260726 0.06188925 0.02744237
## 3 0.09652510 0.09356081 0.2247934 0.07641196 0.10394664 0.07569940 0.05862832
## 4 0.03058438 0.09635275 0.1724701 0.03663204 0.06425041 0.02711497 0.02631579
## 5 0.04830918 0.01793103 0.1085003 0.03331020 0.15566366 0.08475735 0.02298050
## 6 0.05375754 0.12972085 0.2404162 0.07170436 0.08637874 0.06287589 0.03195592
##          1081      1082       1083        1084        1085      1086      1087
## 1 0.009232955 0.1312910 0.04429679 0.064906491 0.004834254 0.4373626 0.4446903
## 2 0.032348805 0.1225490 0.08044077 0.002183406 0.032458564 0.1185145 0.1206140
## 3 0.014184397 0.1112335 0.06559200 0.072375691 0.006302521 0.3789300 0.4174058
## 4 0.018374558 0.1147363 0.07734504 0.086743044 0.008971705 0.1259542 0.1233553
## 5 0.007174888 0.1771978 0.15389972 0.211432507 0.004382121 0.2048276 0.1862404
## 6 0.011355571 0.1188390 0.08241150 0.043406593 0.020905923 0.1482702 0.1614151
##         1088       1089       1090       1091       1092      1093       1094
## 1 0.38240177 0.03394625 0.33680746 0.15769231 0.04856512 0.0550055 0.05384615
## 2 0.22185612 0.09205903 0.11444142 0.12377317 0.10290093 0.1687602 0.16402406
## 3 0.31722222 0.05516266 0.26074972 0.11852260 0.07313019 0.0839779 0.08678828
## 4 0.08388158 0.04044630 0.09961894 0.05059848 0.07864555 0.2339149 0.23413567
## 5 0.12301313 0.10717449 0.19944979 0.12733564 0.01044568 0.0615917 0.06154910
## 6 0.10657096 0.03745583 0.08776742 0.04772353 0.21014892 0.3204846 0.32050577
##        1095      1096         1097      1098       1099         1100       1101
## 1 0.2498621 0.2455752 0.0000000000 0.2426068 0.13758943 0.0011068069 0.01387347
## 2 0.1336254 0.1353425 0.0092744135 0.1345316 0.09896118 0.0027397260 0.03636364
## 3 0.1772853 0.1736959 0.0005497526 0.1685950 0.20121614 0.0005546312 0.02060134
## 4 0.1648412 0.1689523 0.0021810251 0.1628540 0.20512821 0.0000000000 0.01541002
## 5 0.2315716 0.2275574 0.0055210490 0.2272099 0.07809261 0.0000000000 0.04517026
## 6 0.2005510 0.2037548 0.0054824561 0.2033991 0.30473047 0.0038695412 0.01828255
##         1102        1103       1104       1105        1106       1107
## 1 0.01894150 0.015907844 0.05901820 0.04670330 0.004402862 0.04452996
## 2 0.06329114 0.008169935 0.11822660 0.02128821 0.008192245 0.01909438
## 3 0.03242035 0.024766098 0.05794702 0.06504961 0.006070640 0.06229327
## 4 0.01822198 0.042414356 0.02128821 0.04196185 0.014161220 0.04084967
## 5 0.02712100 0.029532967 0.06167706 0.11019284 0.028178694 0.10880111
## 6 0.03722222 0.047697368 0.05112699 0.05159166 0.010439560 0.05219780
##          1108        1109         1110       1111        1112       1113
## 1 0.000000000 0.001108647 0.0000000000 0.03577325 0.002759382 0.11135612
## 2 0.009161381 0.012644310 0.0081922447 0.06075534 0.049288061 0.13005464
## 3 0.000000000 0.001667593 0.0005521811 0.05300939 0.006084071 0.11332228
## 4 0.001409443 0.004405286 0.0038188762 0.02894593 0.012575178 0.15447598
## 5 0.004456328 0.004861111 0.0048476454 0.09342561 0.014553015 0.03094911
## 6 0.004255319 0.002771619 0.0027517887 0.03951701 0.024820739 0.24573942
##         1114       1115       1116        1117       1118        1119
## 1 0.01438849 0.13611416 0.10049696 0.009955752 0.04367054 0.002748763
## 2 0.03903244 0.05223069 0.16712329 0.034577387 0.11068493 0.016339869
## 3 0.01778766 0.10462555 0.14024390 0.018333333 0.07329262 0.004415011
## 4 0.01701427 0.02773246 0.10224166 0.016492578 0.03620406 0.005997819
## 5 0.01394700 0.14511692 0.04854369 0.018093250 0.03331020 0.008264463
## 6 0.02542841 0.08826754 0.10044150 0.023796348 0.05074462 0.011525796
wine.mds <- cmdscale(1 - model_resample_1000_trees$proximity, eig=TRUE)
op <- par(pty="s")
pairs(cbind(wine_train[c("alcohol", "sulphates", "volatile_acidity", "total_sulfur_dioxide", "chlorides")
], wine.mds$points), cex=0.6, gap=0,
      col=c("red", "green", "blue")[as.numeric(wine_train$quality)],
      main="Wine Data: Predictors and MDS of Proximity Based on RandomForest")

# plot of the most important predictor variables of wine quality and then red, green, blue to represent the different qualitlies "Poor", "Normal", "Excellent"

#Explanatory Plot

ggpairs(data.frame(
  alcohol = wine_train$alcohol,
  sulphates = wine_train$sulphates,
  volatile_acidity = wine_train$volatile_acidity,
  MDS1 = wine.mds$points[,1],
  MDS2 = wine.mds$points[,2],
  quality = wine_train$quality
), mapping = aes(color = quality))
## `stat_bin()` using `bins = 30`. Pick better value `binwidth`.
## `stat_bin()` using `bins = 30`. Pick better value `binwidth`.
## `stat_bin()` using `bins = 30`. Pick better value `binwidth`.
## `stat_bin()` using `bins = 30`. Pick better value `binwidth`.
## `stat_bin()` using `bins = 30`. Pick better value `binwidth`.

# Interactive graph: 
p <- plot_ly(
  data = wine_train,
  x = ~alcohol,
  y = ~volatile_acidity,
  type = "scatter",
  mode = "markers",                 # <-- fix
  color = ~quality,                 # discrete if factor/ordered
  colors = viridis(nlevels(wine_train$quality)),
  size = ~sulphates,                # bubble size mapping
  sizes = c(5, 25),                 # pixel range for bubbles
  text = ~paste(
    "Quality:", quality,
    "<br>Alcohol:", alcohol,
    "<br>Volatile acidity:", volatile_acidity,
    "<br>Sulphates:", sulphates
  ),
  hoverinfo = "text"
) %>%
  layout(
    xaxis = list(title = "Alcohol"),
    yaxis = list(title = "Volatile acidity"),
    legend = list(title = list(text = "Quality"))
  )
p
## Warning: `line.width` does not currently support multiple values.
## Warning: `line.width` does not currently support multiple values.
## Warning: `line.width` does not currently support multiple values.
varImpPlot(model_resample_1000_trees)

ggplot(transform(wine_train, MDS1 = wine.mds$points[,1], MDS2 = wine.mds$points[,2]),
       aes(MDS1, MDS2)) +
  geom_hex(bins = 40) +                # or: geom_bin2d()
  facet_wrap(~ quality, nrow = 1) +    # show each class separately
  theme_bw() + labs(title = "Class-wise density (hex bins)")

mds_df <- data.frame(
  MDS1    = wine.mds$points[, 1],
  MDS2    = wine.mds$points[, 2],
  quality = wine_train$quality,      # your class label
  row_id  = seq_len(nrow(wine_train))# optional: ID for tooltips
)

# Visualize overlap between classes
ggplot(mds_df, aes(MDS1, MDS2, color = quality)) +
  geom_point(alpha = 0.6) +
  theme_minimal() +
  labs(title = "Random Forest proximity MDS by wine quality")

# Cluster tightness per class
aggregate(cbind(MDS1, MDS2) ~ quality, mds_df, function(x) sd(x))
##     quality       MDS1      MDS2
## 1      Poor 0.09883704 0.1826716
## 2    Normal 0.18552602 0.1543027
## 3 Excellent 0.15503251 0.1127544

Predictive Power of the Model

## Prepare X (predictors) and y (target) for TEST/VALIDATION of the Random Forrest 
wine_test$quality <- factor(wine_test$quality,
                            levels = levels(wine_train$quality),
                            ordered = is.ordered(wine_train$quality))

validate_x <- subset(wine_test, select = setdiff(names(wine_test), "quality"))
validate_y <- wine_test$quality

## 2) Get predictions: class labels and class probabilities
# Use your balanced model; change the object name if needed
rf_fit <- model_resample_1000_trees

# class predictions
pred_y <- predict(rf_fit, newdata = validate_x, type = "class")

# probability matrix (cols: "Poor","Normal","Excellent")
probs  <- predict(rf_fit, newdata = validate_x, type = "prob")

## 3) Confusion matrix + overall metrics 
cm <- confusionMatrix(pred_y, validate_y)  # multiclass by default
cm
## Confusion Matrix and Statistics
## 
##            Reference
## Prediction  Poor Normal Excellent
##   Poor        13     51         3
##   Normal       6    263         8
##   Excellent    0     82        54
## 
## Overall Statistics
##                                           
##                Accuracy : 0.6875          
##                  95% CI : (0.6439, 0.7287)
##     No Information Rate : 0.825           
##     P-Value [Acc > NIR] : 1               
##                                           
##                   Kappa : 0.349           
##                                           
##  Mcnemar's Test P-Value : <2e-16          
## 
## Statistics by Class:
## 
##                      Class: Poor Class: Normal Class: Excellent
## Sensitivity              0.68421        0.6641           0.8308
## Specificity              0.88286        0.8333           0.8024
## Pos Pred Value           0.19403        0.9495           0.3971
## Neg Pred Value           0.98547        0.3448           0.9680
## Prevalence               0.03958        0.8250           0.1354
## Detection Rate           0.02708        0.5479           0.1125
## Detection Prevalence     0.13958        0.5771           0.2833
## Balanced Accuracy        0.78354        0.7487           0.8166
cm$byclass
## NULL
## 4) Top-2 accuracy, useful with 3 classes ---
top2_correct <- mean(apply(probs, 1, function(p) {
  ord <- order(p, decreasing = TRUE)
  levels(validate_y)[ord[1]] == as.character(validate_y)[which.max(p)] || 
  levels(validate_y)[ord[2]] == as.character(validate_y)[which.max(p)]
}))
top2_correct
## [1] 0.9833333
## 5) One-vs-rest ROC AUC per class 
auc_per_class <- sapply(levels(validate_y), function(cl){
  y_bin <- factor(ifelse(validate_y == cl, cl, paste0("Not_", cl)),
                  levels = c(paste0("Not_", cl), cl))
  auc(roc(y_bin, probs[, cl], levels = levels(y_bin), direction = "<"))
})
auc_per_class
##      Poor    Normal Excellent 
## 0.8526658 0.8354227 0.8810195
# Confusion Matrix Graphic 
cm_table <- as.table(cm$table)
cm_df <- melt(cm_table)
colnames(cm_df) <- c("True", "Predicted", "Count")

ggplot(cm_df, aes(x = Predicted, y = True, fill = Count)) +
  geom_tile(color = "white") +
  geom_text(aes(label = Count), color = "black", size = 4) +
  scale_fill_viridis_c() +
  theme_minimal() +
  labs(title = "Confusion Matrix — Random Forest Wine Quality",
       x = "Predicted Class", y = "True Class")

# Creation of ROC plots: 
roc_poor  <- roc(validate_y == "Poor",      probs[,"Poor"])
## Setting levels: control = FALSE, case = TRUE
## Setting direction: controls < cases
roc_norm  <- roc(validate_y == "Normal",    probs[,"Normal"])
## Setting levels: control = FALSE, case = TRUE
## Setting direction: controls < cases
roc_ex    <- roc(validate_y == "Excellent", probs[,"Excellent"])
## Setting levels: control = FALSE, case = TRUE
## Setting direction: controls < cases
plot.roc(roc_poor, col="purple",  print.auc=TRUE)
plot.roc(roc_norm, add=TRUE, col="blue",    print.auc=TRUE, print.auc.y=0.4)
plot.roc(roc_ex,   add=TRUE, col="gold",    print.auc=TRUE, print.auc.y=0.3)
legend("bottomright", c("Poor vs rest","Normal vs rest","Excellent vs rest"),
       col=c("purple","blue","gold"), lwd=2)

# varIMplot to see which variables help the model distigusih what type of wine it is
varImpPlot(model_resample_1000_trees, main="Variable Importance — Random Forest")

par(mfrow=c(1,3))

# PPC plots: to show relationship between probability weightings to classes and signficant variables
partialPlot(model_resample_1000_trees, as.data.frame(wine_train), "alcohol", which.class="Excellent")
partialPlot(model_resample_1000_trees, as.data.frame(wine_train), "volatile_acidity", which.class="Excellent")
partialPlot(model_resample_1000_trees, as.data.frame(wine_train), "sulphates", which.class="Excellent")

par(mfrow=c(1,1))

Bayesian Ordinal Regression Model

# Problem arising due to corput ggplot code: 
#search()               # should show only base packages
# Remove broken installs: 
#lib <- .libPaths()[1]
#unlink(file.path(lib, "ggplot2"), recursive = TRUE, force = TRUE)
#unlink(file.path(lib, "00LOCK*"), recursive = TRUE, force = TRUE)
# Re-install package cleanly 
#options(repos = c(CRAN = "https://cloud.r-project.org"))
#options(pkgType = "binary")
#install.packages("ggplot2")   # should NOT complain it's in use
#library(ggplot2)              # quick sanity check


# Need To install Rtools First
#install.packages("cmdstanr",
#  repos = c("https://mc-stan.org/r-packages/", getOption("repos"))
#)

#check_cmdstan_toolchain(fix = TRUE)   # should say toolchain is good
#install_cmdstan()                     # first-time download & build (~few minutes)


## Check ## After Resatall: ##
#has_build_tools(debug = TRUE)
Sys.which(c("make","g++")) # if in form "C:\\rtools44\\usr\\bin\\make.exe" "C:\\rtools44\\X86_64~1.POS\\bin\\G__~1.EXE" under Make and g++ repspectively then good to go. 
##                                         make 
##           "C:\\rtools44\\usr\\bin\\make.exe" 
##                                          g++ 
## "C:\\rtools44\\X86_64~1.POS\\bin\\G__~1.EXE"
#Installing Windows friendly Binary from Stans R-Universe

# Check that install was all good: 
check_cmdstan_toolchain(fix = TRUE)
## The C++ toolchain required for CmdStan is setup properly!
# one-time install of CmdStan (downloads & builds)
#cmdstanr::install_cmdstan()   # installs to: %USERPROFILE%/.cmdstanr/cmdstan-<ver>
#cmdstanr::cmdstan_path()      # verify path

## Running Bayesian Ordinal Model ##
# make sure the outcome is ordered *and* levels are in order
wine_train$quality <- ordered(wine_train$quality,
                              levels = c("Poor","Normal","Excellent"))

pri <- c(
  set_prior("normal(0,1)", class = "b"),               # slopes
  set_prior("student_t(3,0,2.5)", class = "Intercept") # cutpoints
)
fit_ord <- brm(
  quality ~ .,
  data   = wine_train,
  family = cumulative("logit"),
  prior  = pri,
  chains = 4, cores = 4, iter = 2000, seed = 123,
  backend = "cmdstanr"     # <— uses your CmdStan at ~/.cmdstan
)
## Start sampling
## Running MCMC with 4 parallel chains...
## 
## Chain 1 Iteration:    1 / 2000 [  0%]  (Warmup) 
## Chain 2 Iteration:    1 / 2000 [  0%]  (Warmup) 
## Chain 3 Iteration:    1 / 2000 [  0%]  (Warmup) 
## Chain 4 Iteration:    1 / 2000 [  0%]  (Warmup) 
## Chain 2 Iteration:  100 / 2000 [  5%]  (Warmup) 
## Chain 2 Iteration:  200 / 2000 [ 10%]  (Warmup) 
## Chain 3 Iteration:  100 / 2000 [  5%]  (Warmup) 
## Chain 1 Iteration:  100 / 2000 [  5%]  (Warmup) 
## Chain 2 Iteration:  300 / 2000 [ 15%]  (Warmup) 
## Chain 2 Iteration:  400 / 2000 [ 20%]  (Warmup) 
## Chain 3 Iteration:  200 / 2000 [ 10%]  (Warmup) 
## Chain 2 Iteration:  500 / 2000 [ 25%]  (Warmup) 
## Chain 1 Iteration:  200 / 2000 [ 10%]  (Warmup) 
## Chain 3 Iteration:  300 / 2000 [ 15%]  (Warmup) 
## Chain 2 Iteration:  600 / 2000 [ 30%]  (Warmup) 
## Chain 1 Iteration:  300 / 2000 [ 15%]  (Warmup) 
## Chain 2 Iteration:  700 / 2000 [ 35%]  (Warmup) 
## Chain 3 Iteration:  400 / 2000 [ 20%]  (Warmup) 
## Chain 2 Iteration:  800 / 2000 [ 40%]  (Warmup) 
## Chain 1 Iteration:  400 / 2000 [ 20%]  (Warmup) 
## Chain 3 Iteration:  500 / 2000 [ 25%]  (Warmup) 
## Chain 2 Iteration:  900 / 2000 [ 45%]  (Warmup) 
## Chain 1 Iteration:  500 / 2000 [ 25%]  (Warmup) 
## Chain 3 Iteration:  600 / 2000 [ 30%]  (Warmup) 
## Chain 2 Iteration: 1000 / 2000 [ 50%]  (Warmup) 
## Chain 2 Iteration: 1001 / 2000 [ 50%]  (Sampling) 
## Chain 1 Iteration:  600 / 2000 [ 30%]  (Warmup) 
## Chain 3 Iteration:  700 / 2000 [ 35%]  (Warmup) 
## Chain 4 Iteration:  100 / 2000 [  5%]  (Warmup) 
## Chain 2 Iteration: 1100 / 2000 [ 55%]  (Sampling) 
## Chain 1 Iteration:  700 / 2000 [ 35%]  (Warmup) 
## Chain 3 Iteration:  800 / 2000 [ 40%]  (Warmup) 
## Chain 2 Iteration: 1200 / 2000 [ 60%]  (Sampling) 
## Chain 1 Iteration:  800 / 2000 [ 40%]  (Warmup) 
## Chain 3 Iteration:  900 / 2000 [ 45%]  (Warmup) 
## Chain 2 Iteration: 1300 / 2000 [ 65%]  (Sampling) 
## Chain 4 Iteration:  200 / 2000 [ 10%]  (Warmup) 
## Chain 1 Iteration:  900 / 2000 [ 45%]  (Warmup) 
## Chain 3 Iteration: 1000 / 2000 [ 50%]  (Warmup) 
## Chain 3 Iteration: 1001 / 2000 [ 50%]  (Sampling) 
## Chain 2 Iteration: 1400 / 2000 [ 70%]  (Sampling) 
## Chain 1 Iteration: 1000 / 2000 [ 50%]  (Warmup) 
## Chain 1 Iteration: 1001 / 2000 [ 50%]  (Sampling) 
## Chain 4 Iteration:  300 / 2000 [ 15%]  (Warmup) 
## Chain 3 Iteration: 1100 / 2000 [ 55%]  (Sampling) 
## Chain 2 Iteration: 1500 / 2000 [ 75%]  (Sampling) 
## Chain 1 Iteration: 1100 / 2000 [ 55%]  (Sampling) 
## Chain 4 Iteration:  400 / 2000 [ 20%]  (Warmup) 
## Chain 3 Iteration: 1200 / 2000 [ 60%]  (Sampling) 
## Chain 2 Iteration: 1600 / 2000 [ 80%]  (Sampling) 
## Chain 1 Iteration: 1200 / 2000 [ 60%]  (Sampling) 
## Chain 4 Iteration:  500 / 2000 [ 25%]  (Warmup) 
## Chain 3 Iteration: 1300 / 2000 [ 65%]  (Sampling) 
## Chain 2 Iteration: 1700 / 2000 [ 85%]  (Sampling) 
## Chain 1 Iteration: 1300 / 2000 [ 65%]  (Sampling) 
## Chain 4 Iteration:  600 / 2000 [ 30%]  (Warmup) 
## Chain 3 Iteration: 1400 / 2000 [ 70%]  (Sampling) 
## Chain 2 Iteration: 1800 / 2000 [ 90%]  (Sampling) 
## Chain 1 Iteration: 1400 / 2000 [ 70%]  (Sampling) 
## Chain 4 Iteration:  700 / 2000 [ 35%]  (Warmup) 
## Chain 2 Iteration: 1900 / 2000 [ 95%]  (Sampling) 
## Chain 3 Iteration: 1500 / 2000 [ 75%]  (Sampling) 
## Chain 4 Iteration:  800 / 2000 [ 40%]  (Warmup) 
## Chain 1 Iteration: 1500 / 2000 [ 75%]  (Sampling) 
## Chain 2 Iteration: 2000 / 2000 [100%]  (Sampling) 
## Chain 3 Iteration: 1600 / 2000 [ 80%]  (Sampling) 
## Chain 4 Iteration:  900 / 2000 [ 45%]  (Warmup) 
## Chain 2 finished in 27.3 seconds.
## Chain 1 Iteration: 1600 / 2000 [ 80%]  (Sampling) 
## Chain 4 Iteration: 1000 / 2000 [ 50%]  (Warmup) 
## Chain 1 Iteration: 1700 / 2000 [ 85%]  (Sampling) 
## Chain 3 Iteration: 1700 / 2000 [ 85%]  (Sampling) 
## Chain 4 Iteration: 1001 / 2000 [ 50%]  (Sampling) 
## Chain 1 Iteration: 1800 / 2000 [ 90%]  (Sampling) 
## Chain 3 Iteration: 1800 / 2000 [ 90%]  (Sampling) 
## Chain 4 Iteration: 1100 / 2000 [ 55%]  (Sampling) 
## Chain 1 Iteration: 1900 / 2000 [ 95%]  (Sampling) 
## Chain 3 Iteration: 1900 / 2000 [ 95%]  (Sampling) 
## Chain 4 Iteration: 1200 / 2000 [ 60%]  (Sampling) 
## Chain 1 Iteration: 2000 / 2000 [100%]  (Sampling) 
## Chain 3 Iteration: 2000 / 2000 [100%]  (Sampling) 
## Chain 4 Iteration: 1300 / 2000 [ 65%]  (Sampling) 
## Chain 1 finished in 29.6 seconds.
## Chain 3 finished in 29.6 seconds.
## Chain 4 Iteration: 1400 / 2000 [ 70%]  (Sampling) 
## Chain 4 Iteration: 1500 / 2000 [ 75%]  (Sampling) 
## Chain 4 Iteration: 1600 / 2000 [ 80%]  (Sampling) 
## Chain 4 Iteration: 1700 / 2000 [ 85%]  (Sampling) 
## Chain 4 Iteration: 1800 / 2000 [ 90%]  (Sampling) 
## Chain 4 Iteration: 1900 / 2000 [ 95%]  (Sampling) 
## Chain 4 Iteration: 2000 / 2000 [100%]  (Sampling) 
## Chain 4 finished in 33.1 seconds.
## 
## All 4 chains finished successfully.
## Mean chain execution time: 29.9 seconds.
## Total execution time: 33.4 seconds.
## Loading required package: rstan
## Warning: package 'rstan' was built under R version 4.4.3
## Loading required package: StanHeaders
## Warning: package 'StanHeaders' was built under R version 4.4.3
## 
## rstan version 2.32.7 (Stan version 2.32.2)
## For execution on a local, multicore CPU with excess RAM we recommend calling
## options(mc.cores = parallel::detectCores()).
## To avoid recompilation of unchanged Stan programs, we recommend calling
## rstan_options(auto_write = TRUE)
## For within-chain threading using `reduce_sum()` or `map_rect()` Stan functions,
## change `threads_per_chain` option:
## rstan_options(threads_per_chain = 1)
## Do not specify '-march=native' in 'LOCAL_CPPFLAGS' or a Makevars file
## 
## Attaching package: 'rstan'
## The following object is masked from 'package:magrittr':
## 
##     extract
# Assessing models fit: 
summary(fit_ord)                     # coefficients + cutpoints + Rhat/ESS
##  Family: cumulative 
##   Links: mu = logit 
## Formula: quality ~ fixed_acidity + volatile_acidity + citric_acid + residual_sugar + chlorides + free_sulfur_dioxide + total_sulfur_dioxide + density + p_h + sulphates + alcohol 
##    Data: wine_train (Number of observations: 1119) 
##   Draws: 4 chains, each with iter = 2000; warmup = 1000; thin = 1;
##          total post-warmup draws = 4000
## 
## Regression Coefficients:
##                      Estimate Est.Error l-95% CI u-95% CI Rhat Bulk_ESS
## Intercept[1]             2.97      2.73    -2.24     8.34 1.00     5469
## Intercept[2]             9.34      2.76     3.96    14.83 1.00     5266
## fixed_acidity            0.11      0.07    -0.03     0.24 1.00     3985
## volatile_acidity        -2.74      0.51    -3.73    -1.73 1.00     4755
## citric_acid              0.38      0.57    -0.77     1.46 1.00     4235
## residual_sugar          -0.01      0.06    -0.13     0.11 1.00     5914
## chlorides               -1.53      0.87    -3.21     0.18 1.00     6568
## free_sulfur_dioxide      0.02      0.01    -0.01     0.04 1.00     4957
## total_sulfur_dioxide    -0.01      0.00    -0.01     0.00 1.00     4629
## density                 -0.01      0.99    -1.97     1.90 1.00     6828
## p_h                     -0.82      0.64    -2.08     0.45 1.00     5061
## sulphates                2.20      0.46     1.30     3.08 1.00     5544
## alcohol                  0.84      0.09     0.67     1.03 1.00     4307
##                      Tail_ESS
## Intercept[1]             3197
## Intercept[2]             2967
## fixed_acidity            3416
## volatile_acidity         3085
## citric_acid              3305
## residual_sugar           3189
## chlorides                3082
## free_sulfur_dioxide      3169
## total_sulfur_dioxide     3054
## density                  2794
## p_h                      2872
## sulphates                3193
## alcohol                  3036
## 
## Further Distributional Parameters:
##      Estimate Est.Error l-95% CI u-95% CI Rhat Bulk_ESS Tail_ESS
## disc     1.00      0.00     1.00     1.00   NA       NA       NA
## 
## Draws were sampled using sample(hmc). For each parameter, Bulk_ESS
## and Tail_ESS are effective sample size measures, and Rhat is the potential
## scale reduction factor on split chains (at convergence, Rhat = 1).
bayes_R2(fit_ord)
## Warning: Predictions are treated as continuous variables in 'bayes_R2' which is
## likely invalid for ordinal families.
##     Estimate  Est.Error      Q2.5     Q97.5
## R2 0.2130358 0.02284927 0.1676189 0.2561668
pp_check(fit_ord, type = "bars")     # overall class counts
## Using 10 posterior draws for ppc type 'bars' by default.

pp_check(fit_ord, type = "bars_grouped", group = "quality")
## Using 10 posterior draws for ppc type 'bars_grouped' by default.

Predictive Power of the Model

labs <- c("Poor","Normal","Excellent")  # ensure this matches your factor order

# Draws x N matrix of category indices (1..K)
pp <- posterior_predict(fit_ord, newdata = wine_test)  

# N x K matrix of posterior mean probs
probs <- t(apply(pp, 2, function(v) prop.table(tabulate(v, nbins = length(labs)))))
# Hard labels via argmax
pred_lab <- factor(labs[max.col(probs, ties.method = "first")], levels = labs, ordered = TRUE)

# Confusion + accuracy
table(Truth = wine_test$quality, Pred = pred_lab)
##            Pred
## Truth       Poor Normal Excellent
##   Poor         0     19         0
##   Normal       0    384        12
##   Excellent    0     52        13
mean(pred_lab == wine_test$quality) # Accuarcy 
## [1] 0.8270833
pred <- factor(pred_lab, levels = levels(wine_test$quality))
ref  <- factor(wine_test$quality, levels = levels(wine_test$quality))
confusionMatrix(pred, ref)
## Confusion Matrix and Statistics
## 
##            Reference
## Prediction  Poor Normal Excellent
##   Poor         0      0         0
##   Normal      19    384        52
##   Excellent    0     12        13
## 
## Overall Statistics
##                                           
##                Accuracy : 0.8271          
##                  95% CI : (0.7902, 0.8599)
##     No Information Rate : 0.825           
##     P-Value [Acc > NIR] : 0.4812          
##                                           
##                   Kappa : 0.1802          
##                                           
##  Mcnemar's Test P-Value : NA              
## 
## Statistics by Class:
## 
##                      Class: Poor Class: Normal Class: Excellent
## Sensitivity              0.00000        0.9697          0.20000
## Specificity              1.00000        0.1548          0.97108
## Pos Pred Value               NaN        0.8440          0.52000
## Neg Pred Value           0.96042        0.5200          0.88571
## Prevalence               0.03958        0.8250          0.13542
## Detection Rate           0.00000        0.8000          0.02708
## Detection Prevalence     0.00000        0.9479          0.05208
## Balanced Accuracy        0.50000        0.5622          0.58554
# ROC for the three classes
roc_poor <- roc(wine_test$quality == "Poor", probs[, 1])
## Setting levels: control = FALSE, case = TRUE
## Setting direction: controls < cases
roc_norm <- roc(wine_test$quality == "Normal", probs[, 2])
## Setting levels: control = FALSE, case = TRUE
## Setting direction: controls < cases
roc_ex   <- roc(wine_test$quality == "Excellent", probs[, 3])
## Setting levels: control = FALSE, case = TRUE
## Setting direction: controls < cases
auc(roc_poor); auc(roc_norm); auc(roc_ex)
## Area under the curve: 0.7883
## Area under the curve: 0.7739
## Area under the curve: 0.8579
plot.roc(roc_norm, col="blue", main="One-vs-All ROC Curves")
lines.roc(roc_poor, col="red")
lines.roc(roc_ex, col="green")
legend("bottomright", legend=c("Poor","Normal","Excellent"),
       col=c("red","blue","green"), lwd=2)

# Metrics for model performance: 

# MCM Graph: 
posterior_summary(fit_ord, pars = "^b_Intercept")
## Warning: Argument 'pars' is deprecated. Please use 'variable' instead.
##                Estimate Est.Error      Q2.5     Q97.5
## b_Intercept[1] 2.970277  2.728673 -2.241332  8.341163
## b_Intercept[2] 9.342572  2.764952  3.957856 14.831309
# 95% inner band, 99% outer band (avoids the prob/prob_outer warning)
mcmc_intervals(
  fit_ord,
  regex_pars = "^b_Intercept",
  prob = 0.95,
  prob_outer = 0.99
)

## After a quick review of the confusion matrix, we can see that the oridinal bayes model is unable to correctly model and of the poor wines in the test set. We consider an application which considers a Class-weighted likelihood, to try midgate the dominance of the normal set over the poor and excellent wines. 

Smoothed Bayesian Ordinal regression Model

##Relaxing proportional odds, another method to try prevent normal class dominating the others:
fit_cs <- brm(
  quality ~ cs(volatile_acidity) + alcohol + sulphates + citric_acid + p_h,
  data = wine_train, family = cumulative("logit"),
  prior = pri, chains = 4, iter = 2000, backend = "cmdstanr"
)
## Warning: Category specific effects for this family should be considered
## experimental and may have convergence issues.
## Start sampling
## Running MCMC with 4 sequential chains...
## 
## Chain 1 Iteration:    1 / 2000 [  0%]  (Warmup) 
## Chain 1 Iteration:  100 / 2000 [  5%]  (Warmup) 
## Chain 1 Iteration:  200 / 2000 [ 10%]  (Warmup) 
## Chain 1 Iteration:  300 / 2000 [ 15%]  (Warmup) 
## Chain 1 Iteration:  400 / 2000 [ 20%]  (Warmup) 
## Chain 1 Iteration:  500 / 2000 [ 25%]  (Warmup) 
## Chain 1 Iteration:  600 / 2000 [ 30%]  (Warmup) 
## Chain 1 Iteration:  700 / 2000 [ 35%]  (Warmup) 
## Chain 1 Iteration:  800 / 2000 [ 40%]  (Warmup) 
## Chain 1 Iteration:  900 / 2000 [ 45%]  (Warmup) 
## Chain 1 Iteration: 1000 / 2000 [ 50%]  (Warmup) 
## Chain 1 Iteration: 1001 / 2000 [ 50%]  (Sampling) 
## Chain 1 Iteration: 1100 / 2000 [ 55%]  (Sampling) 
## Chain 1 Iteration: 1200 / 2000 [ 60%]  (Sampling) 
## Chain 1 Iteration: 1300 / 2000 [ 65%]  (Sampling) 
## Chain 1 Iteration: 1400 / 2000 [ 70%]  (Sampling) 
## Chain 1 Iteration: 1500 / 2000 [ 75%]  (Sampling) 
## Chain 1 Iteration: 1600 / 2000 [ 80%]  (Sampling) 
## Chain 1 Iteration: 1700 / 2000 [ 85%]  (Sampling) 
## Chain 1 Iteration: 1800 / 2000 [ 90%]  (Sampling) 
## Chain 1 Iteration: 1900 / 2000 [ 95%]  (Sampling) 
## Chain 1 Iteration: 2000 / 2000 [100%]  (Sampling) 
## Chain 1 finished in 49.2 seconds.
## Chain 2 Rejecting initial value:
## Chain 2   Log probability evaluates to log(0), i.e. negative infinity.
## Chain 2   Stan can't start sampling from this initial value.
## Chain 2 Iteration:    1 / 2000 [  0%]  (Warmup) 
## Chain 2 Iteration:  100 / 2000 [  5%]  (Warmup) 
## Chain 2 Iteration:  200 / 2000 [ 10%]  (Warmup) 
## Chain 2 Iteration:  300 / 2000 [ 15%]  (Warmup) 
## Chain 2 Iteration:  400 / 2000 [ 20%]  (Warmup) 
## Chain 2 Iteration:  500 / 2000 [ 25%]  (Warmup) 
## Chain 2 Iteration:  600 / 2000 [ 30%]  (Warmup) 
## Chain 2 Iteration:  700 / 2000 [ 35%]  (Warmup) 
## Chain 2 Iteration:  800 / 2000 [ 40%]  (Warmup) 
## Chain 2 Iteration:  900 / 2000 [ 45%]  (Warmup) 
## Chain 2 Iteration: 1000 / 2000 [ 50%]  (Warmup) 
## Chain 2 Iteration: 1001 / 2000 [ 50%]  (Sampling) 
## Chain 2 Iteration: 1100 / 2000 [ 55%]  (Sampling) 
## Chain 2 Iteration: 1200 / 2000 [ 60%]  (Sampling) 
## Chain 2 Iteration: 1300 / 2000 [ 65%]  (Sampling) 
## Chain 2 Iteration: 1400 / 2000 [ 70%]  (Sampling) 
## Chain 2 Iteration: 1500 / 2000 [ 75%]  (Sampling) 
## Chain 2 Iteration: 1600 / 2000 [ 80%]  (Sampling) 
## Chain 2 Iteration: 1700 / 2000 [ 85%]  (Sampling) 
## Chain 2 Iteration: 1800 / 2000 [ 90%]  (Sampling) 
## Chain 2 Iteration: 1900 / 2000 [ 95%]  (Sampling) 
## Chain 2 Iteration: 2000 / 2000 [100%]  (Sampling) 
## Chain 2 finished in 45.3 seconds.
## Chain 3 Iteration:    1 / 2000 [  0%]  (Warmup) 
## Chain 3 Iteration:  100 / 2000 [  5%]  (Warmup) 
## Chain 3 Iteration:  200 / 2000 [ 10%]  (Warmup) 
## Chain 3 Iteration:  300 / 2000 [ 15%]  (Warmup) 
## Chain 3 Iteration:  400 / 2000 [ 20%]  (Warmup) 
## Chain 3 Iteration:  500 / 2000 [ 25%]  (Warmup) 
## Chain 3 Iteration:  600 / 2000 [ 30%]  (Warmup) 
## Chain 3 Iteration:  700 / 2000 [ 35%]  (Warmup) 
## Chain 3 Iteration:  800 / 2000 [ 40%]  (Warmup) 
## Chain 3 Iteration:  900 / 2000 [ 45%]  (Warmup) 
## Chain 3 Iteration: 1000 / 2000 [ 50%]  (Warmup) 
## Chain 3 Iteration: 1001 / 2000 [ 50%]  (Sampling) 
## Chain 3 Iteration: 1100 / 2000 [ 55%]  (Sampling) 
## Chain 3 Iteration: 1200 / 2000 [ 60%]  (Sampling) 
## Chain 3 Iteration: 1300 / 2000 [ 65%]  (Sampling) 
## Chain 3 Iteration: 1400 / 2000 [ 70%]  (Sampling) 
## Chain 3 Iteration: 1500 / 2000 [ 75%]  (Sampling) 
## Chain 3 Iteration: 1600 / 2000 [ 80%]  (Sampling) 
## Chain 3 Iteration: 1700 / 2000 [ 85%]  (Sampling) 
## Chain 3 Iteration: 1800 / 2000 [ 90%]  (Sampling) 
## Chain 3 Iteration: 1900 / 2000 [ 95%]  (Sampling) 
## Chain 3 Iteration: 2000 / 2000 [100%]  (Sampling) 
## Chain 3 finished in 44.0 seconds.
## Chain 4 Iteration:    1 / 2000 [  0%]  (Warmup) 
## Chain 4 Iteration:  100 / 2000 [  5%]  (Warmup) 
## Chain 4 Iteration:  200 / 2000 [ 10%]  (Warmup) 
## Chain 4 Iteration:  300 / 2000 [ 15%]  (Warmup) 
## Chain 4 Iteration:  400 / 2000 [ 20%]  (Warmup) 
## Chain 4 Iteration:  500 / 2000 [ 25%]  (Warmup) 
## Chain 4 Iteration:  600 / 2000 [ 30%]  (Warmup) 
## Chain 4 Iteration:  700 / 2000 [ 35%]  (Warmup) 
## Chain 4 Iteration:  800 / 2000 [ 40%]  (Warmup) 
## Chain 4 Iteration:  900 / 2000 [ 45%]  (Warmup) 
## Chain 4 Iteration: 1000 / 2000 [ 50%]  (Warmup) 
## Chain 4 Iteration: 1001 / 2000 [ 50%]  (Sampling) 
## Chain 4 Iteration: 1100 / 2000 [ 55%]  (Sampling) 
## Chain 4 Iteration: 1200 / 2000 [ 60%]  (Sampling) 
## Chain 4 Iteration: 1300 / 2000 [ 65%]  (Sampling) 
## Chain 4 Iteration: 1400 / 2000 [ 70%]  (Sampling) 
## Chain 4 Iteration: 1500 / 2000 [ 75%]  (Sampling) 
## Chain 4 Iteration: 1600 / 2000 [ 80%]  (Sampling) 
## Chain 4 Iteration: 1700 / 2000 [ 85%]  (Sampling) 
## Chain 4 Iteration: 1800 / 2000 [ 90%]  (Sampling) 
## Chain 4 Iteration: 1900 / 2000 [ 95%]  (Sampling) 
## Chain 4 Iteration: 2000 / 2000 [100%]  (Sampling) 
## Chain 4 finished in 45.0 seconds.
## 
## All 4 chains finished successfully.
## Mean chain execution time: 45.9 seconds.
## Total execution time: 184.0 seconds.
## Warning: Category specific effects for this family should be considered
## experimental and may have convergence issues.
# Assessing models fit: 
model_b<-summary(fit_cs)                     
## Warning: Category specific effects for this family should be considered
## experimental and may have convergence issues.
summary(fit_cs)      
## Warning: Category specific effects for this family should be considered
## experimental and may have convergence issues.
##  Family: cumulative 
##   Links:
## Warning: Category specific effects for this family should be considered
## experimental and may have convergence issues.
## mu = logit 
## Formula: quality ~ cs(volatile_acidity) + alcohol + sulphates + citric_acid + p_h 
##    Data: wine_train (Number of observations: 1119) 
##   Draws: 4 chains, each with iter = 2000; warmup = 1000; thin = 1;
##          total post-warmup draws = 4000
## 
## Regression Coefficients:
##                     Estimate Est.Error l-95% CI u-95% CI Rhat Bulk_ESS Tail_ESS
## Intercept[1]            1.95      2.01    -1.95     5.90 1.00     3841     2811
## Intercept[2]            7.80      2.03     3.88    11.82 1.00     3798     2685
## alcohol                 0.86      0.09     0.69     1.03 1.00     4054     3048
## sulphates               2.05      0.45     1.15     2.90 1.00     4791     2490
## citric_acid             0.84      0.50    -0.13     1.83 1.00     3342     2916
## p_h                    -1.17      0.57    -2.26    -0.04 1.00     3459     3162
## volatile_acidity[1]    -1.86      0.67    -3.18    -0.57 1.00     2195     2648
## volatile_acidity[2]    -2.52      0.58    -3.68    -1.40 1.00     3523     2660
## 
## Further Distributional Parameters:
##      Estimate Est.Error l-95% CI u-95% CI Rhat Bulk_ESS Tail_ESS
## disc     1.00      0.00     1.00     1.00   NA       NA       NA
## 
## Draws were sampled using sample(hmc). For each parameter, Bulk_ESS
## and Tail_ESS are effective sample size measures, and Rhat is the potential
## scale reduction factor on split chains (at convergence, Rhat = 1).
bayes_R2(fit_cs)
## Warning: Category specific effects for this family should be considered
## experimental and may have convergence issues.
## Warning: Category specific effects for this family should be considered
## experimental and may have convergence issues.
## Warning: Predictions are treated as continuous variables in 'bayes_R2' which is
## likely invalid for ordinal families.
## Warning: Category specific effects for this family should be considered
## experimental and may have convergence issues.
## Warning: Category specific effects for this family should be considered
## experimental and may have convergence issues.
## Warning: Category specific effects for this family should be considered
## experimental and may have convergence issues.
## Warning: Category specific effects for this family should be considered
## experimental and may have convergence issues.
## Warning: Category specific effects for this family should be considered
## experimental and may have convergence issues.
## Warning: Category specific effects for this family should be considered
## experimental and may have convergence issues.
##    Estimate  Est.Error      Q2.5     Q97.5
## R2 0.201218 0.02247582 0.1559278 0.2444526
pp_check(fit_cs, type = "bars")    
## Warning: Category specific effects for this family should be considered
## experimental and may have convergence issues.
## Warning: Category specific effects for this family should be considered
## experimental and may have convergence issues.
## Using 10 posterior draws for ppc type 'bars' by default.
## Warning: Category specific effects for this family should be considered
## experimental and may have convergence issues.
## Warning: Category specific effects for this family should be considered
## experimental and may have convergence issues.
## Warning: Category specific effects for this family should be considered
## experimental and may have convergence issues.
## Warning: Category specific effects for this family should be considered
## experimental and may have convergence issues.
## Warning: Category specific effects for this family should be considered
## experimental and may have convergence issues.
## Warning: Category specific effects for this family should be considered
## experimental and may have convergence issues.

pp_check(fit_cs, type = "bars_grouped", group = "quality")
## Warning: Category specific effects for this family should be considered
## experimental and may have convergence issues.
## Warning: Category specific effects for this family should be considered
## experimental and may have convergence issues.
## Using 10 posterior draws for ppc type 'bars_grouped' by default.
## Warning: Category specific effects for this family should be considered
## experimental and may have convergence issues.
## Warning: Category specific effects for this family should be considered
## experimental and may have convergence issues.
## Warning: Category specific effects for this family should be considered
## experimental and may have convergence issues.
## Warning: Category specific effects for this family should be considered
## experimental and may have convergence issues.
## Warning: Category specific effects for this family should be considered
## experimental and may have convergence issues.
## Warning: Category specific effects for this family should be considered
## experimental and may have convergence issues.

Predictive Power of the Model

labs <- c("Poor","Normal","Excellent")  # ensure this matches your factor order

# Draws x N matrix of category indices (1..K)
pp <- posterior_predict(fit_cs, newdata = wine_test)  
## Warning: Category specific effects for this family should be considered
## experimental and may have convergence issues.
## Warning: Category specific effects for this family should be considered
## experimental and may have convergence issues.
## Warning: Category specific effects for this family should be considered
## experimental and may have convergence issues.
## Warning: Category specific effects for this family should be considered
## experimental and may have convergence issues.
## Warning: Category specific effects for this family should be considered
## experimental and may have convergence issues.
## Warning: Category specific effects for this family should be considered
## experimental and may have convergence issues.
## Warning: Category specific effects for this family should be considered
## experimental and may have convergence issues.
# N x K matrix of posterior mean probs
probs <- t(apply(pp, 2, function(v) prop.table(tabulate(v, nbins = length(labs)))))
# Hard labels via argmax
pred_lab <- factor(labs[max.col(probs, ties.method = "first")], levels = labs, ordered = TRUE)

# Confusion + accuracy
table(Truth = wine_test$quality, Pred = pred_lab)
##            Pred
## Truth       Poor Normal Excellent
##   Poor         0     19         0
##   Normal       0    388         8
##   Excellent    0     52        13
mean(pred_lab == wine_test$quality)
## [1] 0.8354167
pred <- factor(pred_lab, levels = levels(wine_test$quality))
ref  <- factor(wine_test$quality, levels = levels(wine_test$quality))
confusionMatrix(pred, ref)
## Confusion Matrix and Statistics
## 
##            Reference
## Prediction  Poor Normal Excellent
##   Poor         0      0         0
##   Normal      19    388        52
##   Excellent    0      8        13
## 
## Overall Statistics
##                                           
##                Accuracy : 0.8354          
##                  95% CI : (0.7992, 0.8675)
##     No Information Rate : 0.825           
##     P-Value [Acc > NIR] : 0.2976          
##                                           
##                   Kappa : 0.1978          
##                                           
##  Mcnemar's Test P-Value : NA              
## 
## Statistics by Class:
## 
##                      Class: Poor Class: Normal Class: Excellent
## Sensitivity              0.00000        0.9798          0.20000
## Specificity              1.00000        0.1548          0.98072
## Pos Pred Value               NaN        0.8453          0.61905
## Neg Pred Value           0.96042        0.6190          0.88671
## Prevalence               0.03958        0.8250          0.13542
## Detection Rate           0.00000        0.8083          0.02708
## Detection Prevalence     0.00000        0.9563          0.04375
## Balanced Accuracy        0.50000        0.5673          0.59036
# ROC for the three classes
roc_poor <- roc(wine_test$quality == "Poor", probs[, 1])
## Setting levels: control = FALSE, case = TRUE
## Setting direction: controls < cases
roc_norm <- roc(wine_test$quality == "Normal", probs[, 2])
## Setting levels: control = FALSE, case = TRUE
## Setting direction: controls < cases
roc_ex   <- roc(wine_test$quality == "Excellent", probs[, 3])
## Setting levels: control = FALSE, case = TRUE
## Setting direction: controls < cases
auc(roc_poor); auc(roc_norm); auc(roc_ex)
## Area under the curve: 0.7884
## Area under the curve: 0.751
## Area under the curve: 0.8575
plot.roc(roc_norm, col="blue", main="One-vs-All ROC Curves")
lines.roc(roc_poor, col="red")
lines.roc(roc_ex, col="green")
legend("bottomright", legend=c("Poor","Normal","Excellent"),
       col=c("red","blue","green"), lwd=2)

# Comparing Models 
loo_compare(loo(fit_cs), loo(fit_ord))
## Warning: Category specific effects for this family should be considered
## experimental and may have convergence issues.
## Warning: Category specific effects for this family should be considered
## experimental and may have convergence issues.
## Warning: Category specific effects for this family should be considered
## experimental and may have convergence issues.
## Warning: Category specific effects for this family should be considered
## experimental and may have convergence issues.
##         elpd_diff se_diff
## fit_ord  0.0       0.0   
## fit_cs  -3.8       2.7
# Since no increase in model predictive power under the the smoothed model, we will use the simplier model. 
  # Output of the first model confusion Matrix: 
  
# Confusion Matrix Graphic 
# Draws x N matrix of category indices (1..K)
pp <- posterior_predict(fit_ord, newdata = wine_test)  

# N x K matrix of posterior mean probs
probs <- t(apply(pp, 2, function(v) prop.table(tabulate(v, nbins = length(labs)))))
# Hard labels via argmax
pred_lab <- factor(labs[max.col(probs, ties.method = "first")], levels = labs, ordered = TRUE)

# Confusion + accuracy
table(Truth = wine_test$quality, Pred = pred_lab)
##            Pred
## Truth       Poor Normal Excellent
##   Poor         0     19         0
##   Normal       0    384        12
##   Excellent    0     52        13
pred <- factor(pred_lab, levels = levels(wine_test$quality))
ref  <- factor(wine_test$quality, levels = levels(wine_test$quality))

cm<- confusionMatrix(pred, ref)
cm_table <- as.table(cm$table)
cm_df <- melt(cm_table)
colnames(cm_df) <- c("True", "Predicted", "Count")

ggplot(cm_df, aes(x = Predicted, y = True, fill = Count)) +
  geom_tile(color = "white") +
  geom_text(aes(label = Count), color = "black", size = 4) +
  scale_fill_viridis_c() +
  theme_minimal() +
  labs(title = "Confusion Matrix — Bayesian Ordinal Regression",
       x = "Predicted Class", y = "True Class")

Prior Sensitivity Analysis

# Baseline (used in above fit_ord model)
pri_base <- c(
  set_prior("normal(0,1)", class="b"),
  set_prior("student_t(3,0,2.5)", class="Intercept")
)

# Smaller variation from baseline prior
pri_tight <- c(
  set_prior("normal(0,0.5)", class="b"),
  set_prior("student_t(3,0,2.5)", class="Intercept")
)
# more variation from baseline prior
pri_wide <- c(
  set_prior("normal(0,2)", class="b"),
  set_prior("student_t(3,0,5)", class="Intercept")
)
# Sparsity-favoring (shrink weak predictors)
pri_hs <- c(
  set_prior("horseshoe(2)", class="b"),
  set_prior("student_t(3,0,2.5)", class="Intercept")
)
# To Improve efficiency (not having to write out same code for) 
fit_base <- fit_ord; fit_base
##  Family: cumulative 
##   Links: mu = logit 
## Formula: quality ~ fixed_acidity + volatile_acidity + citric_acid + residual_sugar + chlorides + free_sulfur_dioxide + total_sulfur_dioxide + density + p_h + sulphates + alcohol 
##    Data: wine_train (Number of observations: 1119) 
##   Draws: 4 chains, each with iter = 2000; warmup = 1000; thin = 1;
##          total post-warmup draws = 4000
## 
## Regression Coefficients:
##                      Estimate Est.Error l-95% CI u-95% CI Rhat Bulk_ESS
## Intercept[1]             2.97      2.73    -2.24     8.34 1.00     5469
## Intercept[2]             9.34      2.76     3.96    14.83 1.00     5266
## fixed_acidity            0.11      0.07    -0.03     0.24 1.00     3985
## volatile_acidity        -2.74      0.51    -3.73    -1.73 1.00     4755
## citric_acid              0.38      0.57    -0.77     1.46 1.00     4235
## residual_sugar          -0.01      0.06    -0.13     0.11 1.00     5914
## chlorides               -1.53      0.87    -3.21     0.18 1.00     6568
## free_sulfur_dioxide      0.02      0.01    -0.01     0.04 1.00     4957
## total_sulfur_dioxide    -0.01      0.00    -0.01     0.00 1.00     4629
## density                 -0.01      0.99    -1.97     1.90 1.00     6828
## p_h                     -0.82      0.64    -2.08     0.45 1.00     5061
## sulphates                2.20      0.46     1.30     3.08 1.00     5544
## alcohol                  0.84      0.09     0.67     1.03 1.00     4307
##                      Tail_ESS
## Intercept[1]             3197
## Intercept[2]             2967
## fixed_acidity            3416
## volatile_acidity         3085
## citric_acid              3305
## residual_sugar           3189
## chlorides                3082
## free_sulfur_dioxide      3169
## total_sulfur_dioxide     3054
## density                  2794
## p_h                      2872
## sulphates                3193
## alcohol                  3036
## 
## Further Distributional Parameters:
##      Estimate Est.Error l-95% CI u-95% CI Rhat Bulk_ESS Tail_ESS
## disc     1.00      0.00     1.00     1.00   NA       NA       NA
## 
## Draws were sampled using sample(hmc). For each parameter, Bulk_ESS
## and Tail_ESS are effective sample size measures, and Rhat is the potential
## scale reduction factor on split chains (at convergence, Rhat = 1).
# Smaller-variance prior (refit happens because prior changed)
fit_tight <- update(
  fit_base,
  prior = pri_tight,
  seed = 124,
  chains = 4,
  iter = 4000,
  refresh = 0,
  recompile = TRUE,   
  init = "random"    
)
## Start sampling
## Running MCMC with 4 sequential chains...
## 
## Chain 1 finished in 27.2 seconds.
## Chain 2 finished in 30.1 seconds.
## Chain 3 finished in 30.4 seconds.
## Chain 4 finished in 27.4 seconds.
## 
## All 4 chains finished successfully.
## Mean chain execution time: 28.8 seconds.
## Total execution time: 115.6 seconds.
# Wider variance Prior 
fit_wide <- update(
  fit_base,
  prior = pri_wide,
  seed = 125,
  chains = 4,
  iter = 4000,
  refresh = 0,
  recompile = TRUE,
  inits = "random"
)
## Warning: Argument 'inits' is deprecated. Please use argument 'init' instead.
## Start sampling
## Running MCMC with 4 sequential chains...
## 
## Chain 1 finished in 38.1 seconds.
## Chain 2 finished in 41.7 seconds.
## Chain 3 finished in 35.6 seconds.
## Chain 4 finished in 38.8 seconds.
## 
## All 4 chains finished successfully.
## Mean chain execution time: 38.5 seconds.
## Total execution time: 154.8 seconds.
# Horse Shoe Prior 
fit_hs <- update(
  fit_base,
  prior = pri_hs,
  seed = 126,
  chains = 4,
  iter = 4000,
  refresh = 0,
  recompile = TRUE,
  inits = "random"
)
## Warning: Argument 'inits' is deprecated. Please use argument 'init' instead.
## Start sampling
## Running MCMC with 4 sequential chains...
## 
## Chain 1 finished in 178.8 seconds.
## Chain 2 finished in 147.9 seconds.
## Chain 3 finished in 322.2 seconds.
## Chain 4 finished in 210.7 seconds.
## 
## All 4 chains finished successfully.
## Mean chain execution time: 214.9 seconds.
## Total execution time: 860.2 seconds.
## Warning: 2433 of 8000 (30.0%) transitions ended with a divergence.
## See https://mc-stan.org/misc/warnings for details.
## Warning: 1 of 8000 (0.0%) transitions hit the maximum treedepth limit of 10.
## See https://mc-stan.org/misc/warnings for details.
## Sanity Check fromm above: 
# Do the Priors actually differ? 
prior_summary(fit_base); prior_summary(fit_tight)
##               prior     class                 coef group resp dpar nlpar lb ub
##         normal(0,1)         b                                                 
##         normal(0,1)         b              alcohol                            
##         normal(0,1)         b            chlorides                            
##         normal(0,1)         b          citric_acid                            
##         normal(0,1)         b              density                            
##         normal(0,1)         b        fixed_acidity                            
##         normal(0,1)         b  free_sulfur_dioxide                            
##         normal(0,1)         b                  p_h                            
##         normal(0,1)         b       residual_sugar                            
##         normal(0,1)         b            sulphates                            
##         normal(0,1)         b total_sulfur_dioxide                            
##         normal(0,1)         b     volatile_acidity                            
##  student_t(3,0,2.5) Intercept                                                 
##  student_t(3,0,2.5) Intercept                    1                            
##  student_t(3,0,2.5) Intercept                    2                            
##  tag       source
##              user
##      (vectorized)
##      (vectorized)
##      (vectorized)
##      (vectorized)
##      (vectorized)
##      (vectorized)
##      (vectorized)
##      (vectorized)
##      (vectorized)
##      (vectorized)
##      (vectorized)
##              user
##      (vectorized)
##      (vectorized)
##               prior     class                 coef group resp dpar nlpar lb ub
##       normal(0,0.5)         b                                                 
##       normal(0,0.5)         b              alcohol                            
##       normal(0,0.5)         b            chlorides                            
##       normal(0,0.5)         b          citric_acid                            
##       normal(0,0.5)         b              density                            
##       normal(0,0.5)         b        fixed_acidity                            
##       normal(0,0.5)         b  free_sulfur_dioxide                            
##       normal(0,0.5)         b                  p_h                            
##       normal(0,0.5)         b       residual_sugar                            
##       normal(0,0.5)         b            sulphates                            
##       normal(0,0.5)         b total_sulfur_dioxide                            
##       normal(0,0.5)         b     volatile_acidity                            
##  student_t(3,0,2.5) Intercept                                                 
##  student_t(3,0,2.5) Intercept                    1                            
##  student_t(3,0,2.5) Intercept                    2                            
##  tag       source
##              user
##      (vectorized)
##      (vectorized)
##      (vectorized)
##      (vectorized)
##      (vectorized)
##      (vectorized)
##      (vectorized)
##      (vectorized)
##      (vectorized)
##      (vectorized)
##      (vectorized)
##              user
##      (vectorized)
##      (vectorized)
# Posteriors changed, will be False if they've changed. 
all.equal(
  posterior_summary(fit_base, pars="^b_"),
  posterior_summary(fit_tight, pars="^b_")
)  
## Warning: Argument 'pars' is deprecated. Please use 'variable' instead.
## Warning: Argument 'pars' is deprecated. Please use 'variable' instead.
## [1] "Mean relative difference: 0.3166304"
loo_base  <- loo(fit_base,  reloo = TRUE)
## No problematic observations found. Returning the original 'loo' object.
loo_tight <- loo(fit_tight, reloo = TRUE)
## No problematic observations found. Returning the original 'loo' object.
loo_hs <- loo(fit_hs, reloo = TRUE)
## No problematic observations found. Returning the original 'loo' object.
loo_wide <- loo(fit_wide, reloo = TRUE)
## No problematic observations found. Returning the original 'loo' object.
loo_compare(loo_base, loo_tight, loo_hs, loo_wide)
##           elpd_diff se_diff
## fit_hs      0.0       0.0  
## fit_wide   -0.2       0.7  
## fit_base   -3.9       1.9  
## fit_tight -12.3       4.3
# Horse shoe is the best model

# Inspect raw elpd_loo estimates (avoid rounding hiding tiny diffs)
c(base = loo_base$estimates[1,1],
  tight = loo_tight$estimates[1,1], 
  hs = loo_hs$estimates[1, 1], 
  wide = loo_wide$estimates[1, 1])
##      base     tight        hs      wide 
## -499.9911 -508.4596 -496.1311 -496.3388
# Horse shoe achieved the best predictive performance, followed closley by the wider standard normal prior. 

Predictive Power, Horse Prior

pp_check(fit_hs, type = "bars")    
## Using 10 posterior draws for ppc type 'bars' by default.

pp_check(fit_hs, type = "bars_grouped", group = "quality")
## Using 10 posterior draws for ppc type 'bars_grouped' by default.

labs <- c("Poor","Normal","Excellent")  # ensure this matches your factor order

# Draws x N matrix of category indices (1..K)
pp <- posterior_predict(fit_hs, newdata = wine_test)  

# N x K matrix of posterior mean probs
probs <- t(apply(pp, 2, function(v) prop.table(tabulate(v, nbins = length(labs)))))
# Hard labels via argmax
pred_lab <- factor(labs[max.col(probs, ties.method = "first")], levels = labs, ordered = TRUE)

# Confusion + accuracy
table(Truth = wine_test$quality, Pred = pred_lab)
##            Pred
## Truth       Poor Normal Excellent
##   Poor         1     18         0
##   Normal       0    383        13
##   Excellent    0     48        17
mean(pred_lab == wine_test$quality)
## [1] 0.8354167
pred <- factor(pred_lab, levels = levels(wine_test$quality))
ref  <- factor(wine_test$quality, levels = levels(wine_test$quality))
confusionMatrix(pred, ref)
## Confusion Matrix and Statistics
## 
##            Reference
## Prediction  Poor Normal Excellent
##   Poor         1      0         0
##   Normal      18    383        48
##   Excellent    0     13        17
## 
## Overall Statistics
##                                           
##                Accuracy : 0.8354          
##                  95% CI : (0.7992, 0.8675)
##     No Information Rate : 0.825           
##     P-Value [Acc > NIR] : 0.2976          
##                                           
##                   Kappa : 0.251           
##                                           
##  Mcnemar's Test P-Value : NA              
## 
## Statistics by Class:
## 
##                      Class: Poor Class: Normal Class: Excellent
## Sensitivity             0.052632        0.9672          0.26154
## Specificity             1.000000        0.2143          0.96867
## Pos Pred Value          1.000000        0.8530          0.56667
## Neg Pred Value          0.962422        0.5806          0.89333
## Prevalence              0.039583        0.8250          0.13542
## Detection Rate          0.002083        0.7979          0.03542
## Detection Prevalence    0.002083        0.9354          0.06250
## Balanced Accuracy       0.526316        0.5907          0.61511
# ROC for the three classes
roc_poor <- roc(wine_test$quality == "Poor", probs[, 1])
## Setting levels: control = FALSE, case = TRUE
## Setting direction: controls < cases
roc_norm <- roc(wine_test$quality == "Normal", probs[, 2])
## Setting levels: control = FALSE, case = TRUE
## Setting direction: controls < cases
roc_ex   <- roc(wine_test$quality == "Excellent", probs[, 3])
## Setting levels: control = FALSE, case = TRUE
## Setting direction: controls < cases
auc(roc_poor); auc(roc_norm); auc(roc_ex)
## Area under the curve: 0.7929
## Area under the curve: 0.771
## Area under the curve: 0.8552
plot.roc(roc_norm, col="blue", main="One-vs-All ROC Curves")
lines.roc(roc_poor, col="red")
lines.roc(roc_ex, col="green")
legend("bottomright", legend=c("Poor","Normal","Excellent"),
       col=c("red","blue","green"), lwd=2)

# Confusion Matrix Graphic 
# Draws x N matrix of category indices (1..K)
pp <- posterior_predict(fit_hs, newdata = wine_test)  

# N x K matrix of posterior mean probs
probs <- t(apply(pp, 2, function(v) prop.table(tabulate(v, nbins = length(labs)))))
# Hard labels via argmax
pred_lab <- factor(labs[max.col(probs, ties.method = "first")], levels = labs, ordered = TRUE)

# Confusion + accuracy
table(Truth = wine_test$quality, Pred = pred_lab)
##            Pred
## Truth       Poor Normal Excellent
##   Poor         1     18         0
##   Normal       0    382        14
##   Excellent    0     48        17
pred <- factor(pred_lab, levels = levels(wine_test$quality))
ref  <- factor(wine_test$quality, levels = levels(wine_test$quality))

cm<- confusionMatrix(pred, ref)
cm_table <- as.table(cm$table)
cm_df <- melt(cm_table)
colnames(cm_df) <- c("True", "Predicted", "Count")

ggplot(cm_df, aes(x = Predicted, y = True, fill = Count)) +
  geom_tile(color = "white") +
  geom_text(aes(label = Count), color = "black", size = 4) +
  scale_fill_viridis_c() +
  theme_minimal() +
  labs(title = "Confusion Matrix — Horse Shoe Bayesian Ordinal Regression",
       x = "Predicted Class", y = "True Class")