Filtered ML fit and the GDSTM with FRESA.CAD

Here we use the FRESA.CAD::filteredFit() function to train ML models with and without the GDSTM.

Naive-Bayes (NB) and LASSO models are used in this demo.

This script uses the FRESA.CAD and mlbench R packages, together with pander and gplots for tables and heatmaps.

knitr::opts_chunk$set(collapse = TRUE, warning = FALSE, message = FALSE,comment = "#>")

library("FRESA.CAD")
library(mlbench)

op <- par(no.readonly = TRUE)

We load the Sonar data set from the mlbench package.

data("Sonar", package = "mlbench")
print(table(Sonar$Class))

  M   R 
111  97 

Setting some variables for downstream analysis

studyName = "Sonar"
datasetframe <- Sonar
Outcome <- "Class"

# 50% of subjects for training

trainFraction = 0.5

Setting the Training and Testing sets


tb <- table(datasetframe[,Outcome])
classNames <- unique(datasetframe[,Outcome])

allrowClass <- datasetframe[,Outcome]
names(allrowClass) <- rownames(datasetframe)

trainsize <- trainFraction*min(tb);
trainSamples <- NULL;
for (theClass in classNames)
{
  classSample <- allrowClass[allrowClass == theClass]
  trainSamples <- c(trainSamples,names(classSample[sample(length(classSample),trainsize)]))
}


datasetframe_train <- datasetframe[trainSamples,]
testSamples <- !(rownames(datasetframe) %in% trainSamples)
datasetframe_test <- datasetframe[testSamples,]

outcomes <- datasetframe_train[,Outcome]

pander::pander(table(datasetframe[,Outcome]),caption="All")
All
   M     R
 111    97
pander::pander(table(datasetframe_train[,Outcome]),caption="Training")
Training
   M     R
  48    48
pander::pander(table(datasetframe_test[,Outcome]),caption="Testing")
Testing
   M     R
  63    49

Machine Learning with the filteredFit() function

Train simple NB and LASSO models on the training set.

In FRESA.CAD, all binary classification tasks assume that the outcome is coded as 0 and 1.


datasetframe_train[,Outcome] <- 1*(datasetframe_train[,Outcome] == classNames[2])
datasetframe_test[,Outcome] <- 1*(datasetframe_test[,Outcome] == classNames[2])

mNBRaw <- filteredFit(paste(Outcome,"~."),
                   datasetframe_train,
                   fitmethod=NAIVE_BAYES,
                     filtermethod=univariate_KS,
                     filtermethod.control=list(pvalue=0.01,limit= 0),
                     pca=FALSE
                   )

mLASSORaw <- filteredFit(paste(Outcome,"~."),
                   datasetframe_train,
                   fitmethod=LASSO_MIN,
                     filtermethod=univariate_KS,
                     filtermethod.control=list(pvalue=0.20,limit= -1),
                    family = "binomial"
                   )

With PCA

# With PCA
mNBPCA <- filteredFit(paste(Outcome,"~."),
                   datasetframe_train,
                   fitmethod=NAIVE_BAYES,
                     filtermethod=univariate_KS,
                     filtermethod.control=list(pvalue=0.01,limit= 0),
                     pca=TRUE
                   )


mLASSOPCA <- filteredFit(paste(Outcome,"~."),
                   datasetframe_train,
                   fitmethod=LASSO_MIN,
                     filtermethod=univariate_KS,
                     filtermethod.control=list(pvalue=0.20,limit= -1),
                     PCA = TRUE,
                    family = "binomial"
                   )

Now we run filteredFit() with decorrelation enabled (DECOR = TRUE) and its default parameters.


mNBDecor <- filteredFit(paste(Outcome,"~."),
                   datasetframe_train,
                    fitmethod=NAIVE_BAYES,
                     filtermethod=univariate_KS,
                     filtermethod.control=list(pvalue=0.01,limit= 0),
                     DECOR = TRUE,
                     pca=FALSE
                   )

mLASSODecor <- filteredFit(paste(Outcome,"~."),
                   datasetframe_train,
                    fitmethod=LASSO_MIN,
                     filtermethod=univariate_KS,
                     filtermethod.control=list(pvalue=0.20,limit= -1),
                     DECOR = TRUE,
                    family = "binomial"
                   )

Decorrelation with custom parameters: Spearman correlation and a robust linear fit (RLM).

mNBDecor2 <- filteredFit(paste(Outcome,"~."),
                   datasetframe_train,
                    fitmethod=NAIVE_BAYES,
                     filtermethod=univariate_KS,
                     filtermethod.control=list(pvalue=0.01,limit= 0),
                     DECOR = TRUE,
                     DECOR.control=list(method="spearman",type="RLM"),
                     pca=FALSE
                   )
mLASSODecor2 <- filteredFit(paste(Outcome,"~."),
                   datasetframe_train,
                    fitmethod=LASSO_MIN,
                     filtermethod=univariate_KS,
                     filtermethod.control=list(pvalue=0.20,limit= -1),
                     DECOR = TRUE,
                     DECOR.control=list(method="spearman",type="RLM"),
                    family = "binomial"
                   )

The predict() methods apply the fitted transformations to the test set, so we can make a side-by-side comparison of the predictions.


# Predict the raw testing set
prRAW <- predict(mNBRaw,datasetframe_test)

# Predict with PCA
prPCA <- predict(mNBPCA,datasetframe_test)

# Predict the transformed dataset
prDecor <- predict(mNBDecor,datasetframe_test)

# Predict the transformed dataset spearman
prDecor2 <- predict(mNBDecor2,datasetframe_test)

par(mfrow=c(2,2))
AllRocAUC <- NULL;

classoutcomes <- datasetframe_test[,Outcome]
psRaw <- predictionStats_binary(cbind(classoutcomes,prRAW),
                                "NB Raw",cex=0.75)

[ROC plot: NB Raw]

pander::pander(psRaw$aucs)
   est      lower    upper
   0.8578   0.7881   0.9275
AllRocAUC <- rbind(AllRocAUC,psRaw$aucs)

psPCA <- predictionStats_binary(cbind(classoutcomes,prPCA),
                                "NB PCA",cex=0.75)

[ROC plot: NB PCA]

pander::pander(psPCA$aucs)
   est      lower    upper
   0.8202   0.7397   0.9008
AllRocAUC <- rbind(AllRocAUC,psPCA$aucs)

psDecor <- predictionStats_binary(cbind(classoutcomes,prDecor),
                                "NB GDSTM",cex=0.75)

[ROC plot: NB GDSTM]

pander::pander(psDecor$aucs)
   est      lower    upper
   0.8614   0.7946   0.9281
AllRocAUC <- rbind(AllRocAUC,psDecor$aucs);


psDecor2 <- predictionStats_binary(cbind(classoutcomes,prDecor2),
                                "NB GDSTM Spearman",cex=0.75)

[ROC plot: NB GDSTM Spearman]

pander::pander(psDecor2$aucs)
   est      lower   upper
   0.8915   0.83    0.953
AllRocAUC <- rbind(AllRocAUC,psDecor2$aucs);


psRaw <- predictionStats_binary(cbind(classoutcomes,
                                      predict(mLASSORaw,datasetframe_test)),
                                "LASSO Raw",cex=0.75)

[ROC plot: LASSO Raw]

pander::pander(psRaw$aucs)
   est     lower    upper
   0.803   0.7215   0.8846
AllRocAUC <- rbind(AllRocAUC,psRaw$aucs)

psPCA <- predictionStats_binary(cbind(classoutcomes,
                                      predict(mLASSOPCA,datasetframe_test)),
                                "LASSO PCA",cex=0.75)

[ROC plot: LASSO PCA]

pander::pander(psPCA$aucs)
   est      lower    upper
   0.8335   0.7579   0.9091
AllRocAUC <- rbind(AllRocAUC,psPCA$aucs)

psDecor <- predictionStats_binary(cbind(classoutcomes,
                                        predict(mLASSODecor,datasetframe_test)),
                                "LASSO GDSTM",cex=0.75)

[ROC plot: LASSO GDSTM]

pander::pander(psDecor$aucs)
   est      lower    upper
   0.7849   0.6992   0.8706
AllRocAUC <- rbind(AllRocAUC,psDecor$aucs);


psDecor2 <- predictionStats_binary(cbind(classoutcomes,
                                         predict(mLASSODecor2,datasetframe_test)),
                                "LASSO GDSTM Spearman",cex=0.75)

[ROC plot: LASSO GDSTM Spearman]

pander::pander(psDecor2$aucs)
   est      lower    upper
   0.8115   0.7313   0.8916
AllRocAUC <- rbind(AllRocAUC,psDecor2$aucs);

Comparing ROCAUC


rownames(AllRocAUC) <- c("NB:Raw","NB:PCA","NB:GDSTM_P","NB:GDSTM_S",
                         "LASSO:Raw","LASSO:PCA","LASSO:GDSTM_P","LASSO:GDSTM_S")
pander::pander(AllRocAUC)
                 est      lower    upper
NB:Raw           0.8578   0.7881   0.9275
NB:PCA           0.8202   0.7397   0.9008
NB:GDSTM_P       0.8614   0.7946   0.9281
NB:GDSTM_S       0.8915   0.83     0.953
LASSO:Raw        0.803    0.7215   0.8846
LASSO:PCA        0.8335   0.7579   0.9091
LASSO:GDSTM_P    0.7849   0.6992   0.8706
LASSO:GDSTM_S    0.8115   0.7313   0.8916
bpROCAUC <- barPlotCiError(as.matrix(AllRocAUC),
                          metricname = "ROCAUC",
                          thesets = "ROC AUC",
                          themethod = rownames(AllRocAUC),
                          main = "ROC AUC",
                          offsets = c(0.5,1),
                          scoreDirection = ">",
                          ho=0.5,
                          args.legend = list(bg = "white",x="bottomright",inset=c(0.0,0),cex=0.75),
                          col = terrain.colors(nrow(AllRocAUC))
                          )

Visualization of GDSTM

The GDSTM is stored in the filteredFit() object. Hence, we can analyze and display the matrix.


gplots::heatmap.2(mNBDecor$GDSTM,
                  trace = "none",
                  mar = c(10,10),
                  col=rev(heat.colors(7)),
                  main = paste("GDSTM Matrix (Pearson, LM):",studyName),
                  cexRow = 0.7,
                  cexCol = 0.7,
                  key.title=NA,
                  key.xlab="beta",
                  xlab="GDSTM Feature", ylab="Input Feature")


gplots::heatmap.2(mNBDecor2$GDSTM,
                  trace = "none",
                  mar = c(10,10),
                  col=rev(heat.colors(7)),
                  main = paste("GDSTM Matrix (Spearman, RLM):",studyName),
                  cexRow = 0.7,
                  cexCol = 0.7,
                  key.title=NA,
                  key.xlab="beta",
                  xlab="GDSTM Feature", ylab="Input Feature")

Repeated Holdout Cross-Validation




dataCV <- datasetframe
dataCV[,Outcome] <- 1*(dataCV[,Outcome] == classNames[2])


cvNBRaw <- randomCV(dataCV,
                Outcome,
                fittingFunction= filteredFit,
                classSamplingType = "Ba",
                trainFraction = 0.80,
                repetitions = 100,
                fitmethod=NAIVE_BAYES,
                filtermethod=univariate_KS,
                filtermethod.control=list(pvalue=0.01,limit= 0),
                pca = FALSE
            )

[randomCV progress log, final state after 100 repetitions: Tested: 208, Avg. Selected: 17.37, Min Tests: 9, Max Tests: 44, Mean Tests: 25.96, MAD: 0.2597]

cvNBPCA <- randomCV(dataCV,
                Outcome,
                trainSampleSets= cvNBRaw$trainSamplesSets,
                fittingFunction= filteredFit,
                fitmethod=NAIVE_BAYES,
                filtermethod=univariate_KS,
                filtermethod.control=list(pvalue=0.01,limit= 0),
                pca = TRUE
            )

[randomCV progress log, final state after 100 repetitions: Tested: 208, Avg. Selected: 17.37, Min Tests: 9, Max Tests: 44, Mean Tests: 25.96, MAD: 0.2819]

cvNBDecor <- randomCV(dataCV,
                Outcome,
                trainSampleSets= cvNBRaw$trainSamplesSets,
                fittingFunction= filteredFit,
                fitmethod=NAIVE_BAYES,
                filtermethod=univariate_KS,
                filtermethod.control=list(pvalue=0.01,limit= 0),
                DECOR = TRUE,
                pca = FALSE
            )

[randomCV progress log, final state after 100 repetitions: Tested: 208, Avg. Selected: 10.5, Min Tests: 9, Max Tests: 44, Mean Tests: 25.96, MAD: 0.2571]

The Aggregated Test Results


par(mfrow=c(1,3))
bpraw <- predictionStats_binary(cvNBRaw$testPredictions,"NB RAW",cex=0.75)

[ROC plot: NB RAW]

bpPCA <- predictionStats_binary(cvNBPCA$testPredictions,"NB PCA",cex=0.75)

[ROC plot: NB PCA]

bpdecor <- predictionStats_binary(cvNBDecor$testPredictions,"NB GDSTM",cex=0.75)

[ROC plot: NB GDSTM]

pander::pander(bpraw$aucs)
   est     lower    upper
   0.825   0.7671   0.8829
pander::pander(bpPCA$aucs)
   est      lower    upper
   0.8649   0.8151   0.9146
pander::pander(bpdecor$aucs)
   est      lower    upper
   0.8444   0.7907   0.8982

Using Feature Interactions.


# Signed square root: square root of the absolute value, keeping the original sign.
signedsqrt <- function(x) { return (sign(x)*sqrt(abs(x)))}
data("Sonar", package = "mlbench")
sclass <- Sonar$Class

# Expand the 60 sonar bands into all main effects plus every pairwise interaction.
Sonar <- as.data.frame(model.matrix(Class ~ .*.,Sonar))
Sonar$`(Intercept)` <- NULL
Sonar[,1:ncol(Sonar)] <- sapply(Sonar,as.numeric)

# Turn the interaction names (e.g. "V1:V2") into syntactically valid column names ("V1_x_V2").
fnames <- colnames(Sonar)
fnames <- str_replace_all(fnames," ","_")
fnames <- str_replace_all(fnames,"/","_")
fnames <- str_replace_all(fnames,":","_x_")
colnames(Sonar) <- fnames
squaredfeatures <- str_detect(fnames,"_x_")

# Replace each product feature with its signed square root, keeping it on a scale
# comparable to the original bands.
Sonar[,squaredfeatures] <- as.data.frame(apply(Sonar[,squaredfeatures],2,signedsqrt));
Sonar$Class <- sclass

datasetframe <- Sonar

Setting the Training and Testing sets, reusing the same train/test split as before



datasetframe_train <- datasetframe[trainSamples,]
datasetframe_test <- datasetframe[testSamples,]

FI: Machine Learning with the filteredFit() function

Train simple NB and LASSO models on the training set.

In FRESA.CAD, all binary classification tasks assume that the outcome is coded as 0 and 1.


datasetframe_train[,Outcome] <- 1*(datasetframe_train[,Outcome] == classNames[2])
datasetframe_test[,Outcome] <- 1*(datasetframe_test[,Outcome] == classNames[2])

mNBRaw <- filteredFit(paste(Outcome,"~."),
                   datasetframe_train,
                   fitmethod=NAIVE_BAYES,
                     filtermethod=univariate_KS,
                     filtermethod.control=list(pvalue=0.01,limit= 0),
                     pca=FALSE
                   )

mLASSORaw <- filteredFit(paste(Outcome,"~."),
                   datasetframe_train,
                   fitmethod=LASSO_MIN,
                     filtermethod=univariate_KS,
                     filtermethod.control=list(pvalue=0.20,limit= -1),
                    family = "binomial"
                   )

With PCA

# With PCA
mNBPCA <- filteredFit(paste(Outcome,"~."),
                   datasetframe_train,
                   fitmethod=NAIVE_BAYES,
                     filtermethod=univariate_KS,
                     filtermethod.control=list(pvalue=0.01,limit= 0),
                     pca=TRUE
                   )


mLASSOPCA <- filteredFit(paste(Outcome,"~."),
                   datasetframe_train,
                   fitmethod=LASSO_MIN,
                     filtermethod=univariate_KS,
                     filtermethod.control=list(pvalue=0.20,limit= -1),
                     PCA = TRUE,
                    family = "binomial"
                   )

Now we run filteredFit() with decorrelation enabled (DECOR = TRUE) and its default parameters.


mNBDecor <- filteredFit(paste(Outcome,"~."),
                   datasetframe_train,
                    fitmethod=NAIVE_BAYES,
                     filtermethod=univariate_KS,
                     filtermethod.control=list(pvalue=0.01,limit= 0),
                     DECOR = TRUE,
                     pca=FALSE
                   )

mLASSODecor <- filteredFit(paste(Outcome,"~."),
                   datasetframe_train,
                    fitmethod=LASSO_MIN,
                     filtermethod=univariate_KS,
                     filtermethod.control=list(pvalue=0.20,limit= -1),
                     DECOR = TRUE,
                    family = "binomial"
                   )

Decorrelation with custom parameters: Spearman correlation and a robust linear fit (RLM).

mNBDecor2 <- filteredFit(paste(Outcome,"~."),
                   datasetframe_train,
                    fitmethod=NAIVE_BAYES,
                     filtermethod=univariate_KS,
                     filtermethod.control=list(pvalue=0.01,limit= 0),
                     DECOR = TRUE,
                     DECOR.control=list(method="spearman",type="RLM"),
                     pca=FALSE
                   )
mLASSODecor2 <- filteredFit(paste(Outcome,"~."),
                   datasetframe_train,
                    fitmethod=LASSO_MIN,
                     filtermethod=univariate_KS,
                     filtermethod.control=list(pvalue=0.20,limit= -1),
                     DECOR = TRUE,
                     DECOR.control=list(method="spearman",type="RLM"),
                    family = "binomial"
                   )

The predict() methods apply the fitted transformations to the test set, so we can make a side-by-side comparison of the predictions.


# Predict the raw testing set
prRAW <- predict(mNBRaw,datasetframe_test)

# Predict with PCA
prPCA <- predict(mNBPCA,datasetframe_test)

# Predict the transformed dataset
prDecor <- predict(mNBDecor,datasetframe_test)

# Predict the transformed dataset spearman
prDecor2 <- predict(mNBDecor2,datasetframe_test)

par(mfrow=c(2,2))
AllRocAUC <- NULL;

classoutcomes <- datasetframe_test[,Outcome]
psRaw <- predictionStats_binary(cbind(classoutcomes,prRAW),
                                "NB Raw",cex=0.75)

[ROC plot: NB Raw]

pander::pander(psRaw$aucs)
   est      lower    upper
   0.8264   0.7507   0.902
AllRocAUC <- rbind(AllRocAUC,psRaw$aucs)

psPCA <- predictionStats_binary(cbind(classoutcomes,prPCA),
                                "NB PCA",cex=0.75)

[ROC plot: NB PCA]

pander::pander(psPCA$aucs)
   est      lower    upper
   0.6276   0.5204   0.7349
AllRocAUC <- rbind(AllRocAUC,psPCA$aucs)

psDecor <- predictionStats_binary(cbind(classoutcomes,prDecor),
                                "NB GDSTM",cex=0.75)

[ROC plot: NB GDSTM]

pander::pander(psDecor$aucs)
   est      lower   upper
   0.8657   0.797   0.9344
AllRocAUC <- rbind(AllRocAUC,psDecor$aucs);


psDecor2 <- predictionStats_binary(cbind(classoutcomes,prDecor2),
                                "NB GDSTM Spearman",cex=0.75)

[ROC plot: NB GDSTM Spearman]

pander::pander(psDecor2$aucs)
   est     lower    upper
   0.856   0.7838   0.9282
AllRocAUC <- rbind(AllRocAUC,psDecor2$aucs);


psRaw <- predictionStats_binary(cbind(classoutcomes,
                                      predict(mLASSORaw,datasetframe_test)),
                                "LASSO Raw",cex=0.75)

[ROC plot: LASSO Raw]

pander::pander(psRaw$aucs)
   est      lower    upper
   0.8503   0.7756   0.9251
AllRocAUC <- rbind(AllRocAUC,psRaw$aucs)

psPCA <- predictionStats_binary(cbind(classoutcomes,
                                      predict(mLASSOPCA,datasetframe_test)),
                                "LASSO PCA",cex=0.75)

[ROC plot: LASSO PCA]

pander::pander(psPCA$aucs)
   est    lower    upper
   0.84   0.7645   0.9155
AllRocAUC <- rbind(AllRocAUC,psPCA$aucs)

psDecor <- predictionStats_binary(cbind(classoutcomes,
                                        predict(mLASSODecor,datasetframe_test)),
                                "LASSO GDSTM",cex=0.75)

[ROC plot: LASSO GDSTM]

pander::pander(psDecor$aucs)
   est      lower    upper
   0.8623   0.7931   0.9315
AllRocAUC <- rbind(AllRocAUC,psDecor$aucs);


psDecor2 <- predictionStats_binary(cbind(classoutcomes,
                                         predict(mLASSODecor2,datasetframe_test)),
                                "LASSO GDSTM Spearman",cex=0.75)

[ROC plot: LASSO GDSTM Spearman]

pander::pander(psDecor2$aucs)
   est      lower   upper
   0.8465   0.776   0.9169
AllRocAUC <- rbind(AllRocAUC,psDecor2$aucs);

FI: Comparing ROCAUC


rownames(AllRocAUC) <- c("NB:Raw","NB:PCA","NB:GDSTM_P","NB:GDSTM_S",
                         "LASSO:Raw","LASSO:PCA","LASSO:GDSTM_P","LASSO:GDSTM_S")
pander::pander(AllRocAUC)
                 est      lower    upper
NB:Raw           0.8264   0.7507   0.902
NB:PCA           0.6276   0.5204   0.7349
NB:GDSTM_P       0.8657   0.797    0.9344
NB:GDSTM_S       0.856    0.7838   0.9282
LASSO:Raw        0.8503   0.7756   0.9251
LASSO:PCA        0.84     0.7645   0.9155
LASSO:GDSTM_P    0.8623   0.7931   0.9315
LASSO:GDSTM_S    0.8465   0.776    0.9169
bpROCAUC <- barPlotCiError(as.matrix(AllRocAUC),
                          metricname = "ROCAUC",
                          thesets = "ROC AUC",
                          themethod = rownames(AllRocAUC),
                          main = "ROC AUC",
                          offsets = c(0.5,1),
                          scoreDirection = ">",
                          ho=0.5,
                          args.legend = list(bg = "white",x="bottomright",inset=c(0.0,0),cex=0.75),
                          col = terrain.colors(nrow(AllRocAUC))
                          )

---
title: "FCA and the GDSTM"
output: html_notebook
---

## Filtered ML fit and the GDSTM with FRESA.CAD

Here we use the **FRESA.CAD::filteredFit()** function to train ML models with and without the GDSTM.

Naive-Bayes (NB) and LASSO models are used in this demo.

This script uses the FRESA.CAD and mlbench R packages, together with pander and gplots for tables and heatmaps.
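
All of these packages should be available from CRAN; a one-time install step (optional, only needed if any of them are missing) would look like this:

```{r eval=FALSE}
# One-time setup: install the packages used in this notebook from CRAN.
install.packages(c("FRESA.CAD", "mlbench", "pander", "gplots"))
```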

```{r functions,echo = TRUE }
knitr::opts_chunk$set(collapse = TRUE, warning = FALSE, message = FALSE,comment = "#>")

library("FRESA.CAD")
library(mlbench)

op <- par(no.readonly = TRUE)

```

We load the Sonar data set from the mlbench package.

```{r}
data("Sonar", package = "mlbench")
print(table(Sonar$Class))



```

Setting some variables for downstream analysis

```{r}
studyName = "Sonar"
datasetframe <- Sonar
Outcome <- "Class"

# 50% of subjects for training

trainFraction = 0.5

```

Setting the Training and Testing sets
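
The split below draws a random stratified sample, so the exact train/test membership changes from run to run; if a reproducible split is preferred, the RNG seed can be fixed first (an optional sketch, with an arbitrary seed value):

```{r eval=FALSE}
# Optional: fix the random seed so the stratified split below is reproducible.
set.seed(1234)
```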

```{r, results = "asis", warning = FALSE, dpi=600, fig.height= 6.0, fig.width= 8.0}

tb <- table(datasetframe[,Outcome])
classNames <- unique(datasetframe[,Outcome])

allrowClass <- datasetframe[,Outcome]
names(allrowClass) <- rownames(datasetframe)

trainsize <- trainFraction*min(tb);
trainSamples <- NULL;
for (theClass in classNames)
{
  classSample <- allrowClass[allrowClass == theClass]
  trainSamples <- c(trainSamples,names(classSample[sample(length(classSample),trainsize)]))
}


datasetframe_train <- datasetframe[trainSamples,]
testSamples <- !(rownames(datasetframe) %in% trainSamples)
datasetframe_test <- datasetframe[testSamples,]

outcomes <- datasetframe_train[,Outcome]

pander::pander(table(datasetframe[,Outcome]),caption="All")
pander::pander(table(datasetframe_train[,Outcome]),caption="Training")
pander::pander(table(datasetframe_test[,Outcome]),caption="Testing")


```

## Machine Learning with the filteredFit() function

Train simple NB and LASSO models on the training set.

In FRESA.CAD, all binary classification tasks assume that the outcome is coded as 0 and 1.

```{r results = "asis", warning = FALSE, dpi=600, fig.height= 6.0, fig.width= 8.0}

datasetframe_train[,Outcome] <- 1*(datasetframe_train[,Outcome] == classNames[2])
datasetframe_test[,Outcome] <- 1*(datasetframe_test[,Outcome] == classNames[2])

mNBRaw <- filteredFit(paste(Outcome,"~."),
                   datasetframe_train,
                   fitmethod=NAIVE_BAYES,
                     filtermethod=univariate_KS,
                     filtermethod.control=list(pvalue=0.01,limit= 0),
                     pca=FALSE
                   )

mLASSORaw <- filteredFit(paste(Outcome,"~."),
                   datasetframe_train,
                   fitmethod=LASSO_MIN,
                     filtermethod=univariate_KS,
                     filtermethod.control=list(pvalue=0.20,limit= -1),
                    family = "binomial"
                   )


```

With PCA

```{r results = "asis", warning = FALSE, dpi=600, fig.height= 6.0, fig.width= 8.0}
# With PCA
mNBPCA <- filteredFit(paste(Outcome,"~."),
                   datasetframe_train,
                   fitmethod=NAIVE_BAYES,
                     filtermethod=univariate_KS,
                     filtermethod.control=list(pvalue=0.01,limit= 0),
                     pca=TRUE
                   )


mLASSOPCA <- filteredFit(paste(Outcome,"~."),
                   datasetframe_train,
                   fitmethod=LASSO_MIN,
                     filtermethod=univariate_KS,
                     filtermethod.control=list(pvalue=0.20,limit= -1),
                     PCA = TRUE,
                    family = "binomial"
                   )
```

Now we run filteredFit() with decorrelation enabled (DECOR = TRUE) and its default parameters.

```{r results = "asis", warning = FALSE, dpi=600, fig.height= 6.0, fig.width= 8.0}

mNBDecor <- filteredFit(paste(Outcome,"~."),
                   datasetframe_train,
                    fitmethod=NAIVE_BAYES,
                     filtermethod=univariate_KS,
                     filtermethod.control=list(pvalue=0.01,limit= 0),
                     DECOR = TRUE,
                     pca=FALSE
                   )

mLASSODecor <- filteredFit(paste(Outcome,"~."),
                   datasetframe_train,
                    fitmethod=LASSO_MIN,
                     filtermethod=univariate_KS,
                     filtermethod.control=list(pvalue=0.20,limit= -1),
                     DECOR = TRUE,
                    family = "binomial"
                   )


```

Decorrelation with custom parameters: Spearman correlation and a robust linear fit (RLM).

```{r results = "asis", warning = FALSE, dpi=600, fig.height= 6.0, fig.width= 8.0}
mNBDecor2 <- filteredFit(paste(Outcome,"~."),
                   datasetframe_train,
                    fitmethod=NAIVE_BAYES,
                     filtermethod=univariate_KS,
                     filtermethod.control=list(pvalue=0.01,limit= 0),
                     DECOR = TRUE,
                     DECOR.control=list(method="spearman",type="RLM"),
                     pca=FALSE
                   )
mLASSODecor2 <- filteredFit(paste(Outcome,"~."),
                   datasetframe_train,
                    fitmethod=LASSO_MIN,
                     filtermethod=univariate_KS,
                     filtermethod.control=list(pvalue=0.20,limit= -1),
                     DECOR = TRUE,
                     DECOR.control=list(method="spearman",type="RLM"),
                    family = "binomial"
                   )


```

The predict() methods apply the fitted transformations to the test set, so we can make a side-by-side comparison of the predictions.

```{r results = "asis", warning = FALSE, dpi=600, fig.height= 6.0, fig.width= 8.0}

# Predict the raw testing set
prRAW <- predict(mNBRaw,datasetframe_test)

# Predict with PCA
prPCA <- predict(mNBPCA,datasetframe_test)

# Predict the transformed dataset
prDecor <- predict(mNBDecor,datasetframe_test)

# Predict the transformed dataset spearman
prDecor2 <- predict(mNBDecor2,datasetframe_test)

par(mfrow=c(2,2))
AllRocAUC <- NULL;

classoutcomes <- datasetframe_test[,Outcome]
psRaw <- predictionStats_binary(cbind(classoutcomes,prRAW),
                                "NB Raw",cex=0.75)
pander::pander(psRaw$aucs)
AllRocAUC <- rbind(AllRocAUC,psRaw$aucs)

psPCA <- predictionStats_binary(cbind(classoutcomes,prPCA),
                                "NB PCA",cex=0.75)
pander::pander(psPCA$aucs)
AllRocAUC <- rbind(AllRocAUC,psPCA$aucs)

psDecor <- predictionStats_binary(cbind(classoutcomes,prDecor),
                                "NB GDSTM",cex=0.75)
pander::pander(psDecor$aucs)
AllRocAUC <- rbind(AllRocAUC,psDecor$aucs);


psDecor2 <- predictionStats_binary(cbind(classoutcomes,prDecor2),
                                "NB GDSTM Spearman",cex=0.75)
pander::pander(psDecor2$aucs)
AllRocAUC <- rbind(AllRocAUC,psDecor2$aucs);


psRaw <- predictionStats_binary(cbind(classoutcomes,
                                      predict(mLASSORaw,datasetframe_test)),
                                "LASSO Raw",cex=0.75)
pander::pander(psRaw$aucs)
AllRocAUC <- rbind(AllRocAUC,psRaw$aucs)

psPCA <- predictionStats_binary(cbind(classoutcomes,
                                      predict(mLASSOPCA,datasetframe_test)),
                                "LASSO PCA",cex=0.75)
pander::pander(psPCA$aucs)
AllRocAUC <- rbind(AllRocAUC,psPCA$aucs)

psDecor <- predictionStats_binary(cbind(classoutcomes,
                                        predict(mLASSODecor,datasetframe_test)),
                                "LASSO GDSTM",cex=0.75)
pander::pander(psDecor$aucs)
AllRocAUC <- rbind(AllRocAUC,psDecor$aucs);


psDecor2 <- predictionStats_binary(cbind(classoutcomes,
                                         predict(mLASSODecor2,datasetframe_test)),
                                "LASSO GDSTM Spearman",cex=0.75)
pander::pander(psDecor2$aucs)
AllRocAUC <- rbind(AllRocAUC,psDecor2$aucs);

```

## Comparing ROCAUC

```{r results = "asis", warning = FALSE, dpi=600, fig.height= 6.0, fig.width= 8.0}

rownames(AllRocAUC) <- c("NB:Raw","NB:PCA","NB:GDSTM_P","NB:GDSTM_S",
                         "LASSO:Raw","LASSO:PCA","LASSO:GDSTM_P","LASSO:GDSTM_S")
pander::pander(AllRocAUC)
bpROCAUC <- barPlotCiError(as.matrix(AllRocAUC),
                          metricname = "ROCAUC",
                          thesets = "ROC AUC",
                          themethod = rownames(AllRocAUC),
                          main = "ROC AUC",
                          offsets = c(0.5,1),
                          scoreDirection = ">",
                          ho=0.5,
                          args.legend = list(bg = "white",x="bottomright",inset=c(0.0,0),cex=0.75),
                          col = terrain.colors(nrow(AllRocAUC))
                          )

```
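
As a quick programmatic complement to the table and bar plot above, the model with the largest ROC AUC point estimate can be pulled out directly from AllRocAUC (a minimal sketch; it only relies on the est/lower/upper columns shown above):

```{r}
# Report the row of AllRocAUC with the largest ROC AUC point estimate.
bestModel <- which.max(AllRocAUC[, "est"])
AllRocAUC[bestModel, , drop = FALSE]
```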

## Visualization of GDSTM

The GDSTM is stored in the filteredFit() object. Hence, we can analyze and display the matrix.
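
Before plotting, a quick numeric summary gives a feel for the size and density of the stored transformation matrices (a small sketch that only uses the same `$GDSTM` entries accessed in the heatmaps below):

```{r}
# Dimensions of the transformation matrix and fraction of non-zero coefficients.
dim(mNBDecor$GDSTM)
mean(mNBDecor$GDSTM != 0)

# Same summary for the Spearman/RLM variant.
dim(mNBDecor2$GDSTM)
mean(mNBDecor2$GDSTM != 0)
```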

```{r results = "asis", warning = FALSE, dpi=600, fig.height= 6.0, fig.width= 8.0}

gplots::heatmap.2(mNBDecor$GDSTM,
                  trace = "none",
                  mar = c(10,10),
                  col=rev(heat.colors(7)),
                  main = paste("GDSTM Matrix (Pearson, LM):",studyName),
                  cexRow = 0.7,
                  cexCol = 0.7,
                  key.title=NA,
                  key.xlab="beta",
                  xlab="GDSTM Feature", ylab="Input Feature")

gplots::heatmap.2(mNBDecor2$GDSTM,
                  trace = "none",
                  mar = c(10,10),
                  col=rev(heat.colors(7)),
                  main = paste("GDSTM Matrix (Spearman, RLM):",studyName),
                  cexRow = 0.7,
                  cexCol = 0.7,
                  key.title=NA,
                  key.xlab="beta",
                  xlab="GDSTM Feature", ylab="Input Feature")
```

## Repeated Holdout Cross-Validation

```{r results = "asis", warning = FALSE, dpi=600, fig.height= 6.0, fig.width= 8.0}



dataCV <- datasetframe
dataCV[,Outcome] <- 1*(dataCV[,Outcome] == classNames[2])


cvNBRaw <- randomCV(dataCV,
                Outcome,
                fittingFunction= filteredFit,
                classSamplingType = "Ba",
                trainFraction = 0.80,
                repetitions = 100,
                fitmethod=NAIVE_BAYES,
                filtermethod=univariate_KS,
                filtermethod.control=list(pvalue=0.01,limit= 0),
                pca = FALSE
            )

cvNBPCA <- randomCV(dataCV,
                Outcome,
                trainSampleSets= cvNBRaw$trainSamplesSets,
                fittingFunction= filteredFit,
                fitmethod=NAIVE_BAYES,
                filtermethod=univariate_KS,
                filtermethod.control=list(pvalue=0.01,limit= 0),
                pca = TRUE
            )

cvNBDecor <- randomCV(dataCV,
                Outcome,
                trainSampleSets= cvNBRaw$trainSamplesSets,
                fittingFunction= filteredFit,
                fitmethod=NAIVE_BAYES,
                filtermethod=univariate_KS,
                filtermethod.control=list(pvalue=0.01,limit= 0),
                DECOR = TRUE,
                pca = FALSE
            )



```

The Aggregated Test Results

```{r results = "asis", warning = FALSE, dpi=600, fig.height= 4.0, fig.width= 8.0}

par(mfrow=c(1,3))
bpraw <- predictionStats_binary(cvNBRaw$testPredictions,"NB RAW",cex=0.75)
bpPCA <- predictionStats_binary(cvNBPCA$testPredictions,"NB PCA",cex=0.75)
bpdecor <- predictionStats_binary(cvNBDecor$testPredictions,"NB GDSTM",cex=0.75)

pander::pander(bpraw$aucs)
pander::pander(bpPCA$aucs)
pander::pander(bpdecor$aucs)
```
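
To compare the three cross-validated AUC estimates side by side rather than as separate tables, they can be stacked into a single table (a small convenience sketch that mirrors how AllRocAUC was built earlier):

```{r results = "asis"}
# Stack the aggregated AUC estimates of the three cross-validated NB models.
cvAUC <- rbind(bpraw$aucs, bpPCA$aucs, bpdecor$aucs)
rownames(cvAUC) <- c("NB RAW", "NB PCA", "NB GDSTM")
pander::pander(cvAUC)
```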

## Using Feature Interactions.
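
The preprocessing chunk below expands the 60 sonar bands into all pairwise products. The key step is the formula `Class ~ .*.`, which `model.matrix()` expands into all main effects plus every pairwise interaction column; a tiny toy example (hypothetical data, unrelated to Sonar) shows the naming pattern that the renaming code then cleans up:

```{r}
# Toy illustration of the ".*." expansion: main effects plus pairwise interactions.
toy <- data.frame(y = c(0, 1, 0), a = c(1, 2, 3), b = c(4, 5, 6))
colnames(model.matrix(y ~ .*., toy))
# expected column names: "(Intercept)" "a" "b" "a:b"
```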

```{r}

# Signed square root: square root of the absolute value, keeping the original sign.
signedsqrt <- function(x) { return (sign(x)*sqrt(abs(x)))}
data("Sonar", package = "mlbench")
sclass <- Sonar$Class

# Expand the 60 sonar bands into all main effects plus every pairwise interaction.
Sonar <- as.data.frame(model.matrix(Class ~ .*.,Sonar))
Sonar$`(Intercept)` <- NULL
Sonar[,1:ncol(Sonar)] <- sapply(Sonar,as.numeric)

# Turn the interaction names (e.g. "V1:V2") into syntactically valid column names ("V1_x_V2").
fnames <- colnames(Sonar)
fnames <- str_replace_all(fnames," ","_")
fnames <- str_replace_all(fnames,"/","_")
fnames <- str_replace_all(fnames,":","_x_")
colnames(Sonar) <- fnames
squaredfeatures <- str_detect(fnames,"_x_")

# Replace each product feature with its signed square root, keeping it on a scale
# comparable to the original bands.
Sonar[,squaredfeatures] <- as.data.frame(apply(Sonar[,squaredfeatures],2,signedsqrt));
Sonar$Class <- sclass

datasetframe <- Sonar

```

Setting the Training and Testing sets, reusing the same train/test split as before

```{r, results = "asis", warning = FALSE, dpi=600, fig.height= 6.0, fig.width= 8.0}


datasetframe_train <- datasetframe[trainSamples,]
datasetframe_test <- datasetframe[testSamples,]



```

### FI: Machine Learning with the filteredFit() function

Train simple NB and LASSO models on the training set.

In FRESA.CAD, all binary classification tasks assume that the outcome is coded as 0 and 1.

```{r results = "asis", warning = FALSE, dpi=600, fig.height= 6.0, fig.width= 8.0}

datasetframe_train[,Outcome] <- 1*(datasetframe_train[,Outcome] == classNames[2])
datasetframe_test[,Outcome] <- 1*(datasetframe_test[,Outcome] == classNames[2])

mNBRaw <- filteredFit(paste(Outcome,"~."),
                   datasetframe_train,
                   fitmethod=NAIVE_BAYES,
                     filtermethod=univariate_KS,
                     filtermethod.control=list(pvalue=0.01,limit= 0),
                     pca=FALSE
                   )

mLASSORaw <- filteredFit(paste(Outcome,"~."),
                   datasetframe_train,
                   fitmethod=LASSO_MIN,
                     filtermethod=univariate_KS,
                     filtermethod.control=list(pvalue=0.20,limit= -1),
                    family = "binomial"
                   )


```

With PCA

```{r results = "asis", warning = FALSE, dpi=600, fig.height= 6.0, fig.width= 8.0}
# With PCA
mNBPCA <- filteredFit(paste(Outcome,"~."),
                   datasetframe_train,
                   fitmethod=NAIVE_BAYES,
                     filtermethod=univariate_KS,
                     filtermethod.control=list(pvalue=0.01,limit= 0),
                     pca=TRUE
                   )


mLASSOPCA <- filteredFit(paste(Outcome,"~."),
                   datasetframe_train,
                   fitmethod=LASSO_MIN,
                     filtermethod=univariate_KS,
                     filtermethod.control=list(pvalue=0.20,limit= -1),
                     PCA = TRUE,
                    family = "binomial"
                   )
```

Now we run filteredFit() with decorrelation enabled (DECOR = TRUE) and its default parameters.

```{r results = "asis", warning = FALSE, dpi=600, fig.height= 6.0, fig.width= 8.0}

mNBDecor <- filteredFit(paste(Outcome,"~."),
                   datasetframe_train,
                    fitmethod=NAIVE_BAYES,
                     filtermethod=univariate_KS,
                     filtermethod.control=list(pvalue=0.01,limit= 0),
                     DECOR = TRUE,
                     pca=FALSE
                   )

mLASSODecor <- filteredFit(paste(Outcome,"~."),
                   datasetframe_train,
                    fitmethod=LASSO_MIN,
                     filtermethod=univariate_KS,
                     filtermethod.control=list(pvalue=0.20,limit= -1),
                     DECOR = TRUE,
                    family = "binomial"
                   )


```

Decorrelation with custom parameters: Spearman correlation and a robust linear fit (RLM).

```{r results = "asis", warning = FALSE, dpi=600, fig.height= 6.0, fig.width= 8.0}
mNBDecor2 <- filteredFit(paste(Outcome,"~."),
                   datasetframe_train,
                    fitmethod=NAIVE_BAYES,
                     filtermethod=univariate_KS,
                     filtermethod.control=list(pvalue=0.01,limit= 0),
                     DECOR = TRUE,
                     DECOR.control=list(method="spearman",type="RLM"),
                     pca=FALSE
                   )
mLASSODecor2 <- filteredFit(paste(Outcome,"~."),
                   datasetframe_train,
                    fitmethod=LASSO_MIN,
                     filtermethod=univariate_KS,
                     filtermethod.control=list(pvalue=0.20,limit= -1),
                     DECOR = TRUE,
                     DECOR.control=list(method="spearman",type="RLM"),
                    family = "binomial"
                   )


```

The predict() methods apply the fitted transformations to the test set, so we can make a side-by-side comparison of the predictions.

```{r results = "asis", warning = FALSE, dpi=600, fig.height= 6.0, fig.width= 8.0}

# Predict the raw testing set
prRAW <- predict(mNBRaw,datasetframe_test)

# Predict with PCA
prPCA <- predict(mNBPCA,datasetframe_test)

# Predict the transformed dataset
prDecor <- predict(mNBDecor,datasetframe_test)

# Predict the transformed dataset spearman
prDecor2 <- predict(mNBDecor2,datasetframe_test)

par(mfrow=c(2,2))
AllRocAUC <- NULL;

classoutcomes <- datasetframe_test[,Outcome]
psRaw <- predictionStats_binary(cbind(classoutcomes,prRAW),
                                "NB Raw",cex=0.75)
pander::pander(psRaw$aucs)
AllRocAUC <- rbind(AllRocAUC,psRaw$aucs)

psPCA <- predictionStats_binary(cbind(classoutcomes,prPCA),
                                "NB PCA",cex=0.75)
pander::pander(psPCA$aucs)
AllRocAUC <- rbind(AllRocAUC,psPCA$aucs)

psDecor <- predictionStats_binary(cbind(classoutcomes,prDecor),
                                "NB GDSTM",cex=0.75)
pander::pander(psDecor$aucs)
AllRocAUC <- rbind(AllRocAUC,psDecor$aucs);


psDecor2 <- predictionStats_binary(cbind(classoutcomes,prDecor2),
                                "NB GDSTM Spearman",cex=0.75)
pander::pander(psDecor2$aucs)
AllRocAUC <- rbind(AllRocAUC,psDecor2$aucs);


psRaw <- predictionStats_binary(cbind(classoutcomes,
                                      predict(mLASSORaw,datasetframe_test)),
                                "LASSO Raw",cex=0.75)
pander::pander(psRaw$aucs)
AllRocAUC <- rbind(AllRocAUC,psRaw$aucs)

psPCA <- predictionStats_binary(cbind(classoutcomes,
                                      predict(mLASSOPCA,datasetframe_test)),
                                "LASSO PCA",cex=0.75)
pander::pander(psPCA$aucs)
AllRocAUC <- rbind(AllRocAUC,psPCA$aucs)

psDecor <- predictionStats_binary(cbind(classoutcomes,
                                        predict(mLASSODecor,datasetframe_test)),
                                "LASSO GDSTM",cex=0.75)
pander::pander(psDecor$aucs)
AllRocAUC <- rbind(AllRocAUC,psDecor$aucs);


psDecor2 <- predictionStats_binary(cbind(classoutcomes,
                                         predict(mLASSODecor2,datasetframe_test)),
                                "LASSO GDSTM Spearman",cex=0.75)
pander::pander(psDecor2$aucs)
AllRocAUC <- rbind(AllRocAUC,psDecor2$aucs);

```

## FI: Comparing ROCAUC

```{r results = "asis", warning = FALSE, dpi=600, fig.height= 6.0, fig.width= 8.0}

rownames(AllRocAUC) <- c("NB:Raw","NB:PCA","NB:GDSTM_P","NB:GDSTM_S",
                         "LASSO:Raw","LASSO:PCA","LASSO:GDSTM_P","LASSO:GDSTM_S")
pander::pander(AllRocAUC)
bpROCAUC <- barPlotCiError(as.matrix(AllRocAUC),
                          metricname = "ROCAUC",
                          thesets = "ROC AUC",
                          themethod = rownames(AllRocAUC),
                          main = "ROC AUC",
                          offsets = c(0.5,1),
                          scoreDirection = ">",
                          ho=0.5,
                          args.legend = list(bg = "white",x="bottomright",inset=c(0.0,0),cex=0.75),
                          col = terrain.colors(nrow(AllRocAUC))
                          )

```

