## Could not use the weather dataset; had issues reading the data.
## Learning xgboost using the example provided.
## Courtesy: http://dmlc.ml/rstats/2016/03/10/xgboost.html
## Developing the Model

require(xgboost)
## Loading required package: xgboost
loglossobj <- function(preds, dtrain) {
  # dtrain is the internal format of the training data
  # Extract the labels from the training data
  labels <- getinfo(dtrain, "label")
  # Map the raw margin to a probability with the sigmoid
  preds <- 1/(1 + exp(-preds))
  # First- and second-order gradients of the log loss
  grad <- preds - labels
  hess <- preds * (1 - preds)
  # Return the result as a list
  return(list(grad = grad, hess = hess))
}
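
## Quick sanity check (my own addition, not from the tutorial): the analytic
## gradient in loglossobj should match a numerical derivative of the log loss.
margin <- 0.3                                # a raw score before the sigmoid
y <- 1                                       # the true label
logloss <- function(m, y) {
  p <- 1 / (1 + exp(-m))
  -(y * log(p) + (1 - y) * log(1 - p))
}
eps <- 1e-6
num_grad <- (logloss(margin + eps, y) - logloss(margin - eps, y)) / (2 * eps)
ana_grad <- 1 / (1 + exp(-margin)) - y       # preds - labels, as in loglossobj
all.equal(num_grad, ana_grad, tolerance = 1e-4)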

data(agaricus.train, package='xgboost')
data(agaricus.test, package='xgboost')
train <- agaricus.train
test <- agaricus.test

model <- xgboost(data = train$data, label = train$label,
                 nrounds = 2, objective = "binary:logistic")
## [0]  train-error:0.000614
## [1]  train-error:0.001228
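## A quick held-out check (a sketch, not part of the original tutorial):
## for binary:logistic, predict() returns probabilities, so threshold at 0.5.
pred <- predict(model, test$data)
err <- mean(as.numeric(pred > 0.5) != test$label)
print(paste("test-error =", err))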
cv.res <- xgb.cv(data = train$data, label = train$label, nfold = 5,
                 nrounds = 2, objective = "binary:logistic")
## [0]  train-error:0.000768+0.000272   test-error:0.001228+0.001284
## [1]  train-error:0.000922+0.000210   test-error:0.001228+0.001284
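## Depending on the xgboost version, xgb.cv returns either a data.table of
## per-round metrics or an object carrying an evaluation_log; printing it
## shows the mean and standard deviation of train/test error per round.
print(cv.res)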
## Customized Objective

model <- xgboost(data = train$data, label = train$label,
                 nrounds = 2, objective = loglossobj, eval_metric = "error")
## [0]  train-error:0.001228
## [1]  train-error:0.001228
## Early Stopping

bst <- xgb.cv(data = train$data, label = train$label, nfold = 5,
              nrounds = 20, objective = "binary:logistic",
              early.stop.round = 3, maximize = FALSE)
## [0]  train-error:0.000922+0.000250   test-error:0.001228+0.000875
## [1]  train-error:0.001075+0.000219   test-error:0.001228+0.000875
## [2]  train-error:0.000768+0.000490   test-error:0.001228+0.000875
## [3]  train-error:0.000307+0.000442   test-error:0.000614+0.001001
## [4]  train-error:0.000192+0.000429   test-error:0.000460+0.001029
## [5]  train-error:0.000000+0.000000   test-error:0.000000+0.000000
## [6]  train-error:0.000000+0.000000   test-error:0.000000+0.000000
## [7]  train-error:0.000000+0.000000   test-error:0.000000+0.000000
## [8]  train-error:0.000000+0.000000   test-error:0.000000+0.000000
## Stopping. Best iteration: 6
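## Note: more recent xgboost releases renamed the argument; a sketch of the
## equivalent call, assuming a version that uses early_stopping_rounds:
## bst <- xgb.cv(data = train$data, label = train$label, nfold = 5,
##               nrounds = 20, objective = "binary:logistic",
##               early_stopping_rounds = 3, maximize = FALSE)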
## Continue Training

dtrain <- xgb.DMatrix(train$data, label = train$label)

model <- xgboost(data = dtrain, nrounds = 2, objective = "binary:logistic")
## [0]  train-error:0.000614
## [1]  train-error:0.001228
# Predict the raw margin (log-odds) rather than the probability
pred_train <- predict(model, dtrain, outputmargin=TRUE)

# Feed these scores back in as the starting point for further boosting rounds
setinfo(dtrain, "base_margin", pred_train)
## [1] TRUE
model <- xgboost(data = dtrain, nrounds = 2, objective = "binary:logistic")
## [0]  train-error:0.000614
## [1]  train-error:0.000614
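## An alternative way to continue training (a sketch, assuming a version
## where xgb.train accepts an xgb_model argument): pass the earlier model
## directly instead of setting base_margin by hand.
dtrain2 <- xgb.DMatrix(train$data, label = train$label)  # fresh, no base_margin
model2 <- xgb.train(params = list(objective = "binary:logistic"),
                    data = dtrain2, nrounds = 2, xgb_model = model)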
## Handle Missing Values

dat <- matrix(rnorm(128), 64, 2)
label <- sample(0:1, nrow(dat), replace = TRUE)
# Randomly blank out one of the two features in every row
for (i in 1:nrow(dat)) {
  ind <- sample(2, 1)
  dat[i, ind] <- NA
}

model <- xgboost(data = dat, label = label, missing = NA,
                 nrounds = 2, objective = "binary:logistic")
## [0]  train-error:0.281250
## [1]  train-error:0.281250
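## The same missing-value handling applies at prediction time; a small
## sketch building the DMatrix explicitly with missing = NA:
dtrain_na <- xgb.DMatrix(dat, label = label, missing = NA)
pred_na <- predict(model, dtrain_na)
head(pred_na)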
## Developing Visualizations

bst <- xgboost(data = train$data, label = train$label, max.depth = 2,
               eta = 1, nthread = 2, nround = 2, objective = "binary:logistic")
## [0]  train-error:0.046522
## [1]  train-error:0.022263
xgb.plot.tree(feature_names = agaricus.train$data@Dimnames[[2]], model = bst)
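## Feature importance is another quick way to inspect this model (a sketch;
## xgb.importance and xgb.plot.importance ship with the same package):
imp <- xgb.importance(feature_names = colnames(train$data), model = bst)
head(imp)
xgb.plot.importance(imp)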
## Tree Visualization

bst <- xgboost(data = train$data, label = train$label, max.depth = 15,
               eta = 1, nthread = 2, nround = 30, objective = "binary:logistic",
               min_child_weight = 50)
## [0]  train-error:0.039306
## [1]  train-error:0.024413
## [2]  train-error:0.015507
## [3]  train-error:0.015507
## [4]  train-error:0.012283
## [5]  train-error:0.015507
## [6]  train-error:0.017350
## [7]  train-error:0.014740
## [8]  train-error:0.015354
## [9]  train-error:0.013051
## [10] train-error:0.016889
## [11] train-error:0.015354
## [12] train-error:0.015815
## [13] train-error:0.015815
## [14] train-error:0.015815
## [15] train-error:0.015815
## [16] train-error:0.015815
## [17] train-error:0.015815
## [18] train-error:0.015815
## [19] train-error:0.015815
## [20] train-error:0.015815
## [21] train-error:0.015815
## [22] train-error:0.015815
## [23] train-error:0.015815
## [24] train-error:0.015815
## [25] train-error:0.015815
## [26] train-error:0.015815
## [27] train-error:0.015815
## [28] train-error:0.015815
## [29] train-error:0.015815
## xgb.plot.multi.trees(model = bst, feature_names = agaricus.train$data@Dimnames[[2]], features.keep = 3)


## Understand the Depth 

## (This reuses the deep model bst trained in the previous section;
## the identical training call is not repeated here.)
## xgb.plot.deepness(model = bst)
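
## A text dump offers a non-graphical way to inspect tree structure and depth
## (a sketch; the stats argument is with_stats in recent versions,
## with.stats in older ones):
dump <- xgb.dump(bst, with_stats = TRUE)
head(dump, 10)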