Import Library and Data

library(xgboost)
# load in the agaricus dataset
data(agaricus.train, package='xgboost')
data(agaricus.test, package='xgboost')
dtrain <- xgb.DMatrix(agaricus.train$data, label = agaricus.train$label)
dtest <- xgb.DMatrix(agaricus.test$data, label = agaricus.test$label)
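
To confirm the data loaded as expected, the DMatrix objects can be inspected before training. This quick check is not part of the original run and only uses the standard dim() and getinfo() accessors:

# optional sanity check on the constructed DMatrix objects
dim(dtrain)                     # rows and columns of the training matrix
dim(dtest)                      # rows and columns of the test matrix
head(getinfo(dtrain, "label"))  # first few 0/1 labels attached to dtrain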

Parameters

watchlist <- list(eval = dtest, train = dtrain)
num_round <- 20
param     <- list(max_depth = 2, eta = 1, nthread = 2, verbosity = 0,
                  objective = "my:loss")
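
The names in watchlist (eval, train) become the prefixes of the metric columns printed during training below. The watchlist can also drive early stopping through xgb.train()'s early_stopping_rounds argument; the following is a minimal sketch, not used in this walkthrough, assuming the default callback monitors the metric of the last watchlist entry (so the validation set is placed last here):

# purely illustrative: stop once the monitored metric has not improved
# for 5 consecutive rounds
bst_es <- xgb.train(param, dtrain, nrounds = 50,
                    watchlist = list(train = dtrain, eval = dtest),
                    early_stopping_rounds = 5)
bst_es$best_iteration  # round at which the monitored metric was best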

Running the Training

bst <- xgb.train(param, dtrain, num_round, watchlist)
## [1]  eval-my_loss:0.032816   train-my_loss:0.034209 
## [2]  eval-my_loss:0.017759   train-my_loss:0.015349 
## [3]  eval-my_loss:0.010793   train-my_loss:0.010074 
## [4]  eval-my_loss:0.006098   train-my_loss:0.005906 
## [5]  eval-my_loss:0.003424   train-my_loss:0.003295 
## [6]  eval-my_loss:0.003142   train-my_loss:0.003108 
## [7]  eval-my_loss:0.003028   train-my_loss:0.002983 
## [8]  eval-my_loss:0.001775   train-my_loss:0.001772 
## [9]  eval-my_loss:0.001525   train-my_loss:0.001544 
## [10] eval-my_loss:0.001399   train-my_loss:0.001478 
## [11] eval-my_loss:0.001388   train-my_loss:0.001404 
## [12] eval-my_loss:0.001464   train-my_loss:0.001491 
## [13] eval-my_loss:0.001385   train-my_loss:0.001375 
## [14] eval-my_loss:0.001316   train-my_loss:0.001307 
## [15] eval-my_loss:0.001326   train-my_loss:0.001319 
## [16] eval-my_loss:0.001051   train-my_loss:0.001050 
## [17] eval-my_loss:0.001028   train-my_loss:0.001031 
## [18] eval-my_loss:0.001020   train-my_loss:0.001107 
## [19] eval-my_loss:0.001030   train-my_loss:0.000986 
## [20] eval-my_loss:0.001002   train-my_loss:0.000930
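
After training, the booster behaves like any other xgboost model. Below is a short sketch of scoring the test set, reading back the per-round log, and saving the model; the 0.5 threshold assumes the custom my:loss objective yields probability-like predictions, and the file name is arbitrary:

# score the held-out set and compute a simple classification error
pred   <- predict(bst, dtest)
labels <- getinfo(dtest, "label")
err    <- mean(as.numeric(pred > 0.5) != labels)
print(paste("test-error =", err))

# the per-round metrics printed above are also stored on the booster
head(bst$evaluation_log)

# persist the trained model to disk
xgb.save(bst, "my_loss.model")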