#install.packages("xgboost")
library(xgboost)
require(xgboost)
data(agaricus.train, package='xgboost')
data(agaricus.test, package='xgboost')
train <- agaricus.train
test <- agaricus.test
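# agaricus is the mushroom data set that ships with xgboost: train$data
# is a sparse dgCMatrix of binary features, train$label a 0/1 vector.
dim(train$data)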
model <- xgboost(data = train$data, label = train$label,
                 nrounds = 2, objective = "binary:logistic")
## [0] train-error:0.000614
## [1] train-error:0.001228
preds <- predict(model, test$data)
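# Under binary:logistic the predictions are probabilities, so a quick
# sanity check (not in the original) is to threshold at 0.5 and compare
# against the test labels:
err <- mean(as.numeric(preds > 0.5) != test$label)
print(paste("test error:", err))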
cv.res <- xgb.cv(data = train$data, label = train$label, nfold = 5,
                 nrounds = 2, objective = "binary:logistic")
## [0] train-error:0.000921+0.000416 test-error:0.001228+0.001164
## [1] train-error:0.001228+0.000291 test-error:0.001228+0.001164
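# cv.res carries the per-round evaluation history; depending on the
# package version it is a data.table or a list with an evaluation_log
# data.table. Printing it works either way:
print(cv.res)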
loglossobj <- function(preds, dtrain) {
  # dtrain is the internal data format; extract the labels from it
  labels <- getinfo(dtrain, "label")
  # Transform the raw margin scores into probabilities via the sigmoid
  preds <- 1 / (1 + exp(-preds))
  # First- and second-order gradients of the log loss
  grad <- preds - labels
  hess <- preds * (1 - preds)
  # Return both as a list
  return(list(grad = grad, hess = hess))
}
model <- xgboost(data = train$data, label = train$label,
                 nrounds = 2, objective = loglossobj, eval_metric = "error")
## [0] train-error:0.001228
## [1] train-error:0.001228
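# A custom evaluation metric can be plugged in alongside a custom
# objective. With a custom objective the preds arriving in the metric
# are raw margin scores, so we threshold at 0, which corresponds to
# probability 0.5. A sketch in the spirit of the package demos, passed
# via the obj/feval arguments of xgb.train:
evalerror <- function(preds, dtrain) {
  labels <- getinfo(dtrain, "label")
  err <- mean(as.numeric(preds > 0) != labels)
  return(list(metric = "error", value = err))
}
dtrain0 <- xgb.DMatrix(train$data, label = train$label)
model <- xgb.train(params = list(), data = dtrain0, nrounds = 2,
                   watchlist = list(train = dtrain0),
                   obj = loglossobj, feval = evalerror)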
bst <- xgb.cv(data = train$data, label = train$label, nfold = 5,
              nrounds = 20, objective = "binary:logistic",
              early.stop.round = 3, maximize = FALSE)
## [0] train-error:0.001075+0.000321 test-error:0.001228+0.000687
## [1] train-error:0.001229+0.000172 test-error:0.001228+0.000687
## [2] train-error:0.001075+0.000321 test-error:0.001228+0.000687
## [3] train-error:0.000307+0.000442 test-error:0.000614+0.001001
## [4] train-error:0.000192+0.000429 test-error:0.000461+0.001030
## [5] train-error:0.000000+0.000000 test-error:0.000000+0.000000
## [6] train-error:0.000000+0.000000 test-error:0.000000+0.000000
## [7] train-error:0.000000+0.000000 test-error:0.000000+0.000000
## [8] train-error:0.000000+0.000000 test-error:0.000000+0.000000
## Stopping. Best iteration: 6
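# Note: later releases renamed this argument to early_stopping_rounds,
# and the returned object records the best round (e.g. best_iteration;
# exact field names vary by version).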
dtrain <- xgb.DMatrix(train$data, label = train$label)
model <- xgboost(data = dtrain, nrounds = 2, objective = "binary:logistic")
## [0] train-error:0.000614
## [1] train-error:0.001228
pred_train <- predict(model, dtrain, outputmargin=TRUE)
setinfo(dtrain, "base_margin", pred_train)
## [1] TRUE
model <- xgboost(data = dtrain, nrounds = 2, objective = "binary:logistic")
## [0] train-error:0.000614
## [1] train-error:0.000614
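# The margin is stored inside dtrain itself, so the second call keeps
# boosting on top of the first model's predictions; this is how
# training is continued. To score new data consistently, the same
# base_margin would have to be set on that DMatrix as well.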
# Toy data: 64 rows, 2 columns, with one value per row blanked out
dat <- matrix(rnorm(128), 64, 2)
label <- sample(0:1, nrow(dat), replace = TRUE)
for (i in 1:nrow(dat)) {
  # pick one of the two columns at random and set it to NA
  ind <- sample(2, 1)
  dat[i, ind] <- NA
}
model <- xgboost(data = dat, label = label, missing = NA,
                 nrounds = 2, objective = "binary:logistic")
## [0] train-error:0.281250
## [1] train-error:0.250000
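# Equivalently (a sketch, not from the original), the missing-value
# flag can be attached when the DMatrix is built directly:
dmiss <- xgb.DMatrix(dat, label = label, missing = NA)
model_miss <- xgboost(data = dmiss, nrounds = 2,
                      objective = "binary:logistic")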
bst <- xgboost(data = train$data, label = train$label, max_depth = 2,
               eta = 1, nthread = 2, nrounds = 2, objective = "binary:logistic")
## [0] train-error:0.046522
## [1] train-error:0.022263
#install.packages("DiagrammeR")
library(DiagrammeR)
xgb.plot.tree(feature_names = colnames(agaricus.train$data), model = bst)
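# If DiagrammeR is unavailable, the same trees can be inspected as
# plain text:
xgb.dump(bst)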
bst <- xgboost(data = train$data, label = train$label, max_depth = 2,
               eta = 1, nthread = 2, nrounds = 10, objective = "binary:logistic")
## [0] train-error:0.046522
## [1] train-error:0.022263
## [2] train-error:0.007063
## [3] train-error:0.015200
## [4] train-error:0.007063
## [5] train-error:0.001228
## [6] train-error:0.001228
## [7] train-error:0.001228
## [8] train-error:0.001228
## [9] train-error:0.000000
xgb.plot.tree(feature_names = colnames(agaricus.train$data), model = bst)
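# With 10 rounds the tree-by-tree plot becomes crowded; newer package
# versions also provide xgb.plot.multi.trees, which collapses all trees
# into one summary graph (availability depends on the installed
# version):
xgb.plot.multi.trees(model = bst, features_keep = 3)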
bst <- xgboost(data = train$data, label = train$label, max_depth = 15,
               eta = 1, nthread = 2, nrounds = 30, objective = "binary:logistic",
               min_child_weight = 50)
## [0] train-error:0.039306
## [1] train-error:0.024413
## [2] train-error:0.015507
## [3] train-error:0.015507
## [4] train-error:0.012283
## [5] train-error:0.015507
## [6] train-error:0.017350
## [7] train-error:0.014740
## [8] train-error:0.015354
## [9] train-error:0.013051
## [10] train-error:0.016889
## [11] train-error:0.015354
## [12] train-error:0.015815
## [13] train-error:0.015815
## [14] train-error:0.015815
## [15] train-error:0.015815
## [16] train-error:0.015815
## [17] train-error:0.015815
## [18] train-error:0.015815
## [19] train-error:0.015815
## [20] train-error:0.015815
## [21] train-error:0.015815
## [22] train-error:0.015815
## [23] train-error:0.015815
## [24] train-error:0.015815
## [25] train-error:0.015815
## [26] train-error:0.015815
## [27] train-error:0.015815
## [28] train-error:0.015815
## [29] train-error:0.015815
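# With min_child_weight = 50 every leaf must cover at least 50
# (hessian-weighted) observations, so even at depth 15 the training
# error plateaus around 0.0158 instead of dropping to zero: the
# constraint acts as a regularizer.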
bst <- xgboost(data = train$data, label = train$label, max_depth = 2,
               eta = 1, nthread = 2, nrounds = 2, objective = "binary:logistic")
## [0] train-error:0.046522
## [1] train-error:0.022263
importance_matrix <- xgb.importance(feature_names = colnames(agaricus.train$data), model = bst)
#install.packages("Ckmeans.1d.dp")
library(Ckmeans.1d.dp)
xgb.plot.importance(importance_matrix)
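# The underlying table can also be inspected directly:
print(importance_matrix)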
