Abstract

In this competition we predict how much interest a rental apartment listing will attract, based on the provided data: the text description, photos, number of bedrooms, price, and so on. The data come from renthop.com; the apartments are located in New York City.

The target variable, interest_level, is defined by how many inquiries a listing received on the site during the period covered by the data.
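Submissions in this competition are scored with multi-class log loss, the same metric used as eval_metric later in this script. A minimal R sketch of the metric (assuming probs is an n x 3 matrix of predicted class probabilities and labels holds the true classes as integers 1..3; both names are illustrative):

# Multi-class log loss: mean negative log-probability assigned to the true class
mlogloss <- function(probs, labels) {
  eps <- 1e-15  # clip probabilities away from 0 and 1
  p <- pmin(pmax(probs[cbind(seq_along(labels), labels)], eps), 1 - eps)
  -mean(log(p))
}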

File descriptions

train.json - the training set; test.json - the test set

Fields

  • bathrooms: number of bathrooms
  • bedrooms: number of bedrooms
  • building_id: building identifier
  • created: creation timestamp of the listing
  • description: free-text description of the listing
  • display_address: address as shown in the listing
  • features: list of the apartment's features
  • latitude: geographic latitude
  • listing_id: listing identifier
  • longitude: geographic longitude
  • manager_id: identifier of the listing's manager
  • photos: list of photo URLs; the photos can be downloaded from renthop
  • price: price in US dollars
  • street_address: street address
  • interest_level: the target variable; it has 3 categories: 'high', 'medium', 'low'

Data loading and preprocessing

gc()  ## run the garbage collector
##          used (Mb) gc trigger (Mb) max used (Mb)
## Ncells 369123 19.8     592000 31.7   460000 24.6
## Vcells 568322  4.4    1308461 10.0   786367  6.0
assign("last.warning", NULL, envir = baseenv()) # clear the warnings list

library(lubridate)
library(jsonlite)
library(xgboost)
library(dplyr)
library(purrr)
library(knitr)
library(stringr)
library(reshape2)

setwd("d:/Kaggle/CFT NSK/")

train <- fromJSON("train.json")
test <- fromJSON("test.json")

# Follow the common recommendations to make the data easier to work with
# https://www.kaggle.com/danjordan/how-to-correctly-load-data-into-r

# unlist every variable except `photos` and `features` and convert to tibble
#Train
vars <- setdiff(names(train), c("photos", "features"))
train <- map_at(train, vars, unlist) %>% tibble::as_tibble(.)
train_id <-train$listing_id

#Test
vars <- setdiff(names(test), c("photos", "features"))
test <- map_at(test, vars, unlist) %>% tibble::as_tibble(.)
test_id <-test$listing_id

# Add the list-column lengths (number of features / photos) as new variables
train$feature_count <- lengths(train$features)
test$feature_count <- lengths(test$features)
train$photo_count <- lengths(train$photos)
test$photo_count <- lengths(test$photos)

# Fill in empty feature lists; the test set gets a placeholder target
train$features[lengths(train$features) == 0] <- 'Nofeat'
test$features[lengths(test$features) == 0] <- 'Nofeat'
test$interest_level <- 'none'

# Combine the training and test data into a single frame
train_test <- rbind(train,test)
rm(train,test)

Data processing

Group the apartment features and count how many times each one occurs:

feature = data.frame(feature = tolower(unlist(train_test$features))) %>% # convert all features to lower case
  group_by(feature) %>%
  summarise(feature_count = n()) %>%
  arrange(desc(feature_count)) %>%
  filter(feature_count >= 20)

Show the top of the table (first 20 lines):

dt <- kable(feature, caption = "Feature Count")
head(dt,20)
##  [1] "Table: Feature Count"                                  
##  [2] ""                                                      
##  [3] "feature                                  feature_count"
##  [4] "--------------------------------------  --------------"
##  [5] "elevator                                         65833"
##  [6] "cats allowed                                     59194"
##  [7] "hardwood floors                                  59155"
##  [8] "dogs allowed                                     55207"
##  [9] "doorman                                          52505"
## [10] "dishwasher                                       52035"
## [11] "laundry in building                              47483"
## [12] "no fee                                           45450"
## [13] "fitness center                                   33420"
## [14] "laundry in unit                                  23752"
## [15] "pre-war                                          23111"
## [16] "roof deck                                        16466"
## [17] "outdoor space                                    13414"
## [18] "dining room                                      12847"
## [19] "high speed internet                              10622"
## [20] "nofeat                                            8135"
Apply regular expressions to consolidate near-duplicate feature names and improve their quality:

features_list <- feature

for (i in 1:length(train_test$features))
{
  feature <- train_test$features[i]
  lst <- tolower(unlist(feature, use.names = FALSE))
  lst[grepl("hardwood+", lst, perl=TRUE)]<-"Hardwood"
  lst[grepl("roof+", lst, perl=TRUE)]<-"Roof Deck"
  lst[grepl("outdoor+", lst, perl=TRUE)]<-"Outdoor Space"
  lst[grepl("garden+", lst, perl=TRUE)]<-"Garden"
  lst[grepl("park+", lst, perl=TRUE)]<-"Parking"

  lst[grepl(paste(c('laundry', 'dryer', 'washer'), collapse="|"), lst, perl=TRUE)
      & !grepl(paste(c("dishwasher", "in building", "room"), collapse="|"), lst, perl=TRUE)]<-"Laundry in unit"
  lst[grepl(paste(c('laundry', 'dryer', 'washer'), collapse="|"), lst, perl=TRUE)
      & !grepl(paste(c("dishwasher", "in unit"), collapse="|"), lst, perl=TRUE)]<-"Laundry in building"

  lst[grepl("pre+", lst, perl=TRUE)]<-"Pre-war"
  lst[grepl(paste(c('gym', 'fitness'), collapse="|"), lst, perl=TRUE)]<-"Fitness"
  lst[grepl("super+", lst, perl=TRUE)]<-"Live-in super"
  lst[grepl("doorman+", lst, perl=TRUE)]<-"Doorman"

  # Minor changes
  lst[grepl("yoga+", lst, perl=TRUE)]<-"Yoga"
  lst[grepl("garage+", lst, perl=TRUE)]<-"Garage"
  lst[grepl("balcony+", lst, perl=TRUE)]<-"Balcony"
  lst[grepl("renovated+", lst, perl=TRUE)]<-"Renovated"
  lst[grepl("central a+", lst, perl=TRUE)]<-"Central a/c"
  lst[grepl("wifi+", lst, perl=TRUE)]<-"Wifi"

  lst[grepl("storage+", lst, perl=TRUE)]<-"Storage"
  lst[grepl("valet services+", lst, perl=TRUE)]<-"Valet services"
  lst[grepl("closet+", lst, perl=TRUE)]<-"Walk-in closet"
  lst[grepl("wheelchair+", lst, perl=TRUE)]<-"Wheelchair access"
  lst[grepl("internet+", lst, perl=TRUE)]<-"Internet"
  lst[grepl("high ceiling+", lst, perl=TRUE)]<-"High ceiling"

  #print(lst)
  train_test$features[i] <- list(lst)
}
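The row-by-row loop above is easy to follow but slow on ~120k listings. The same normalisation can be done in one pass by flattening the list column, applying the rules once, and re-splitting by the original lengths. A sketch (only two of the rules are repeated here; it relies on the rules renaming elements without changing how many each listing has, which is true above):

# One-pass variant: flatten, apply the substitution rules once, re-split
feats <- tolower(unlist(train_test$features, use.names = FALSE))
feats[grepl("hardwood", feats)] <- "Hardwood"
feats[grepl("gym|fitness", feats)] <- "Fitness"
# ... remaining rules exactly as in the loop above ...
train_test$features <- split(feats, rep(seq_len(nrow(train_test)),
                                        lengths(train_test$features)))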

Drop the unneeded columns, keeping only the fields required for further analysis:

feat <- c("bathrooms","bedrooms","building_id", "created","latitude", "description",
          "listing_id","longitude","manager_id", "price", "features",
          "display_address", "street_address","feature_count","photo_count", "interest_level"
          )

train_test = train_test[,names(train_test) %in% feat]

Let's process the features further: convert them all to lower case, flatten them into a vector, and sort it.

Then build a matrix with one column per feature and fill it according to each apartment's feature list in the source data.

feature = data.frame(feature = tolower(unlist(train_test$features))) %>% # convert all features to lower case
  group_by(feature) %>%
  summarise(feature_count = n()) %>%
  arrange(desc(feature_count)) %>%
  filter(feature_count >= 20)

#kable(feature, caption = "Feature Count")
features_list <- feature

# Build the sorted vector of retained feature names
features <- data.frame(feature = features_list$feature)
feature_vector <- sort(as.character(features$feature))
#feature_vector

#One-Hot Features (trickier than I initially thought but would be happy to see other variations)
train_features<-data.frame(listing_id=rep(train_test$listing_id,sapply(train_test$features,length)),
                           features=tolower(unlist(train_test$features)))
train_features$features<-as.character(train_features$features)
train_features<-train_features[train_features$features %in% feature_vector,]
features_matrix<-data.frame(cbind(listing_id=train_features$listing_id,model.matrix(~features-1,data=train_features)))

features_matrix<- features_matrix %>% 
  group_by(listing_id) %>% 
  summarise(across(everything(), ~ sum(.x, na.rm = TRUE)))

names(features_matrix)<-c("listing_id",feature_vector)

train_test<-merge(train_test,features_matrix, by = "listing_id", sort = FALSE,all.x=TRUE)
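To make the one-hot step concrete, here is what model.matrix(~features-1, ...) produces for a toy input (hypothetical data: two listings, two distinct features):

toy <- data.frame(listing_id = c(1, 1, 2),
                  features = c("doorman", "elevator", "doorman"))
# One row per (listing, feature) pair, one 0/1 column per feature level
model.matrix(~features-1, data = toy)

Summing these rows per listing_id, as the group_by/summarise step above does, collapses them into one indicator row per listing.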

Experience with similar problems suggests that geographic coordinates are among the most important model features.

Let's group the listings by their coordinates: overlay a grid with step (dx, dy) on the map and assign each listing to the cell it falls into, so listings in the same cell become "neighbours".

A clustering algorithm would give a noticeably better grouping; here we use the simple grid, which is faster and easier to reproduce.
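That said, base R does ship stats::kmeans, which can serve as a quick alternative grouping; a sketch (k = 40 clusters is an arbitrary choice, the NeighGroup_km column is hypothetical, and the coordinates should first be cleaned of outliers as below):

set.seed(42)
km <- kmeans(train_test[, c("latitude", "longitude")], centers = 40, nstart = 5)
train_test$NeighGroup_km <- as.factor(km$cluster)  # alternative to NeighGroup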

# Geographic bounds of the New York City map
# longitude: -74.06 .. -73.8
# latitude:   39.7 .. 40.9

while (TRUE) {
  # Latitude outliers: anything farther than 3 SD from the median
  x = median(train_test$latitude)
  train_test$tmp <- abs(train_test$latitude - x) > 3 * sd(train_test$latitude)
  
  if (sum(train_test$tmp)==0) break # no more outliers -> stop
  train_test$latitude[train_test$tmp] = x # replace outliers with the median
}
while (TRUE) {
  # Same for longitude
  y = median(train_test$longitude)
  train_test$tmp <- abs(train_test$longitude - y) > 3 * sd(train_test$longitude)
  
  if (sum(train_test$tmp)==0) break # no more outliers -> stop
  train_test$longitude[train_test$tmp] = y # replace outliers with the median
}
train_test$tmp <- NULL

dx <- (max(train_test$latitude) - min(train_test$latitude))/10000
dy <- (max(train_test$longitude) - min(train_test$longitude))/5000

train_test$NeighGroup <- as.factor(paste(abs(train_test$latitude%/%dx), abs(train_test$longitude%/%dy)))
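The %/% (integer division) maps a coordinate onto its grid-cell index: two listings land in the same cell exactly when both divided coordinates match. A toy illustration with hypothetical values:

lat <- c(1.23, 1.24, 2.48)
lat %/% 0.05  # cell row indices: 24 24 49 -> the first two points share a cell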

Now that we know each apartment's "neighbours", we can derive new and useful model features, such as the average price among the neighbours, the average number of rooms, and so on.

# Compute the group means
NGMean <- aggregate(.~NeighGroup, train_test[,c("NeighGroup", "bathrooms", "bedrooms",
                                                "price", "feature_count", "photo_count")], mean)
# Count the neighbours in each group
NGCnt <- aggregate(.~NeighGroup, train_test[,c("NeighGroup", "bathrooms")], length)
names(NGCnt) <- c("NeighGroup", "Neighbour_count")

# Merge into a single table
NG <- merge(NGCnt, NGMean, by = "NeighGroup")
names(NG) <- c("NeighGroup", "Neighbour_count", "Mbathrooms", "Mbedrooms", "Mprice", "Mfeature_count", "Mphoto_count")

# Add each group's statistics to the combined data frame
train_test <- merge(train_test, NG, by="NeighGroup")

# Create new relative features
train_test$RelBath <- train_test$bathrooms / train_test$Mbathrooms
train_test$RelBedr <- train_test$bedrooms / train_test$Mbedrooms
train_test$RelPrice <- train_test$price / train_test$Mprice
train_test$RelFCount <- train_test$feature_count / train_test$Mfeature_count
train_test$RelPCount <- train_test$photo_count / train_test$Mphoto_count

Now we process the remaining variables. String variables must be converted to numbers, and since absolute values often carry little information on their own, we also create synthetic features holding relative values for individual variables and their combinations.

# Convert strings to integer codes
train_test$building_id<-as.integer(factor(train_test$building_id))
train_test$manager_id<-as.integer(factor(train_test$manager_id))

# Convert addresses to integer codes (normalise first: lower case, trimmed)
train_test$display_address<-as.integer(factor(trimws(tolower(train_test$display_address))))
train_test$street_address<-as.integer(factor(trimws(tolower(train_test$street_address))))

# Parse the timestamp and extract month, day and hour
train_test$created<-ymd_hms(train_test$created)
train_test$month<- month(train_test$created)
train_test$day<- day(train_test$created)
train_test$hour<- hour(train_test$created)
train_test$created = NULL

# Use the description length in words as a feature, then drop the raw text
train_test$description_len<-sapply(strsplit(train_test$description, "\\s+"), length)
train_test$description = NULL

# Price per bedroom (fall back to the full price for zero-bedroom listings)
train_test$bed_price <- train_test$price/train_test$bedrooms
idx <- is.infinite(train_test$bed_price)
train_test$bed_price[idx] <- train_test$price[idx]

# Room counts and price per room
train_test$room_sum <- train_test$bedrooms + train_test$bathrooms
train_test$room_diff <- train_test$bedrooms - train_test$bathrooms
train_test$room_price <- train_test$price/train_test$room_sum
train_test$bed_ratio <- train_test$bedrooms/train_test$room_sum
idx <- is.infinite(train_test$room_price)
train_test$room_price[idx] <- train_test$price[idx]

# Log-transform the variables with large magnitudes
train_test$photo_count <- log1p(train_test$photo_count)
train_test$feature_count <- log1p(train_test$feature_count)
train_test$price <- log1p(train_test$price)
train_test$room_price <- log1p(train_test$room_price)
train_test$bed_price <- log1p(train_test$bed_price)

Let's look at the publicly available top solutions and try to reproduce them. First we count the listings in each interest group (low, medium, high); the share of each group in the training set then gives a baseline frequency, i.e. a prior probability.

######################################################################
# https://www.kaggle.com/vigilanf/forked-script

train_test$low <- as.integer(train_test$interest_level=="low")
train_test$medium <- as.integer(train_test$interest_level=="medium")
train_test$high <- as.integer(train_test$interest_level=="high")

n_train <- sum(train_test$interest_level != "none")
train_test$pred0_low <- sum(train_test$interest_level=="low")/n_train
train_test$pred0_medium <- sum(train_test$interest_level=="medium")/n_train
train_test$pred0_high <- sum(train_test$interest_level=="high")/n_train
# https://www.kaggle.com/lujing/cv-statistics-better-parameters

The data also identify the manager responsible for each listing. Let's compute, for every manager, the share of their listings in each interest level, create the corresponding relative variables, and add them to the model.

managers<-train_test[train_test$interest_level != "none", c("manager_id", "interest_level")]

managers$mngr_cnt = 1
managers$mngr_low <- as.integer(managers$interest_level=="low")
managers$mngr_medium <- as.integer(managers$interest_level=="medium")
managers$mngr_high <- as.integer(managers$interest_level=="high")

managers_agr <- aggregate(.~manager_id, managers[,c(1,3:6)], sum)
managers_agr$mngr_low <- managers_agr$mngr_low/managers_agr$mngr_cnt
managers_agr$mngr_medium <- managers_agr$mngr_medium/managers_agr$mngr_cnt
managers_agr$mngr_high <- managers_agr$mngr_high/managers_agr$mngr_cnt
#managers_agr[order(-managers_agr$mngr_cnt),]

train_test<- merge(train_test, managers_agr, by="manager_id", all = TRUE)
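One caveat, discussed in the cv-statistics kernel linked above: these per-manager rates are computed from the training labels and then merged back into the same training rows, so each row partly "sees" its own target. An out-of-fold variant avoids this; a minimal sketch for the high rate (the fold assignment and the oof_high column are illustrative, not part of the original script):

# Out-of-fold manager statistics: each row's rate is estimated only from
# the other folds, so it never includes the row's own label
set.seed(42)
fold_id <- sample(rep(1:5, length.out = nrow(managers)))
managers$oof_high <- NA_real_
for (k in 1:5) {
  in_k <- fold_id == k
  rates <- tapply(managers$mngr_high[!in_k], managers$manager_id[!in_k], mean)
  managers$oof_high[in_k] <- rates[as.character(managers$manager_id[in_k])]
}
# Managers unseen outside fold k get NA; a fallback to the global rate is sensible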

Finally, we clean up the feature set and fit the model with the XGBoost library.

train_test$low <- NULL
train_test$medium <- NULL
train_test$high <- NULL

#Convert labels to integers
train_test$interest_level<-as.integer(factor(train_test$interest_level))
train_test$NeighGroup <- as.integer(factor(train_test$NeighGroup))

#split train test
train <- train_test[train_test$listing_id %in%train_id,]
test <- train_test[train_test$listing_id %in%test_id,]

#Shift labels to be zero-based for xgboost
y <- train$interest_level
y = y - 1
train$interest_level = NULL
test$interest_level = NULL

train$features = NULL
test$features = NULL

# NAs introduced by the merges become zeros
train[is.na(train)] <- 0
test[is.na(test)] <- 0


###########################################################################################################
#convert the test set to xgb.DMatrix
dtest <- xgb.DMatrix(data.matrix(test))

library(caret)

#create folds
kfolds<- 5
folds<-createFolds(y, k = kfolds, list = TRUE, returnTrain = FALSE)
fold <- as.numeric(unlist(folds[1]))

x_train<-train[-fold,] #Train set
x_val<-train[fold,] #Out of fold validation set

y_train<-y[-fold]
y_val<-y[fold]


#convert to xgbmatrix
dtrain = xgb.DMatrix(data.matrix(x_train), label=y_train)
dval = xgb.DMatrix(data.matrix(x_val), label=y_val)

The training run itself, followed by the 30 most important model features:

seed = 42
set.seed(seed)

#Parameters for XGB
xgb_params = list(
 colsample_bytree= 0.7,
 subsample = 0.7,
 eta = 0.02,
 objective= 'multi:softprob',
 max_depth= 6,
 min_child_weight= 1,
 eval_metric= "mlogloss",
 num_class = 3,
 seed = seed
)
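Before the final run, it can be worth estimating the optimal number of rounds with cross-validation over all folds rather than the single hold-out used here; a sketch with xgb.cv (same parameters, result not used further in this script):

cv <- xgb.cv(params = xgb_params,
             data = xgb.DMatrix(data.matrix(train), label = y),
             nrounds = 1000, nfold = 5,
             early_stopping_rounds = 50, verbose = 0)
cv$best_iteration  # number of rounds at the best validation mlogloss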

#perform training
gbdt = xgb.train(params = xgb_params,
                data = dtrain,
                nrounds = 1000,
                watchlist = list(train = dtrain, val=dval),
                verbose = 1,
                print_every_n = 50,
                early_stopping_rounds=50)
## [0]    train-mlogloss:1.083721 val-mlogloss:1.084039
## [50]   train-mlogloss:0.691354 val-mlogloss:0.705828
## [100]  train-mlogloss:0.568898 val-mlogloss:0.594008
## [150]  train-mlogloss:0.517731 val-mlogloss:0.551149
## [200]  train-mlogloss:0.490893 val-mlogloss:0.531086
## [250]  train-mlogloss:0.473753 val-mlogloss:0.519934
## [300]  train-mlogloss:0.460324 val-mlogloss:0.512809
## [350]  train-mlogloss:0.448599 val-mlogloss:0.507772
## [400]  train-mlogloss:0.438855 val-mlogloss:0.504037
## [450]  train-mlogloss:0.429690 val-mlogloss:0.501060
## [500]  train-mlogloss:0.420945 val-mlogloss:0.498863
## [550]  train-mlogloss:0.412854 val-mlogloss:0.497172
## [600]  train-mlogloss:0.404796 val-mlogloss:0.495834
## [650]  train-mlogloss:0.397102 val-mlogloss:0.494691
## [700]  train-mlogloss:0.389980 val-mlogloss:0.493959
## [750]  train-mlogloss:0.383043 val-mlogloss:0.493375
## [763]  train-mlogloss:0.381297 val-mlogloss:0.493177
## [764]    train-mlogloss:0.381172 val-mlogloss:0.493156
## [765]    train-mlogloss:0.380998 val-mlogloss:0.493139
## [766]    train-mlogloss:0.380812 val-mlogloss:0.493114
## [767]    train-mlogloss:0.380693 val-mlogloss:0.493101
## [768]    train-mlogloss:0.380532 val-mlogloss:0.493080
## [769]    train-mlogloss:0.380386 val-mlogloss:0.493100
## [770]    train-mlogloss:0.380201 val-mlogloss:0.493063
## [771]    train-mlogloss:0.380046 val-mlogloss:0.493054
## [772]    train-mlogloss:0.379933 val-mlogloss:0.493024
## [773]    train-mlogloss:0.379786 val-mlogloss:0.493010
## [774]    train-mlogloss:0.379662 val-mlogloss:0.492994
## [775]    train-mlogloss:0.379547 val-mlogloss:0.493009
## [776]    train-mlogloss:0.379390 val-mlogloss:0.493000
## [777]    train-mlogloss:0.379269 val-mlogloss:0.493003
## [778]    train-mlogloss:0.379120 val-mlogloss:0.493021
## [779]    train-mlogloss:0.379029 val-mlogloss:0.493017
## [780]    train-mlogloss:0.378870 val-mlogloss:0.492998
## [781]    train-mlogloss:0.378754 val-mlogloss:0.492970
## [782]    train-mlogloss:0.378667 val-mlogloss:0.492944
## [783]    train-mlogloss:0.378533 val-mlogloss:0.492936
## [784]    train-mlogloss:0.378452 val-mlogloss:0.492921
## [785]    train-mlogloss:0.378349 val-mlogloss:0.492916
## [786]    train-mlogloss:0.378236 val-mlogloss:0.492908
## [787]    train-mlogloss:0.378098 val-mlogloss:0.492905
## [788]    train-mlogloss:0.377914 val-mlogloss:0.492914
## [789]    train-mlogloss:0.377783 val-mlogloss:0.492881
## [790]    train-mlogloss:0.377675 val-mlogloss:0.492896
## [791]    train-mlogloss:0.377583 val-mlogloss:0.492882
## [792]    train-mlogloss:0.377455 val-mlogloss:0.492891
## [793]    train-mlogloss:0.377300 val-mlogloss:0.492871
## [794]    train-mlogloss:0.377138 val-mlogloss:0.492857
## [795]    train-mlogloss:0.377005 val-mlogloss:0.492856
## [796]    train-mlogloss:0.376882 val-mlogloss:0.492869
## [797]    train-mlogloss:0.376773 val-mlogloss:0.492858
## [798]    train-mlogloss:0.376663 val-mlogloss:0.492839
## [799]    train-mlogloss:0.376539 val-mlogloss:0.492842
## [800]    train-mlogloss:0.376407 val-mlogloss:0.492829
## [801]    train-mlogloss:0.376294 val-mlogloss:0.492834
## [802]    train-mlogloss:0.376176 val-mlogloss:0.492811
## [803]    train-mlogloss:0.376026 val-mlogloss:0.492799
## [804]    train-mlogloss:0.375879 val-mlogloss:0.492809
## [805]    train-mlogloss:0.375747 val-mlogloss:0.492791
## [806]    train-mlogloss:0.375592 val-mlogloss:0.492774
## [807]    train-mlogloss:0.375411 val-mlogloss:0.492751
## [808]    train-mlogloss:0.375274 val-mlogloss:0.492720
## [809]    train-mlogloss:0.375147 val-mlogloss:0.492721
## [810]    train-mlogloss:0.374995 val-mlogloss:0.492694
## [811]    train-mlogloss:0.374815 val-mlogloss:0.492670
## [812]    train-mlogloss:0.374703 val-mlogloss:0.492651
## [813]    train-mlogloss:0.374578 val-mlogloss:0.492639
## [814]    train-mlogloss:0.374483 val-mlogloss:0.492626
## [815]    train-mlogloss:0.374372 val-mlogloss:0.492611
## [816]    train-mlogloss:0.374279 val-mlogloss:0.492597
## [817]    train-mlogloss:0.374162 val-mlogloss:0.492582
## [818]    train-mlogloss:0.374043 val-mlogloss:0.492580
## [819]    train-mlogloss:0.373856 val-mlogloss:0.492582
## [820]    train-mlogloss:0.373776 val-mlogloss:0.492583
## [821]    train-mlogloss:0.373687 val-mlogloss:0.492591
## [822]    train-mlogloss:0.373572 val-mlogloss:0.492604
## [823]    train-mlogloss:0.373454 val-mlogloss:0.492599
## [824]    train-mlogloss:0.373293 val-mlogloss:0.492597
## [825]    train-mlogloss:0.373157 val-mlogloss:0.492579
## [826]    train-mlogloss:0.373023 val-mlogloss:0.492590
## [827]    train-mlogloss:0.372864 val-mlogloss:0.492593
## [828]    train-mlogloss:0.372705 val-mlogloss:0.492598
## [829]    train-mlogloss:0.372625 val-mlogloss:0.492600
## [830]    train-mlogloss:0.372562 val-mlogloss:0.492593
## [831]    train-mlogloss:0.372423 val-mlogloss:0.492587
## [832]    train-mlogloss:0.372271 val-mlogloss:0.492582
## [833]    train-mlogloss:0.372106 val-mlogloss:0.492581
## [834]    train-mlogloss:0.371965 val-mlogloss:0.492583
## [835]    train-mlogloss:0.371808 val-mlogloss:0.492588
## [836]    train-mlogloss:0.371656 val-mlogloss:0.492597
## [837]    train-mlogloss:0.371596 val-mlogloss:0.492596
## [838]    train-mlogloss:0.371463 val-mlogloss:0.492554
## [839]    train-mlogloss:0.371366 val-mlogloss:0.492539
## [840]    train-mlogloss:0.371271 val-mlogloss:0.492519
## [841]    train-mlogloss:0.371121 val-mlogloss:0.492484
## [842]    train-mlogloss:0.371072 val-mlogloss:0.492482
## [843]    train-mlogloss:0.370958 val-mlogloss:0.492485
## [844]    train-mlogloss:0.370826 val-mlogloss:0.492481
## [845]    train-mlogloss:0.370674 val-mlogloss:0.492486
## [846]    train-mlogloss:0.370554 val-mlogloss:0.492493
## [847]    train-mlogloss:0.370451 val-mlogloss:0.492517
## [848]    train-mlogloss:0.370355 val-mlogloss:0.492512
## [849]    train-mlogloss:0.370265 val-mlogloss:0.492513
## [850]    train-mlogloss:0.370179 val-mlogloss:0.492505
## [851]    train-mlogloss:0.370048 val-mlogloss:0.492531
## [852]    train-mlogloss:0.369906 val-mlogloss:0.492500
## [853]    train-mlogloss:0.369775 val-mlogloss:0.492471
## [854]    train-mlogloss:0.369624 val-mlogloss:0.492460
## [855]    train-mlogloss:0.369526 val-mlogloss:0.492462
## [856]    train-mlogloss:0.369391 val-mlogloss:0.492454
## [857]    train-mlogloss:0.369280 val-mlogloss:0.492467
## [858]    train-mlogloss:0.369176 val-mlogloss:0.492465
## [859]    train-mlogloss:0.369020 val-mlogloss:0.492443
## [860]    train-mlogloss:0.368902 val-mlogloss:0.492435
## [861]    train-mlogloss:0.368767 val-mlogloss:0.492401
## [862]    train-mlogloss:0.368636 val-mlogloss:0.492353
## [863]    train-mlogloss:0.368521 val-mlogloss:0.492349
## [864]    train-mlogloss:0.368408 val-mlogloss:0.492354
## [865]    train-mlogloss:0.368322 val-mlogloss:0.492337
## [866]    train-mlogloss:0.368203 val-mlogloss:0.492313
## [867]    train-mlogloss:0.368069 val-mlogloss:0.492309
## [868]    train-mlogloss:0.367991 val-mlogloss:0.492298
## [869]    train-mlogloss:0.367848 val-mlogloss:0.492296
## [870]    train-mlogloss:0.367638 val-mlogloss:0.492299
## [871]    train-mlogloss:0.367547 val-mlogloss:0.492306
## [872]    train-mlogloss:0.367389 val-mlogloss:0.492306
## [873]    train-mlogloss:0.367290 val-mlogloss:0.492284
## [874]    train-mlogloss:0.367151 val-mlogloss:0.492287
## [875]    train-mlogloss:0.367002 val-mlogloss:0.492289
## [876]    train-mlogloss:0.366880 val-mlogloss:0.492288
## [877]    train-mlogloss:0.366737 val-mlogloss:0.492294
## [878]    train-mlogloss:0.366593 val-mlogloss:0.492282
## [879]    train-mlogloss:0.366458 val-mlogloss:0.492305
## [880]    train-mlogloss:0.366331 val-mlogloss:0.492318
## [881]    train-mlogloss:0.366211 val-mlogloss:0.492324
## [882]    train-mlogloss:0.366083 val-mlogloss:0.492320
## [883]    train-mlogloss:0.365985 val-mlogloss:0.492337
## [884]    train-mlogloss:0.365843 val-mlogloss:0.492329
## [885]    train-mlogloss:0.365735 val-mlogloss:0.492335
## [886]    train-mlogloss:0.365647 val-mlogloss:0.492335
## [887]    train-mlogloss:0.365519 val-mlogloss:0.492335
## [888]    train-mlogloss:0.365415 val-mlogloss:0.492309
## [889]    train-mlogloss:0.365318 val-mlogloss:0.492327
## [890]    train-mlogloss:0.365162 val-mlogloss:0.492324
## [891]    train-mlogloss:0.365044 val-mlogloss:0.492303
## [892]    train-mlogloss:0.364907 val-mlogloss:0.492317
## [893]    train-mlogloss:0.364742 val-mlogloss:0.492310
## [894]    train-mlogloss:0.364584 val-mlogloss:0.492286
## [895]    train-mlogloss:0.364466 val-mlogloss:0.492260
## [896]    train-mlogloss:0.364308 val-mlogloss:0.492265
## [897]    train-mlogloss:0.364141 val-mlogloss:0.492268
## [898]    train-mlogloss:0.364051 val-mlogloss:0.492295
## [899]    train-mlogloss:0.363951 val-mlogloss:0.492298
## [900]    train-mlogloss:0.363810 val-mlogloss:0.492312
## [901]    train-mlogloss:0.363676 val-mlogloss:0.492292
## [902]    train-mlogloss:0.363530 val-mlogloss:0.492276
## [903]    train-mlogloss:0.363374 val-mlogloss:0.492266
## [904]    train-mlogloss:0.363261 val-mlogloss:0.492256
## [905]    train-mlogloss:0.363144 val-mlogloss:0.492271
## [906]    train-mlogloss:0.363020 val-mlogloss:0.492252
## [907]    train-mlogloss:0.362884 val-mlogloss:0.492241
## [908]    train-mlogloss:0.362722 val-mlogloss:0.492225
## [909]    train-mlogloss:0.362619 val-mlogloss:0.492232
## [910]    train-mlogloss:0.362482 val-mlogloss:0.492199
## [911]    train-mlogloss:0.362370 val-mlogloss:0.492198
## [912]    train-mlogloss:0.362288 val-mlogloss:0.492182
## [913]    train-mlogloss:0.362159 val-mlogloss:0.492170
## [914]    train-mlogloss:0.362016 val-mlogloss:0.492174
## [915]    train-mlogloss:0.361891 val-mlogloss:0.492174
## [916]    train-mlogloss:0.361711 val-mlogloss:0.492193
## [917]    train-mlogloss:0.361575 val-mlogloss:0.492187
## [918]    train-mlogloss:0.361433 val-mlogloss:0.492186
## [919]    train-mlogloss:0.361311 val-mlogloss:0.492185
## [920]    train-mlogloss:0.361227 val-mlogloss:0.492164
## [921]    train-mlogloss:0.361146 val-mlogloss:0.492154
## [922]    train-mlogloss:0.361048 val-mlogloss:0.492158
## [923]    train-mlogloss:0.360972 val-mlogloss:0.492154
## [924]    train-mlogloss:0.360837 val-mlogloss:0.492157
## [925]    train-mlogloss:0.360746 val-mlogloss:0.492160
## [926]    train-mlogloss:0.360609 val-mlogloss:0.492151
## [927]    train-mlogloss:0.360493 val-mlogloss:0.492136
## [928]    train-mlogloss:0.360330 val-mlogloss:0.492097
## [929]    train-mlogloss:0.360190 val-mlogloss:0.492105
## [930]    train-mlogloss:0.360069 val-mlogloss:0.492119
## [931]    train-mlogloss:0.359981 val-mlogloss:0.492113
## [932]    train-mlogloss:0.359833 val-mlogloss:0.492099
## [933]    train-mlogloss:0.359697 val-mlogloss:0.492078
## [934]    train-mlogloss:0.359546 val-mlogloss:0.492074
## [935]    train-mlogloss:0.359403 val-mlogloss:0.492061
## [936]    train-mlogloss:0.359326 val-mlogloss:0.492050
## [937]    train-mlogloss:0.359239 val-mlogloss:0.492053
## [938]    train-mlogloss:0.359125 val-mlogloss:0.492066
## [939]    train-mlogloss:0.359055 val-mlogloss:0.492067
## [940]    train-mlogloss:0.358926 val-mlogloss:0.492076
## [941]    train-mlogloss:0.358789 val-mlogloss:0.492075
## [942]    train-mlogloss:0.358613 val-mlogloss:0.492094
## [943]    train-mlogloss:0.358497 val-mlogloss:0.492089
## [944]    train-mlogloss:0.358387 val-mlogloss:0.492103
## [945]    train-mlogloss:0.358277 val-mlogloss:0.492123
## [946]    train-mlogloss:0.358142 val-mlogloss:0.492128
## [947]    train-mlogloss:0.358035 val-mlogloss:0.492124
## [948]    train-mlogloss:0.357917 val-mlogloss:0.492118
## [949]    train-mlogloss:0.357806 val-mlogloss:0.492097
## [950]    train-mlogloss:0.357650 val-mlogloss:0.492108
## [951]    train-mlogloss:0.357492 val-mlogloss:0.492131
## [952]    train-mlogloss:0.357363 val-mlogloss:0.492131
## [953]    train-mlogloss:0.357248 val-mlogloss:0.492134
## [954]    train-mlogloss:0.357096 val-mlogloss:0.492124
## [955]    train-mlogloss:0.356989 val-mlogloss:0.492135
## [956]    train-mlogloss:0.356868 val-mlogloss:0.492146
## [957]    train-mlogloss:0.356743 val-mlogloss:0.492132
## [958]    train-mlogloss:0.356568 val-mlogloss:0.492135
## [959]    train-mlogloss:0.356470 val-mlogloss:0.492140
## [960]    train-mlogloss:0.356333 val-mlogloss:0.492154
## [961]    train-mlogloss:0.356233 val-mlogloss:0.492158
## [962]    train-mlogloss:0.356105 val-mlogloss:0.492164
## [963]    train-mlogloss:0.355961 val-mlogloss:0.492160
## [964]    train-mlogloss:0.355867 val-mlogloss:0.492127
## [965]    train-mlogloss:0.355745 val-mlogloss:0.492130
## [966]    train-mlogloss:0.355620 val-mlogloss:0.492128
## [967]    train-mlogloss:0.355524 val-mlogloss:0.492109
## [968]    train-mlogloss:0.355395 val-mlogloss:0.492100
## [969]    train-mlogloss:0.355302 val-mlogloss:0.492100
## [970]    train-mlogloss:0.355178 val-mlogloss:0.492086
## [971]    train-mlogloss:0.355069 val-mlogloss:0.492092
## [972]    train-mlogloss:0.354889 val-mlogloss:0.492107
## [973]    train-mlogloss:0.354753 val-mlogloss:0.492097
## [974]    train-mlogloss:0.354603 val-mlogloss:0.492085
## [975]    train-mlogloss:0.354507 val-mlogloss:0.492079
## [976]    train-mlogloss:0.354370 val-mlogloss:0.492089
## [977]    train-mlogloss:0.354264 val-mlogloss:0.492102
## [978]    train-mlogloss:0.354195 val-mlogloss:0.492074
## [979]    train-mlogloss:0.354063 val-mlogloss:0.492074
## [980]    train-mlogloss:0.353934 val-mlogloss:0.492078
## [981]    train-mlogloss:0.353851 val-mlogloss:0.492067
## [982]    train-mlogloss:0.353759 val-mlogloss:0.492075
## [983]    train-mlogloss:0.353654 val-mlogloss:0.492056
## [984]    train-mlogloss:0.353501 val-mlogloss:0.492043
## [985]    train-mlogloss:0.353398 val-mlogloss:0.492042
## [986]    train-mlogloss:0.353258 val-mlogloss:0.492038
## [987]    train-mlogloss:0.353155 val-mlogloss:0.492051
## [988]    train-mlogloss:0.353038 val-mlogloss:0.492036
## [989]    train-mlogloss:0.352884 val-mlogloss:0.492033
## [990]    train-mlogloss:0.352802 val-mlogloss:0.492032
## [991]    train-mlogloss:0.352660 val-mlogloss:0.492039
## [992]    train-mlogloss:0.352545 val-mlogloss:0.492041
## [993]    train-mlogloss:0.352429 val-mlogloss:0.492052
## [994]    train-mlogloss:0.352293 val-mlogloss:0.492055
## [995]    train-mlogloss:0.352130 val-mlogloss:0.492060
## [996]    train-mlogloss:0.352045 val-mlogloss:0.492051
## [997]    train-mlogloss:0.351928 val-mlogloss:0.492050
## [998]    train-mlogloss:0.351822 val-mlogloss:0.492065
## [999]    train-mlogloss:0.351762 val-mlogloss:0.492071
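In the log the validation loss flattens out near 0.492 after roughly iteration 900 while the training loss keeps decreasing, so the extra rounds mostly overfit. A minimal sketch of capping the number of rounds with early stopping, assuming dtrain, dval and param are the objects used to train gbdt above (the watchlist names match the train/val labels in the log):

gbdt <- xgb.train(
  params                = param,
  data                  = dtrain,
  nrounds               = 2000,                              # upper bound on boosting rounds
  watchlist             = list(train = dtrain, val = dval),  # printed as train-/val-mlogloss
  early_stopping_rounds = 50,                                # stop once val-mlogloss stops improving
  print_every_n         = 50
)
gbdt$best_iteration  # round with the best val-mlogloss; predict() uses it by default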
# Convert the flat prediction vector into a matrix: one row per test listing,
# one column per interest_level class
allpredictions <- as.data.frame(matrix(predict(gbdt, dtest), nrow = nrow(test), byrow = TRUE))

######################
## Generate Submission
allpredictions <- cbind(test$listing_id, allpredictions)
names(allpredictions) <- c("listing_id", "high", "low", "medium")
# Reorder the columns to the required submission format: listing_id, high, medium, low
allpredictions <- allpredictions[, c(1, 2, 4, 3)]
# write.csv(allpredictions, paste0("ivkrasnikov-", Sys.Date(), ".csv"), row.names = FALSE)
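A quick sanity check before writing the file (a sketch, not part of the original pipeline): every row of class probabilities should sum to about 1, and the columns must follow the order listing_id, high, medium, low.

# Verify that the three class probabilities in each row sum to ~1
stopifnot(all(abs(rowSums(allpredictions[, c("high", "medium", "low")]) - 1) < 1e-6))
head(allpredictions)  # eyeball the first rows and the column order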

# Feature importance of the fitted model; plot the 30 most important features
importance_matrix <- xgb.importance(colnames(train), model = gbdt)
xgb.plot.importance(importance_matrix[1:30])
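The same top-30 features can also be printed as a table (a sketch using kable, in the style of the feature-count table earlier):

kable(importance_matrix[1:30], caption = "Top-30 features by gain")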

We obtain a fairly good score at the cross-validation stage. However, after uploading the predictions to the site, the leaderboard score turns out to be no better than 0.63, which is below even an average result; such a gap between the local mlogloss (~0.49) and the leaderboard score suggests overfitting.
Possible reasons for such a low score:
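A more robust local estimate than a single hold-out split could be obtained with k-fold cross-validation; a minimal sketch with xgb.cv, assuming dtrain and param are the training matrix and parameter list used above:

cv <- xgb.cv(
  params                = param,
  data                  = dtrain,
  nrounds               = 1000,
  nfold                 = 5,                # 5-fold cross-validation
  stratified            = TRUE,             # keep class proportions in each fold
  early_stopping_rounds = 50,
  print_every_n         = 100
)
cv$evaluation_log[cv$best_iteration]  # mean train/test mlogloss at the best round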

The highest-scoring models turn out to be the ones without the text-feature preprocessing, nearest-neighbour search and the other transformations demonstrated in this work.
In that case, the most significant parameters are the 30 variables shown in the importance plot above.

The current best score obtained with this model is 0.55358.
The full program code is given in the listing; the additional processing steps mentioned above are commented out.