Import Packages

library(dslabs)
library(dplyr)
## 
## Attaching package: 'dplyr'
## The following objects are masked from 'package:stats':
## 
##     filter, lag
## The following objects are masked from 'package:base':
## 
##     intersect, setdiff, setequal, union
library(tidyr)
library(lubridate)
## 
## Attaching package: 'lubridate'
## The following objects are masked from 'package:base':
## 
##     date, intersect, setdiff, union
library(caret)
## Loading required package: ggplot2
## Loading required package: lattice
library(stats)
library(randomForest)
## randomForest 4.7-1.2
## Type rfNews() to see new features/changes/bug fixes.
## 
## Attaching package: 'randomForest'
## The following object is masked from 'package:ggplot2':
## 
##     margin
## The following object is masked from 'package:dplyr':
## 
##     combine
library(xgboost)
library(Ckmeans.1d.dp)
library(gbm)
## Loaded gbm 2.2.2
## This version of gbm is no longer under development. Consider transitioning to gbm3, https://github.com/gbm-developers/gbm3
library(e1071)
## 
## Attaching package: 'e1071'
## The following object is masked from 'package:ggplot2':
## 
##     element

1. Data Transformation

1.1 Load Data

credit_limit_data <- readRDS("CleanedDataSet/credit_limit_data.rds")

1.2 GEO CLUSTERING (K-MEANS)

Latitude and longitude are continuous values, but machine learning models treat them as independent numbers and do not naturally understand spatial proximity. K-means is therefore used to convert them into cluster-based features that help the models capture geographical patterns.

# ---------- GEO CLUSTERING (K-MEANS) ----------
set.seed(42)

coords <- credit_limit_data %>%
  select(latitude, longitude)

kmeans_model <- kmeans(coords, centers = 8)

credit_limit_data$geo_cluster <- NA
credit_limit_data$geo_cluster[as.numeric(rownames(coords))] <- kmeans_model$cluster
credit_limit_data$geo_cluster <- as.factor(credit_limit_data$geo_cluster)

# One-hot encode geo_cluster
geo_dummies <- model.matrix(~ geo_cluster - 1, data = credit_limit_data)
credit_limit_data <- bind_cols(
  credit_limit_data %>% select(-geo_cluster),
  as.data.frame(geo_dummies)
)
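The number of clusters is fixed at 8 above. If that choice needs justification, a quick elbow check over candidate values of k is one option; the sketch below is illustrative only, and the candidate range 2:12 is an assumption rather than part of the original analysis.

# ---------- OPTIONAL: ELBOW CHECK FOR k (illustrative sketch) ----------
set.seed(42)
wss <- sapply(2:12, function(k) {
  kmeans(coords, centers = k, nstart = 10)$tot.withinss
})

# The "elbow" in this curve suggests a reasonable number of clusters
plot(2:12, wss, type = "b",
     xlab = "Number of clusters (k)",
     ylab = "Total within-cluster sum of squares")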

1.3 ONE-HOT ENCODING

Non-ordinal categorical variables are converted into a binary numerical format that is suitable for machine learning models and avoids introducing a false ordering.

card_type_dummies  <- model.matrix(~ card_type - 1, data = credit_limit_data)
card_brand_dummies <- model.matrix(~ card_brand - 1, data = credit_limit_data)

colnames(card_type_dummies)  <- gsub("^card_type", "", colnames(card_type_dummies))
colnames(card_brand_dummies) <- gsub("^card_brand", "", colnames(card_brand_dummies))

credit_limit_data <- bind_cols(
  credit_limit_data,
  as.data.frame(card_type_dummies),
  as.data.frame(card_brand_dummies)
)

head(credit_limit_data[, c("card_type", "Credit", "Debit", "Debit (Prepaid)")], 20)
##     card_type Credit Debit Debit (Prepaid)
##        <char>  <num> <num>           <num>
##  1:    Credit      1     0               0
##  2:    Credit      1     0               0
##  3:     Debit      0     1               0
##  4:    Credit      1     0               0
##  5:     Debit      0     1               0
##  6:     Debit      0     1               0
##  7:    Credit      1     0               0
##  8:     Debit      0     1               0
##  9:    Credit      1     0               0
## 10:    Credit      1     0               0
## 11:     Debit      0     1               0
## 12:     Debit      0     1               0
## 13:    Credit      1     0               0
## 14:    Credit      1     0               0
## 15:     Debit      0     1               0
## 16:     Debit      0     1               0
## 17:     Debit      0     1               0
## 18:     Debit      0     1               0
## 19:    Credit      1     0               0
## 20:     Debit      0     1               0
##     card_type Credit Debit Debit (Prepaid)
head(credit_limit_data[, c("card_brand", "Amex", "Discover", "Mastercard", "Visa")], 20)
##     card_brand  Amex Discover Mastercard  Visa
##         <char> <num>    <num>      <num> <num>
##  1:       Amex     1        0          0     0
##  2: Mastercard     0        0          1     0
##  3: Mastercard     0        0          1     0
##  4:       Visa     0        0          0     1
##  5: Mastercard     0        0          1     0
##  6:       Visa     0        0          0     1
##  7:       Amex     1        0          0     0
##  8: Mastercard     0        0          1     0
##  9: Mastercard     0        0          1     0
## 10:       Amex     1        0          0     0
## 11: Mastercard     0        0          1     0
## 12: Mastercard     0        0          1     0
## 13:       Visa     0        0          0     1
## 14: Mastercard     0        0          1     0
## 15: Mastercard     0        0          1     0
## 16: Mastercard     0        0          1     0
## 17:       Visa     0        0          0     1
## 18:       Visa     0        0          0     1
## 19: Mastercard     0        0          1     0
## 20: Mastercard     0        0          1     0
##     card_brand  Amex Discover Mastercard  Visa
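For reference, caret's dummyVars() provides an equivalent route to the same binary indicator columns; this is a hedged alternative sketch, not part of the pipeline, and the generated column names differ slightly from the model.matrix() version used above.

# Equivalent one-hot encoding via caret (alternative sketch)
tmp <- credit_limit_data %>%
  mutate(card_type = as.factor(card_type), card_brand = as.factor(card_brand))
dv <- dummyVars(~ card_type + card_brand, data = tmp)
card_dummies <- predict(dv, newdata = tmp)
head(card_dummies, 3)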

1.4 DATA CONVERSION

  • New variable “retired” is set to 1 if the current age is greater than or equal to the retirement age, and 0 otherwise
  • New variable “age_group” is created by grouping exact ages into ranges that represent different life stages
  • New variable “credit_score_category” is created by categorizing credit scores into standard risk levels
  • New variable “gender_binary” is created by converting gender from a string into a numeric format
  • New variable “acct_tenure_years” is created as the time difference between a fixed reference date and the account opening date, representing how long the account has been active
credit_limit_data <- credit_limit_data %>%
  mutate(
    retired = ifelse(current_age >= retirement_age, 1, 0)
  )

head(credit_limit_data[, c("retired", "current_age", "retirement_age")], 20)
##     retired current_age retirement_age
##       <num>       <int>          <int>
##  1:       0          58             67
##  2:       1          76             70
##  3:       0          46             66
##  4:       1          65             62
##  5:       0          48             65
##  6:       0          54             65
##  7:       0          44             66
##  8:       0          66             68
##  9:       1          71             69
## 10:       0          44             66
## 11:       0          52             70
## 12:       1          81             70
## 13:       0          56             69
## 14:       0          47             67
## 15:       0          62             68
## 16:       0          54             61
## 17:       0          60             66
## 18:       0          64             67
## 19:       0          39             67
## 20:       0          61             69
##     retired current_age retirement_age
credit_limit_data <- credit_limit_data %>%
  mutate(
    age_group = cut(
      current_age,
      breaks = c(0, 24, 40, 60, Inf),
      labels = c(1, 2, 3, 4),
      right = TRUE
    )
  )

credit_limit_data$age_group <- as.integer(as.character(credit_limit_data$age_group))

head(credit_limit_data[, c("current_age", "age_group")], 20)
##     current_age age_group
##           <int>     <int>
##  1:          58         3
##  2:          76         4
##  3:          46         3
##  4:          65         4
##  5:          48         3
##  6:          54         3
##  7:          44         3
##  8:          66         4
##  9:          71         4
## 10:          44         3
## 11:          52         3
## 12:          81         4
## 13:          56         3
## 14:          47         3
## 15:          62         4
## 16:          54         3
## 17:          60         3
## 18:          64         4
## 19:          39         2
## 20:          61         4
##     current_age age_group
credit_limit_data <- credit_limit_data %>%
  mutate(
    credit_score_category = cut(
      credit_score,
      breaks = c(0, 579, 669, 739, 799, 850),
      labels = c(1, 2, 3, 4, 5),
      right = TRUE,
      include.lowest = TRUE
    )
  )

credit_limit_data$credit_score_category <- as.integer(
  as.character(credit_limit_data$credit_score_category)
)

head(credit_limit_data[, c("credit_score", "credit_score_category")], 20)
##     credit_score credit_score_category
##            <int>                 <int>
##  1:          727                     3
##  2:          763                     4
##  3:          715                     3
##  4:          667                     2
##  5:          702                     3
##  6:          748                     4
##  7:          547                     1
##  8:          661                     2
##  9:          698                     3
## 10:          547                     1
## 11:          749                     4
## 12:          728                     3
## 13:          752                     4
## 14:          700                     3
## 15:          735                     3
## 16:          726                     3
## 17:          715                     3
## 18:          812                     5
## 19:          714                     3
## 20:          748                     4
##     credit_score credit_score_category
credit_limit_data <- credit_limit_data %>%
  mutate(gender_binary = ifelse(gender == "Male", 1, 0))

head(credit_limit_data[, c("gender", "gender_binary")], 20)
##     gender gender_binary
##     <char>         <num>
##  1:   Male             1
##  2:   Male             1
##  3:   Male             1
##  4: Female             0
##  5:   Male             1
##  6:   Male             1
##  7:   Male             1
##  8:   Male             1
##  9: Female             0
## 10:   Male             1
## 11:   Male             1
## 12: Female             0
## 13: Female             0
## 14:   Male             1
## 15:   Male             1
## 16:   Male             1
## 17:   Male             1
## 18:   Male             1
## 19: Female             0
## 20:   Male             1
##     gender gender_binary
REFERENCE_DATE <- as.Date("2020-03-01")

credit_limit_data <- credit_limit_data %>%
  mutate(
    acct_open_date = as.Date(acct_open_date),
    acct_tenure_years =
      as.numeric(difftime(REFERENCE_DATE, acct_open_date, units = "days")) / 365.25
  )

head(credit_limit_data[, c("acct_open_date", "acct_tenure_years")], 20)
##     acct_open_date acct_tenure_years
##             <Date>             <num>
##  1:     1991-01-01          29.16359
##  2:     1994-01-01          26.16290
##  3:     1995-01-01          25.16359
##  4:     1995-01-01          25.16359
##  5:     1997-01-01          23.16222
##  6:     1997-01-01          23.16222
##  7:     1998-01-01          22.16290
##  8:     1998-01-01          22.16290
##  9:     1998-01-01          22.16290
## 10:     1999-01-01          21.16359
## 11:     1999-01-01          21.16359
## 12:     1999-01-01          21.16359
## 13:     1999-01-01          21.16359
## 14:     2000-01-01          20.16427
## 15:     2000-01-01          20.16427
## 16:     2000-01-01          20.16427
## 17:     2000-01-01          20.16427
## 18:     2000-01-01          20.16427
## 19:     2001-01-01          19.16222
## 20:     2001-01-01          19.16222
##     acct_open_date acct_tenure_years
summary(credit_limit_data$acct_tenure_years)
##    Min. 1st Qu.  Median    Mean 3rd Qu.    Max. 
##  0.0794  3.7906 10.0780  9.1243 13.4155 29.1636

1.5 DROP COLUMNS

Redundant or no-longer-needed features are removed so that the dataset contains only relevant features for modeling.

# ---------- DROP ORIGINAL STRING / SENSITIVE COLUMNS ----------
columns_to_drop <- c(
  "gender", "acct_open_date", "card_number", "expires", "cvv",
  "has_chip", "birth_year", "birth_month", "address", "latitude",
  "longitude", "transaction_frequency", "card_type", "card_brand",
  "retirement_age", "current_age","year_pin_last_changed", "avg_errors",
  "total_errors","num_refunds", "total_refunded",
  "min_transaction_amount", "credit_score"
)

# Drop only columns that exist
columns_to_drop <- intersect(columns_to_drop, names(credit_limit_data))

credit_limit_data <- credit_limit_data %>%
  select(-all_of(columns_to_drop))

colnames(credit_limit_data)
##  [1] "id"                     "client_id"              "num_cards_issued"      
##  [4] "per_capita_income"      "yearly_income"          "total_debt"            
##  [7] "num_credit_cards"       "debt_to_income_ratio"   "total_transactions"    
## [10] "avg_transaction_amount" "max_transaction_amount" "total_spent"           
## [13] "credit_limit"           "geo_cluster1"           "geo_cluster2"          
## [16] "geo_cluster3"           "geo_cluster4"           "geo_cluster5"          
## [19] "geo_cluster6"           "geo_cluster7"           "geo_cluster8"          
## [22] "Credit"                 "Debit"                  "Debit (Prepaid)"       
## [25] "Amex"                   "Discover"               "Mastercard"            
## [28] "Visa"                   "retired"                "age_group"             
## [31] "credit_score_category"  "gender_binary"          "acct_tenure_years"

2. Data Splitting

The dataset is split into training (80%) and testing (20%) sets at the client level so that all transactions for a given client stay in the same set. This avoids data leakage by ensuring that the same client never appears in both training and testing.

set.seed(42)
clients <- unique(credit_limit_data$client_id)
n <- length(clients)
clients <- sample(clients)

# Split indices
train_index <- 1:floor(0.8 * n)
test_index  <- (floor(0.8*n) + 1):n

# Assign clients to each set
train_clients <- clients[train_index]
test_clients  <- clients[test_index]

# Subset dataset
train_data <- credit_limit_data %>% filter(client_id %in% train_clients)
test_data  <- credit_limit_data %>% filter(client_id %in% test_clients)

# Separate features and target
x_train <- train_data %>% select(-credit_limit, -id, -client_id)
y_train <- train_data$credit_limit

x_test <- test_data %>% select(-credit_limit, -id, -client_id)
y_test <- test_data$credit_limit
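A quick sanity check (not in the original pipeline) confirms that the client-level split behaves as intended:

# Verify no client appears in both sets and report split sizes
stopifnot(length(base::intersect(train_clients, test_clients)) == 0)
c(train_rows = nrow(train_data), test_rows = nrow(test_data),
  train_clients = length(train_clients), test_clients = length(test_clients))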

3. MULTICOLLINEARITY CHECK

This step checks for highly correlated numeric features and removes them from both the training and testing data when the absolute correlation exceeds 0.8.

num_data <- credit_limit_data %>%
  select(where(is.numeric))

cor_matrix <- cor(num_data, use = "complete.obs")

cor_df <- as.data.frame(as.table(cor_matrix)) %>%
  rename(
    var1 = Var1,
    var2 = Var2,
    correlation = Freq
  ) %>%
  filter(
    abs(correlation) > 0.8,
    var1 != var2
  )

cor_df <- cor_df %>%
  rowwise() %>%
  mutate(pair = paste(sort(c(var1, var2)), collapse = "_")) %>%
  ungroup() %>%
  distinct(pair, .keep_all = TRUE) %>%
  select(-pair)

knitr::kable(cor_df, digits = 3)
var1            var2                 correlation
yearly_income   per_capita_income          0.950
total_spent     total_transactions         0.890
Debit           Credit                    -0.818
Visa            Mastercard                -0.817
x_train <- x_train %>%
  select(-per_capita_income, -total_transactions)
x_test <- x_test %>%
  select(-per_capita_income, -total_transactions)
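caret's findCorrelation() automates the same idea and should flag essentially the same columns; the sketch below is an optional cross-check, and the id/target columns would need to be excluded before acting on its output.

# Automated alternative: caret suggests which member of each correlated pair to drop
high_corr <- findCorrelation(cor_matrix, cutoff = 0.8, names = TRUE)
high_corr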

4. MODEL TRAINING

Five regression models are trained to predict the credit limit:
- Linear Regression (LM)
- Random Forest (RF)
- eXtreme Gradient Boosting (XGBoost)
- Gradient Boosting Machine (GBM)
- Support Vector Regression (SVR)

Evaluation Metrics (a short illustrative computation of these metrics follows the list)
- R-squared (R2): Proportion of variance in credit limit explained by the model. Higher is better
- RMSE (Root Mean Squared Error): Square root of the mean squared difference between predictions and actual values; penalizes large errors more heavily. Lower is better
- MAE (Mean Absolute Error): Average absolute difference between predictions and actual values. Lower is better
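For clarity, the three caret metrics reduce to the following hand computations (illustrative toy vectors only; note that caret's R2() defaults to the squared correlation between predictions and observations):

# Illustrative check of the metric definitions on toy vectors
pred <- c(10, 20, 30)
obs  <- c(12, 18, 33)

rmse_manual <- sqrt(mean((pred - obs)^2))  # Root Mean Squared Error
mae_manual  <- mean(abs(pred - obs))       # Mean Absolute Error
r2_manual   <- cor(pred, obs)^2            # R2 as squared correlation (caret default)

c(RMSE = rmse_manual, MAE = mae_manual, R2 = r2_manual)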

# ---------------- Train Linear Regression model ----------------
model_lm <- train(
  x = x_train,
  y = y_train,
  method = "lm"
)

lm_pred <- predict(model_lm, x_test)

lm_rmse <- RMSE(lm_pred, y_test)
lm_mae  <- MAE(lm_pred, y_test)
lm_r2   <- R2(lm_pred, y_test)


# ---------------- Train Random Forest model ----------------
set.seed(42)

rf_model <- randomForest(
  x = x_train,
  y = y_train,
  ntree = 500,        # n_estimators
  importance = TRUE
)

rf_pred <- predict(rf_model, x_test)


rf_rmse <- RMSE(rf_pred, y_test)
rf_mae  <- MAE(rf_pred, y_test)
rf_r2   <- R2(rf_pred, y_test)

# ------------------- Train XGBoost model -------------------

# Convert numeric data frames to matrices
x_train_matrix <- as.matrix(x_train)
x_test_matrix  <- as.matrix(x_test)

y_train_vector <- as.numeric(y_train)
y_test_vector  <- as.numeric(y_test)

dtrain <- xgb.DMatrix(data = x_train_matrix, label = y_train_vector)
dtest  <- xgb.DMatrix(data = x_test_matrix, label = y_test_vector)

set.seed(42)

xgb_params <- list(
  objective = "reg:squarederror",
  eta = 0.05,
  max_depth = 6,
  subsample = 0.8,
  colsample_bytree = 0.8
)

xgb_model <- xgb.train(
  params = xgb_params,
  data = dtrain,
  nrounds = 500,
  verbose = 0
)
xgb_pred <- predict(xgb_model, newdata = dtest)
xgb_rmse <- RMSE(xgb_pred, y_test_vector)
xgb_mae <- MAE(xgb_pred, y_test_vector)
xgb_r2 <- R2(xgb_pred, y_test_vector)

# ------------------- Train GBM model -------------------
set.seed(42)

model_gbm <- gbm(
  formula = credit_limit ~ . -id -client_id,
  data = train_data,
  distribution = "gaussian",
  n.trees = 500,
  interaction.depth = 6,
  shrinkage = 0.1,
  n.minobsinnode = 10,
  verbose = FALSE
)

gbm_pred <- predict(model_gbm, newdata = test_data, n.trees = 500)
gbm_rmse <- RMSE(gbm_pred, y_test)
gbm_mae  <- MAE(gbm_pred, y_test)
gbm_r2   <- R2(gbm_pred, y_test)

# ------------------- Train SVR model -------------------
model_svr <- svm(
  x = x_train,
  y = y_train,
  type = "eps-regression",   # SVR
  kernel = "radial",         # RBF kernel
  cost = 10,                 # C parameter
  gamma = 1 / ncol(x_train), # common default
  epsilon = 0.1
)

svr_pred <- predict(model_svr, x_test)
svr_rmse <- RMSE(svr_pred, y_test)
svr_mae  <- MAE(svr_pred, y_test)
svr_r2   <- R2(svr_pred, y_test)

# ------------------- Summary Table -------------------
results <- data.frame(
  Model = c("Linear Regression", "Random Forest", "XGBoost", "GBM", "SVR"),
  RMSE  = c(lm_rmse, rf_rmse, xgb_rmse, gbm_rmse, svr_rmse),
  MAE   = c(lm_mae, rf_mae, xgb_mae, gbm_mae, svr_mae),
  R2    = c(lm_r2, rf_r2, xgb_r2, gbm_r2, svr_r2)
)

results
##               Model     RMSE      MAE        R2
## 1 Linear Regression 9160.614 5833.327 0.5480276
## 2     Random Forest 8781.928 5339.454 0.5908154
## 3           XGBoost 8393.193 5469.750 0.6144570
## 4               GBM 8448.865 5380.416 0.6106920
## 5               SVR 9222.758 6073.933 0.5360446

5. HYPERPARAMETER TUNING

To improve prediction performance, hyperparameter tuning is performed on XGBoost (the best-performing model in the previous section) by testing different learning rates (eta) and tree depths (max_depth).

# Convert numeric data frames to matrices
x_train_matrix <- as.matrix(x_train)
x_test_matrix  <- as.matrix(x_test)

y_train_vector <- as.numeric(y_train)
y_test_vector  <- as.numeric(y_test)

dtrain <- xgb.DMatrix(data = x_train_matrix, label = y_train_vector)
dtest  <- xgb.DMatrix(data = x_test_matrix, label = y_test_vector)

# Set of 9 hyperparameter combinations (eta x max_depth grid)
param_list <- list(
  list(eta = 0.05, max_depth = 6),
  list(eta = 0.01,  max_depth = 6),
  list(eta = 0.1, max_depth = 6),
  list(eta = 0.05, max_depth = 5),
  list(eta = 0.01,  max_depth = 5),
  list(eta = 0.1, max_depth = 5),
  list(eta = 0.05, max_depth = 4),
  list(eta = 0.01,  max_depth = 4),
  list(eta = 0.1, max_depth = 4)
)

# Store results
results <- data.frame(
  Set = character(),
  RMSE = numeric(),
  MAE  = numeric(),
  R2   = numeric()
)

models_list <- list()

# Train and evaluate each set
for (i in seq_along(param_list)) {
  params <- param_list[[i]]
  
  set.seed(42)
  xgb_params <- list(
    objective = "reg:squarederror",
    eta = params$eta,
    max_depth = params$max_depth,
    subsample = 0.8,
    colsample_bytree = 0.8
  )
  
  model <- xgb.train(
    params = xgb_params,
    data = dtrain,
    nrounds = 500,
    verbose = 0
  )
  
  pred <- predict(model, newdata = dtest)
  
  results <- rbind(
    results,
    data.frame(
      Set = paste0("Set_", i),
      RMSE = RMSE(pred, y_test_vector),
      MAE  = MAE(pred, y_test_vector),
      R2   = R2(pred, y_test_vector)
    )
  )
  
  models_list[[i]] <- model
}

# Show comparison
results
##     Set     RMSE      MAE        R2
## 1 Set_1 8393.193 5469.750 0.6144570
## 2 Set_2 8213.909 5188.977 0.6325698
## 3 Set_3 8511.461 5611.461 0.6036345
## 4 Set_4 8299.762 5345.261 0.6229770
## 5 Set_5 8227.450 5197.688 0.6312360
## 6 Set_6 8585.872 5610.113 0.5973321
## 7 Set_7 8384.557 5342.359 0.6153398
## 8 Set_8 8285.508 5219.537 0.6273367
## 9 Set_9 8481.278 5481.965 0.6075670
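The grid above keeps nrounds fixed at 500. If the number of boosting rounds should also be tuned, xgb.cv() with early stopping is a common route; the sketch below is illustrative, and nfold = 5 and the stopping patience of 20 rounds are assumptions.

# Cross-validated choice of nrounds for one parameter set (illustrative sketch)
set.seed(42)
cv <- xgb.cv(
  params = list(
    objective = "reg:squarederror",
    eta = 0.01, max_depth = 6,
    subsample = 0.8, colsample_bytree = 0.8
  ),
  data = dtrain,
  nrounds = 1000,
  nfold = 5,
  early_stopping_rounds = 20,
  verbose = 0
)
cv$best_iteration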

6. FEATURE IMPORTANCE

# ------------------- Find best model -------------------
best_index <- which.min(results$RMSE)
best_model <- models_list[[best_index]]
best_set_name <- results$Set[best_index]

cat("Best hyperparameter set:", best_set_name, "\n")
## Best hyperparameter set: Set_2
cat("RMSE:", round(results$RMSE[best_index],3),
    "MAE:", round(results$MAE[best_index],3),
    "R2:", round(results$R2[best_index],3), "\n")
## RMSE: 8213.909 MAE: 5188.977 R2: 0.633
# ----------- Feature importance for best model -------------------
importance_matrix <- xgb.importance(
  feature_names = colnames(x_train_matrix),
  model = best_model
)

n_features <- nrow(importance_matrix)
cat("Total features:", n_features, "\n")
## Total features: 28
# View top 10 features
head(importance_matrix, 10)
##                    Feature       Gain      Cover  Frequency
##                     <char>      <num>      <num>      <num>
##  1:          yearly_income 0.40550375 0.33351594 0.17772424
##  2:                  Debit 0.18822962 0.08916624 0.03567457
##  3:                 Credit 0.07301733 0.03852494 0.02548184
##  4:        Debit (Prepaid) 0.05313758 0.03359398 0.01385285
##  5:      acct_tenure_years 0.04504496 0.07449848 0.12059859
##  6:             total_debt 0.04401212 0.06368296 0.09191994
##  7:   debt_to_income_ratio 0.03432842 0.04528809 0.06801334
##  8: avg_transaction_amount 0.03380690 0.04983254 0.07968866
##  9:            total_spent 0.02054469 0.06000097 0.07385100
## 10: max_transaction_amount 0.01950817 0.04603279 0.06893996
# Plot top 10 features
xgb.plot.importance(importance_matrix[1:10, ])
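The Ckmeans.1d.dp package loaded at the start is used by xgboost's ggplot-based importance plot, which additionally clusters the bars by importance; it can be used as an alternative to the base plot above.

# ggplot2 version of the importance plot (bar clustering via Ckmeans.1d.dp)
xgb.ggplot.importance(importance_matrix[1:10, ])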

# ------------------- Additional Results for XGBoost -------------------

# Predict using best model
best_pred <- predict(best_model, newdata = dtest)

# 1. Residuals
residuals <- y_test_vector - best_pred
residuals
## (vector of 1,220 residuals; full printout omitted)
# 2. Median Absolute Error (MedAE)
medae <- median(abs(residuals))
medae
## [1] 3169.763
# 3. Mean Absolute Percentage Error (MAPE)
mape <- mean(abs(residuals / y_test_vector)) * 100
mape
## [1] 199.1095
# 4. Prediction vs Actual Plot
plot(y_test_vector, best_pred,
     xlab = "Actual Credit Limit",
     ylab = "Predicted Credit Limit",
     main = paste("XGBoost (Best) Predicted vs Actual -", best_set_name))
abline(a = 0, b = 1, col = "red", lwd = 2)

# 5. Residual Plot
plot(best_pred, residuals,
     xlab = "Predicted Credit Limit",
     ylab = "Residuals",
     main = paste("XGBoost (Best) Residuals vs Predicted -", best_set_name))
abline(h = 0, col = "red", lwd = 2)