The following analysis evaluates neural network, logistic regression, and
decision tree models to identify the 1,000 free (freemium) High Note users
most likely to switch to the premium subscription.
#Loading libraries and the High Note dataset
library(tidyverse)        # readr, dplyr, ggplot2
library(skimr); library(caret); library(NeuralNetTools)   # skim(), train()/confusionMatrix(), plotnet()
setwd("/Users/sayemnasher/Desktop/module 2/data mining in r")
high_note_data <- read_csv("HN_data_PostModule.csv", col_types = "fnfnnnnnnnnnnnfnnnnnnnnnnff")
#Replacing missing factor values with an explicit "UNK" level
high_note_data <- high_note_data %>%
  mutate(across(c(net_user, male, adopter, good_country, delta1_good_country),
                ~ as.factor(recode(as.character(.x), "NA" = "UNK"))))
#Impute missing numeric values within gender groups (mean for roughly symmetric variables, median for skewed counts)
high_note_data <- high_note_data %>%
  group_by(male) %>%
  mutate(across(c(age, avg_friend_age, avg_friend_male, tenure,
                  delta1_avg_friend_age, delta1_avg_friend_male),
                ~ ifelse(is.na(.x), mean(.x, na.rm = TRUE), .x))) %>%
  mutate(across(c(friend_cnt, friend_country_cnt, subscriber_friend_cnt,
                  songsListened, lovedTracks, posts, playlists, shouts,
                  delta1_friend_cnt, delta1_friend_country_cnt,
                  delta1_subscriber_friend_cnt, delta1_songsListened,
                  delta1_lovedTracks, delta1_posts, delta1_playlists, delta1_shouts),
                ~ ifelse(is.na(.x), median(.x, na.rm = TRUE), .x))) %>%
  ungroup()
#Log-transform the heavily skewed count variables (log(x + 1) to handle zeros)
high_note_data$friend_cnt<-log(high_note_data$friend_cnt+1)
high_note_data$playlists<-log(high_note_data$playlists+1)
high_note_data$posts<-log(high_note_data$posts+1)
high_note_data$songsListened<-log(high_note_data$songsListened+1)
#Min-max normalize numeric columns to [0, 1]; non-numeric columns are returned unchanged
normalize <- function(vec) {
  if (is.numeric(vec)) {
    vec <- (vec - min(vec)) / (max(vec) - min(vec))
  }
  return(vec)
}
high_note_data <- as.data.frame(lapply(high_note_data, normalize))
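# Quick sanity check of normalize() on toy inputs (illustrative values only, not
# from the dataset): numeric vectors are rescaled to [0, 1], factors pass through.
normalize(c(2, 4, 6))            # returns 0.0 0.5 1.0
normalize(factor(c("a", "b")))   # returned unchanged because is.numeric() is FALSE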
skim(high_note_data)
Data summary

| Name | high_note_data |
|---|---|
| Number of rows | 107213 |
| Number of columns | 27 |
| Column type frequency: | |
| factor | 5 |
| numeric | 22 |
| Group variables | None |

Variable type: factor

| skim_variable | n_missing | complete_rate | ordered | n_unique | top_counts |
|---|---|---|---|---|---|
| net_user | 0 | 1.00 | FALSE | 107205 | #NA: 9, : 1, : 1, ___: 1 |
| male | 38950 | 0.64 | FALSE | 2 | 1: 42548, 0: 25715 |
| good_country | 39155 | 0.63 | FALSE | 2 | 0: 43025, 1: 25033 |
| delta1_good_country | 39393 | 0.63 | FALSE | 3 | 0: 67728, 1: 57, -1: 35 |
| adopter | 0 | 1.00 | FALSE | 2 | 0: 100000, 1: 7213 |

Variable type: numeric

| skim_variable | n_missing | complete_rate | mean | sd | p0 | p25 | p50 | p75 | p100 | hist |
|---|---|---|---|---|---|---|---|---|---|---|
| age | 0 | 1 | 0.23 | 0.07 | 0 | 0.20 | 0.24 | 0.24 | 1 | ▃▇▁▁▁ |
| friend_cnt | 0 | 1 | 0.19 | 0.14 | 0 | 0.08 | 0.16 | 0.28 | 1 | ▇▅▁▁▁ |
| avg_friend_age | 0 | 1 | 0.24 | 0.07 | 0 | 0.19 | 0.23 | 0.25 | 1 | ▃▇▁▁▁ |
| avg_friend_male | 0 | 1 | 0.63 | 0.33 | 0 | 0.50 | 0.66 | 1.00 | 1 | ▃▂▃▇▇ |
| friend_country_cnt | 0 | 1 | 0.02 | 0.04 | 0 | 0.01 | 0.01 | 0.02 | 1 | ▇▁▁▁▁ |
| subscriber_friend_cnt | 0 | 1 | 0.00 | 0.01 | 0 | 0.00 | 0.00 | 0.00 | 1 | ▇▁▁▁▁ |
| songsListened | 0 | 1 | 0.54 | 0.21 | 0 | 0.44 | 0.59 | 0.69 | 1 | ▂▂▆▇▁ |
| lovedTracks | 0 | 1 | 0.00 | 0.01 | 0 | 0.00 | 0.00 | 0.00 | 1 | ▇▁▁▁▁ |
| posts | 0 | 1 | 0.02 | 0.08 | 0 | 0.00 | 0.00 | 0.00 | 1 | ▇▁▁▁▁ |
| playlists | 0 | 1 | 0.04 | 0.05 | 0 | 0.00 | 0.00 | 0.09 | 1 | ▇▁▁▁▁ |
| shouts | 0 | 1 | 0.00 | 0.00 | 0 | 0.00 | 0.00 | 0.00 | 1 | ▇▁▁▁▁ |
| tenure | 0 | 1 | 0.36 | 0.17 | 0 | 0.22 | 0.34 | 0.49 | 1 | ▅▇▆▂▁ |
| delta1_friend_cnt | 0 | 1 | 0.48 | 0.01 | 0 | 0.48 | 0.48 | 0.48 | 1 | ▁▁▇▁▁ |
| delta1_avg_friend_age | 0 | 1 | 0.72 | 0.01 | 0 | 0.72 | 0.72 | 0.72 | 1 | ▁▁▁▇▁ |
| delta1_avg_friend_male | 0 | 1 | 0.50 | 0.02 | 0 | 0.50 | 0.50 | 0.50 | 1 | ▁▁▇▁▁ |
| delta1_friend_country_cnt | 0 | 1 | 0.63 | 0.01 | 0 | 0.63 | 0.63 | 0.63 | 1 | ▁▁▁▇▁ |
| delta1_subscriber_friend_cnt | 0 | 1 | 0.35 | 0.01 | 0 | 0.35 | 0.35 | 0.35 | 1 | ▁▇▁▁▁ |
| delta1_songsListened | 0 | 1 | 0.42 | 0.01 | 0 | 0.42 | 0.42 | 0.42 | 1 | ▁▁▇▁▁ |
| delta1_lovedTracks | 0 | 1 | 0.44 | 0.01 | 0 | 0.44 | 0.44 | 0.44 | 1 | ▁▁▇▁▁ |
| delta1_posts | 0 | 1 | 0.00 | 0.00 | 0 | 0.00 | 0.00 | 0.00 | 1 | ▇▁▁▁▁ |
| delta1_playlists | 0 | 1 | 0.06 | 0.00 | 0 | 0.06 | 0.06 | 0.06 | 1 | ▇▁▁▁▁ |
| delta1_shouts | 0 | 1 | 0.09 | 0.00 | 0 | 0.09 | 0.09 | 0.09 | 1 | ▇▁▁▁▁ |
#create two feature sets: "past" (demographics plus the delta1 period-over-period variables) and "all" (every feature)
high_note_data_past <- high_note_data[,c('net_user', 'age', 'male', 'tenure', 'delta1_friend_cnt','delta1_avg_friend_age', 'delta1_avg_friend_male','delta1_friend_country_cnt', 'delta1_subscriber_friend_cnt', 'delta1_songsListened', 'delta1_lovedTracks', 'delta1_posts', 'delta1_playlists', 'delta1_shouts', 'delta1_good_country','adopter')]
high_note_data_past_y = high_note_data %>% pull("adopter")
high_note_data_y = high_note_data %>% pull("adopter")
high_note_data_past_x = high_note_data_past %>% select(-c("adopter"))
high_note_data_x = high_note_data %>% select(-c("adopter"))
HN_data_past_X_id<-high_note_data_past_x %>% select(c("net_user"))
HN_data_X_id<-high_note_data_x %>% select(c("net_user"))
high_note_data_past_x<-high_note_data_past_x %>% select(-c("net_user"))
high_note_data_x<-high_note_data_x %>% select(-c("net_user"))
#split the data
smp_size <- floor(.75 *nrow(high_note_data_past_x))
set.seed(12345)
train_ind <- sample(seq_len(nrow(high_note_data_past_x)), size = smp_size)
#create training and test sets for x and y
hn_data_train_past_x <- high_note_data_past_x[train_ind, ]
hn_data_train_x <- high_note_data_x [train_ind, ]
hn_data_test_past_x <- high_note_data_past_x[-train_ind, ]
hn_data_test_x <- high_note_data_x[-train_ind, ]
hn_data_train_past_y <- high_note_data_past_y[train_ind]
hn_data_train_y <- high_note_data_y[train_ind]
hn_data_test_past_y <- high_note_data_past_y[-train_ind]
hn_data_test_y <- high_note_data_y[-train_ind]
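# Quick check that the 75/25 split worked as intended (optional diagnostic).
nrow(hn_data_train_past_x)   # ~75% of users
nrow(hn_data_test_past_x)    # remaining ~25%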
#additional libraries required
install.packages("https://cran.r-project.org/src/contrib/Archive/DMwR/DMwR_0.4.1.tar.gz", repos=NULL, type = "source")
library(DMwR)
#full training set x and y
set.seed(1234)
hn_data_train_past <- cbind(hn_data_train_past_x, hn_data_train_past_y)
hn_data_train <- cbind(hn_data_train_x, hn_data_train_y)
#SMOTE to balance the training data (oversample the minority adopters, undersample the majority non-adopters)
balanced_hn_train_past <- SMOTE(hn_data_train_past_y ~ ., data.frame(hn_data_train_past), perc.over = 100, perc.under = 200)
balanced_hn_train <- SMOTE(hn_data_train_y ~., data.frame(hn_data_train), perc.over = 100, perc.under = 200)
#remove y column and store it
hn_data_train_past_x <- balanced_hn_train_past %>% select(-hn_data_train_past_y)
hn_data_train_x <- balanced_hn_train %>% select(-hn_data_train_y)
hn_data_train_past_y <- balanced_hn_train_past %>% pull(hn_data_train_past_y) %>% as.factor()
hn_data_train_y <- balanced_hn_train %>% pull (hn_data_train_y) %>% as.factor()
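# Sanity check on the SMOTE output (optional): with perc.over = 100 and
# perc.under = 200 the two classes should come out roughly 50/50.
table(hn_data_train_past_y)
prop.table(table(hn_data_train_y))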
#creating a data frame for cost benefit analysis
clf_results <- data.frame(matrix(ncol = 5, nrow = 0))
names(clf_results) <- c("Model", "Accuracy", "Precision", "Recall", "F1")
#store TP,TN,FP,FN
cost_benefit_df <- data.frame(matrix(ncol = 5, nrow = 0))
names(cost_benefit_df) <- c("Model", "TP", "FN", "FP", "TN")
#LOGISTIC REGRESSION PAST DATA ONLY
yyy_fit <- train(hn_data_train_past_x,
hn_data_train_past_y,
method = "glm",
family = "binomial",
preProc = c("center", "scale"))
##predict on test
yyy_predict <- predict(yyy_fit, newdata = hn_data_test_past_x, positive="1" )
glm_prob <- predict(yyy_fit, newdata = hn_data_test_past_x, type = "prob")
## Add into clf_results dataframe
x1 <- confusionMatrix(yyy_predict, hn_data_test_past_y )[["overall"]]
y1 <- confusionMatrix(yyy_predict, hn_data_test_past_y)[["byClass"]]
clf_results[nrow(clf_results) + 1,] <- list(Model = "Log Regression Past",
Accuracy = round (x1[["Accuracy"]],3),
Precision = round (y1[["Precision"]],3),
Recall = round (y1[["Recall"]],3),
F1 = round (y1[["F1"]],3))
## Add into cost_benefit_df dataframe
a1 <- confusionMatrix(yyy_predict, hn_data_test_past_y)
cost_benefit_df[nrow(cost_benefit_df) + 1,] <- list(Model = "Log Regression Past",
TP = a1[["table"]][4],
FN = a1[["table"]][3],
FP = a1[["table"]][2],
TN = a1[["table"]][1])
print(yyy_fit)
## Generalized Linear Model
##
## 21468 samples
## 14 predictor
## 2 classes: '0', '1'
##
## Pre-processing: centered (12), scaled (12), ignore (2)
## Resampling: Bootstrapped (25 reps)
## Summary of sample sizes: 21468, 21468, 21468, 21468, 21468, 21468, ...
## Resampling results:
##
## Accuracy Kappa
## 0.6323881 0.2702951
#running log reg on all data
yyy_fit_2 <- train(hn_data_train_x,
hn_data_train_y,
method = "glm",
family = "binomial",
preProc = c("center", "scale"))
##predict test data
yyy_predict_2 <- predict(yyy_fit_2, newdata = hn_data_test_x, positive="1" )
glm_prob_2 <- predict(yyy_fit_2, newdata = hn_data_test_x, type = "prob")
## Add results into clf_results dataframe
xa <- confusionMatrix(yyy_predict_2, hn_data_test_y )[["overall"]]
ya <- confusionMatrix(yyy_predict_2, hn_data_test_y)[["byClass"]]
clf_results[nrow(clf_results) + 1,] <- list(Model = "Log Regression all",
Accuracy = round (xa[["Accuracy"]],3),
Precision = round (ya[["Precision"]],3),
Recall = round (ya[["Recall"]],3),
F1 = round (ya[["F1"]],3))
## Add results into cost_benefit_df dataframe
aa <- confusionMatrix(yyy_predict_2, hn_data_test_y )
cost_benefit_df[nrow(cost_benefit_df) + 1,] <- list(Model = "Log Regression all",
TP = aa[["table"]][4],
FN = aa[["table"]][3],
FP = aa[["table"]][2],
TN = aa[["table"]][1])
print(yyy_fit_2)
## Generalized Linear Model
##
## 21468 samples
## 25 predictor
## 2 classes: '0', '1'
##
## Pre-processing: centered (22), scaled (22), ignore (3)
## Resampling: Bootstrapped (25 reps)
## Summary of sample sizes: 21468, 21468, 21468, 21468, 21468, 21468, ...
## Resampling results:
##
## Accuracy Kappa
## 0.7203991 0.4409425
#Neural network for past data
my.grid <- expand.grid(.decay = c(0.5, 0.1), .size = c(5, 7))
nn_clf_fit <- train(hn_data_train_past_x,
hn_data_train_past_y,
method = "nnet",
trace = F,
tuneGrid = my.grid,
linout = 0,
stepmax = 100,
threshold = 0.01 )
## Plot Neural Network
plotnet(nn_clf_fit$finalModel, y_names = "adopter")

print(nn_clf_fit)
## Neural Network
##
## 21468 samples
## 14 predictor
## 2 classes: '0', '1'
##
## No pre-processing
## Resampling: Bootstrapped (25 reps)
## Summary of sample sizes: 21468, 21468, 21468, 21468, 21468, 21468, ...
## Resampling results across tuning parameters:
##
## decay size Accuracy Kappa
## 0.1 5 0.6650791 0.3287331
## 0.1 7 0.6661859 0.3307394
## 0.5 5 0.6522559 0.3024155
## 0.5 7 0.6525476 0.3030672
##
## Accuracy was used to select the optimal model using the largest value.
## The final values used for the model were size = 7 and decay = 0.1.
#predict neural net on test data
nn_clf_predict <- predict(nn_clf_fit,hn_data_test_past_x, positive = "1")
NN_prob <- predict(nn_clf_fit, newdata = hn_data_test_past_x, type = "prob")
## Add into clf_results dataframe
x4 <- confusionMatrix(nn_clf_predict,hn_data_test_past_y)[["overall"]]
y4 <- confusionMatrix(nn_clf_predict,hn_data_test_past_y)[["byClass"]]
clf_results[nrow(clf_results) + 1,] <- list(Model = "Neural Network Past",
Accuracy = round (x4[["Accuracy"]],3),
Precision = round (y4[["Precision"]],3),
Recall = round (y4[["Recall"]],3),
F1 = round (y4[["F1"]],3))
## Add results into cost_benefit_df dataframe
a4 <- confusionMatrix(nn_clf_predict,hn_data_test_past_y)
cost_benefit_df[nrow(cost_benefit_df) + 1,] <- list(Model = "Neural Network Past",
TP = a4[["table"]][4],
FN = a4[["table"]][3],
FP = a4[["table"]][2],
TN = a4[["table"]][1])
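# Optional diagnostic (not part of the original printed output): caret's
# weight-based variable importance for the fitted nnet model, to see which
# inputs drive the network's predictions.
varImp(nn_clf_fit)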
#neural net for all data
my.grid <- expand.grid(.decay = c(0.5, 0.1), .size = c(5, 7))
nn_clf_fit_2 <- train(hn_data_train_x,
hn_data_train_y,
method = "nnet",
trace = F,
tuneGrid = my.grid,
linout = 0,
stepmax = 100,
threshold = 0.01 )
print(nn_clf_fit_2)
## Neural Network
##
## 21468 samples
## 25 predictor
## 2 classes: '0', '1'
##
## No pre-processing
## Resampling: Bootstrapped (25 reps)
## Summary of sample sizes: 21468, 21468, 21468, 21468, 21468, 21468, ...
## Resampling results across tuning parameters:
##
## decay size Accuracy Kappa
## 0.1 5 0.7399401 0.4797283
## 0.1 7 0.7390476 0.4779258
## 0.5 5 0.7230901 0.4457859
## 0.5 7 0.7249462 0.4495021
##
## Accuracy was used to select the optimal model using the largest value.
## The final values used for the model were size = 5 and decay = 0.1.
plotnet(nn_clf_fit_2$finalModel, y_names = "adopter")

#predict NN (all data) on test data
nn_clf_predict_2 <- predict(nn_clf_fit_2,hn_data_test_x, positive = "1")
NN_prob_2 <- predict(nn_clf_fit_2, newdata = hn_data_test_x, type = "prob")
## Add results into clf_results dataframe
xd <- confusionMatrix(nn_clf_predict_2,hn_data_test_y)[["overall"]]
yd <- confusionMatrix(nn_clf_predict_2,hn_data_test_y)[["byClass"]]
clf_results[nrow(clf_results) + 1,] <- list(Model = "Neural Network All",
Accuracy = round (xd[["Accuracy"]],3),
Precision = round (yd[["Precision"]],3),
Recall = round (yd[["Recall"]],3),
F1 = round (yd[["F1"]],3))
## Add results into cost_benefit_df dataframe
ad <- confusionMatrix(nn_clf_predict_2,hn_data_test_y)
cost_benefit_df[nrow(cost_benefit_df) + 1,] <- list(Model = "Neural Network All",
TP = ad[["table"]][4],
FN = ad[["table"]][3],
FP = ad[["table"]][2],
TN = ad[["table"]][1])
#decision tree on past data
cross_validation <- trainControl(## 10-fold CV
method = "repeatedcv",
number = 10,
## repeated three times
repeats = 3)
Param_Grid <- expand.grid(maxdepth = 4)
dtree_fit <- train(hn_data_train_past_x,
hn_data_train_past_y,
method = "rpart2",
# split - criteria to split nodes
parms = list(split = "gini"),
tuneGrid = Param_Grid,
trControl = cross_validation,
# preProc - perform listed pre-processing to predictor dataframe
preProc = c("center", "scale"))
## Predict on test data
dtree_predict <- predict(dtree_fit, newdata = hn_data_test_past_x)
dtree_prob <- predict(dtree_fit, newdata = hn_data_test_past_x, type = "prob")
## Add results into clf_results dataframe
x6 <- confusionMatrix(dtree_predict, hn_data_test_past_y )[["overall"]]
y6 <- confusionMatrix(dtree_predict, hn_data_test_past_y )[["byClass"]]
clf_results[nrow(clf_results) + 1,] <- list(Model = "Decision Tree Past",
Accuracy = round (x6[["Accuracy"]],3),
Precision = round (y6[["Precision"]],3),
Recall = round (y6[["Recall"]],3),
F1 = round (y6[["F1"]],3))
a6 <- confusionMatrix(dtree_predict, hn_data_test_past_y )
cost_benefit_df[nrow(cost_benefit_df) + 1,] <- list(Model = "Decision Tree Past",
TP = a6[["table"]][4],
FN = a6[["table"]][3],
FP = a6[["table"]][2],
TN = a6[["table"]][1])
print(dtree_fit)
## CART
##
## 21468 samples
## 14 predictor
## 2 classes: '0', '1'
##
## Pre-processing: centered (12), scaled (12), ignore (2)
## Resampling: Cross-Validated (10 fold, repeated 3 times)
## Summary of sample sizes: 19321, 19321, 19322, 19320, 19321, 19322, ...
## Resampling results:
##
## Accuracy Kappa
## 0.7526707 0.5053419
##
## Tuning parameter 'maxdepth' was held constant at a value of 4
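# The fitted tree can also be drawn for interpretation (assumes the rpart.plot
# package is installed; this plot was not part of the original output).
library(rpart.plot)
rpart.plot(dtree_fit$finalModel)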
#using decision tree on all data
cross_validation <- trainControl(## 10-fold CV
method = "repeatedcv",
number = 10,
## repeated three times
repeats = 3)
Param_Grid <- expand.grid(maxdepth = 4)
dtree_fit_2 <- train(hn_data_train_x,
hn_data_train_y,
method = "rpart2",
# split - criteria to split nodes
parms = list(split = "gini"),
tuneGrid = Param_Grid,
trControl = cross_validation,
# preProc - perform listed pre-processing to predictor dataframe
preProc = c("center", "scale"))
## Predict on test data
dtree_predict_2 <- predict(dtree_fit_2, newdata = hn_data_test_x)
dtree_prob_2 <- predict(dtree_fit_2, newdata = hn_data_test_x, type = "prob")
## Add results into clf_results dataframe
xg <- confusionMatrix(dtree_predict_2, hn_data_test_y )[["overall"]]
yg <- confusionMatrix(dtree_predict_2, hn_data_test_y )[["byClass"]]
clf_results[nrow(clf_results) + 1,] <- list(Model = "Decision Tree All",
Accuracy = round (xg[["Accuracy"]],3),
Precision = round (yg[["Precision"]],3),
Recall = round (yg[["Recall"]],3),
F1 = round (yg[["F1"]],3))
ag <- confusionMatrix(dtree_predict_2, hn_data_test_y )
cost_benefit_df[nrow(cost_benefit_df) + 1,] <- list(Model = "Decision Tree All",
TP = ag[["table"]][4],
FN = ag[["table"]][3],
FP = ag[["table"]][2],
TN = ag[["table"]][1])
print(dtree_fit_2)
## CART
##
## 21468 samples
## 25 predictor
## 2 classes: '0', '1'
##
## Pre-processing: centered (22), scaled (22), ignore (3)
## Resampling: Cross-Validated (10 fold, repeated 3 times)
## Summary of sample sizes: 19320, 19322, 19321, 19320, 19322, 19322, ...
## Resampling results:
##
## Accuracy Kappa
## 0.7775922 0.5551849
##
## Tuning parameter 'maxdepth' was held constant at a value of 4
#summary
print(cost_benefit_df)
## Model TP FN FP TN
## 1 Log Regression Past 713 497 3964 10104
## 2 Log Regression all 787 423 3817 10251
## 3 Neural Network Past 793 417 5286 8782
## 4 Neural Network All 841 369 3977 10091
## 5 Decision Tree Past 1161 685 6820 18138
## 6 Decision Tree All 1028 818 4916 20042
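# One way to turn these counts into an expected-value comparison. The economics
# here are hypothetical placeholders (value of a conversion V, cost of targeting
# a user c_target), not figures from the case data.
V <- 10        # hypothetical revenue from one converted premium user
c_target <- 1  # hypothetical cost of targeting one user with a promotion
cost_benefit_df %>%
  mutate(Expected_Value = TP * (V - c_target) - FP * c_target) %>%
  arrange(desc(Expected_Value))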
#summary
print(clf_results)
## Model Accuracy Precision Recall F1
## 1 Log Regression Past 0.708 0.953 0.718 0.819
## 2 Log Regression all 0.722 0.960 0.729 0.829
## 3 Neural Network Past 0.627 0.955 0.624 0.755
## 4 Neural Network All 0.716 0.965 0.717 0.823
## 5 Decision Tree Past 0.720 0.964 0.727 0.829
## 6 Decision Tree All 0.786 0.961 0.803 0.875
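# AUC on the held-out set for each model, to complement accuracy and F1
# (assumes the pROC package is installed; these values were not part of the
# original printed output).
library(pROC)
auc(roc(hn_data_test_past_y, glm_prob[, "1"]))
auc(roc(hn_data_test_y, glm_prob_2[, "1"]))
auc(roc(hn_data_test_past_y, NN_prob[, "1"]))
auc(roc(hn_data_test_y, NN_prob_2[, "1"]))
auc(roc(hn_data_test_past_y, dtree_prob[, "1"]))
auc(roc(hn_data_test_y, dtree_prob_2[, "1"]))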
ggplot(clf_results[1:6,] %>% arrange(desc(Accuracy)) %>%
mutate(Model=factor(Model, levels=Model) ),
aes(x = Model, y = Accuracy)) +
geom_bar(stat = "identity" , width=0.3, fill="steelblue") +
coord_cartesian(ylim = c(0.2, 1)) +
geom_hline(aes(yintercept = mean(Accuracy)),
colour = "green",linetype="dashed") +
ggtitle("Compare Accuracy for all Models") +
theme(plot.title = element_text(color="black", size=10, hjust = 0.5),axis.text.x=element_text(angle=90,hjust=1,vjust=0.5))

#compare f1 scores
ggplot(clf_results[1:6,] %>% arrange(desc(F1)) %>%
mutate(Model=factor(Model, levels=Model) ),
aes(x = Model, y = F1)) +
geom_bar(stat = "identity" , width=0.3, fill="steelblue") +
coord_cartesian(ylim = c(0.2, 1)) +
geom_hline(aes(yintercept = mean(F1)),
colour = "green",linetype="dashed") +
ggtitle("Compare F1 Score for all Models") +
theme(plot.title = element_text(color="black", size=10, hjust = 0.5),
axis.text.x=element_text(angle=90,hjust=1,vjust=0.5))

#After comparing accuracy, F1 scores, and AUC, the neural network trained on all features is the best-performing model. I now return to the data, keep only the current free users, and deploy that model to score them.
#select the free (non-adopter) users and drop the id/outcome columns before scoring
HN_data_free_users<-high_note_data%>% filter(adopter==0)
HN_data_free_users_test<-HN_data_free_users %>% select(-c("net_user","adopter"))
HN_data_free_users_id<-HN_data_free_users %>% select(c("net_user"))
predict_adopting_probability<- predict(nn_clf_fit_2, newdata = HN_data_free_users_test, type = "prob")
probability_of_adopting<-predict_adopting_probability[,2]
#adding id to the table
adopters_list<-cbind(HN_data_free_users_id, probability_of_adopting)
summary(adopters_list)
## net_user probability_of_adopting
## #NAME? : 9 Min. :0.00
## ____foursticks: 1 1st Qu.:0.15
## ____mih : 1 Median :0.34
## ___8 : 1 Mean :0.35
## ___ashley_ : 1 3rd Qu.:0.52
## ___cake : 1 Max. :1.00
## (Other) :99986 NA's :43106
top_1000 <-head(adopters_list[order(-adopters_list$probability_of_adopting), ],1000)
summary(top_1000)
## net_user probability_of_adopting
## _domin_ : 1 Min. :0.8331
## _faceless_: 1 1st Qu.:0.8563
## _www_ : 1 Median :0.8877
## -aceton- : 1 Mean :0.8966
## 1manguy : 1 3rd Qu.:0.9281
## a_luv : 1 Max. :1.0000
## (Other) :994
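# Export the ranked list so it can be handed off for targeting (the file name is
# illustrative; uses readr::write_csv, loaded with the tidyverse).
write_csv(top_1000, "top_1000_predicted_adopters.csv")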
print(top_1000)
## net_user probability_of_adopting
## 52943 jonatancerradam 1.0000000
## 81035 pjackson1966 1.0000000
## 2935 uterpendragon1 0.9999994
## 69930 miniloq 0.9999994
## 89370 screamadelicate 0.9999587
## 39958 franzilinchen 0.9999344
## 26275 coanta 0.9999289
## 99098 theremingirl 0.9998710
## 24922 chiefin 0.9998507
## 48921 irena245 0.9997824
## 46075 himu666 0.9996803
## 32868 donvitalone 0.9996142
## 6437 wrathfulseven 0.9993887
## 92868 smurf72 0.9993450
## 56483 khsargenius 0.9991964
## 83161 qwertytale 0.9991470
## 77743 oracool43 0.9988486
## 69554 mikeinchnails 0.9987175
## 73215 mysickburden 0.9984148
## 72691 murasaki_midori 0.9983052
## 68392 mellemusic 0.9981523
## 60846 leroyusbanker 0.9977611
## 28178 d1kaja 0.9975225
## 43381 greenvillepa 0.9973916
## 78017 oukay 0.9973853
## 19305 big_electro_cat 0.9970058
## 95466 stu_b 0.9966493
## 79875 perfectionhate 0.9965746
## 78504 palblues 0.9965118
## 64181 m909 0.9955900
## 33977 dunkelblow 0.9953592
## 32138 djflanger 0.9953277
## 84442 rebekasamyrra 0.9951835
## 52055 jjoossee1 0.9949379
## 87555 s_and_q 0.9949366
## 44896 hasantayyar 0.9945922
## 22677 cabeshpash 0.9944904
## 94692 staytuned101 0.9944459
## 72495 mtmusic 0.9944097
## 31870 discotequedude 0.9932320
## 7332 xpehope3 0.9929914
## 54923 kalpamantra 0.9926826
## 99259 theviperchick 0.9920622
## 90903 shiroyamachou 0.9915619
## 77456 omnilrenegade 0.9915271
## 31420 diegohatake 0.9915252
## 91420 silea85 0.9912005
## 6593 wynterkat 0.9909041
## 12286 alegouveia 0.9905779
## 56904 kingbea 0.9899713
## 51573 jesst7 0.9894493
## 45781 hetsku 0.9894205
## 70857 mnisdthcr 0.9893861
## 48824 invisiblewings 0.9889894
## 21435 brentill 0.9879722
## 25131 chochewee 0.9877978
## 70289 missganjita 0.9874922
## 35496 elitortosa 0.9870774
## 62979 love_puppy 0.9868348
## 95450 stryjek4 0.9863471
## 91965 sixbarrels123 0.9861792
## 6342 woolymammal 0.9860499
## 91414 silasrjones 0.9852342
## 76256 novi_sad 0.9850363
## 56922 kingflov 0.9846886
## 52079 jkarlsonsrv 0.9845497
## 63550 luismoranm 0.9843429
## 40188 freijzer-2 0.9842668
## 29209 dariomorfeo 0.9829097
## 54878 kalevkodzis 0.9823732
## 30156 deannaelise 0.9822625
## 78172 ozacr 0.9817098
## 847 topa2a 0.9808060
## 62645 lord_doguero 0.9805748
## 31545 digitalxsunrise 0.9798585
## 48511 inequ 0.9798507
## 12176 albidude 0.9796625
## 82878 qba132 0.9789233
## 34653 edmonia 0.9785534
## 8304 yoquese666 0.9783248
## 18768 benm_87 0.9782525
## 73298 mzflossy 0.9780148
## 79211 pattysauce 0.9779990
## 56224 kerarus 0.9775708
## 1971 tvickie 0.9763859
## 94852 stelmaria_roars 0.9762956
## 99608 thrishul093 0.9759500
## 76652 o_ghost_girl 0.9755163
## 31933 ditrixa 0.9755134
## 73774 narku 0.9754874
## 92851 smp4life 0.9752675
## 42408 glamgirl74 0.9745765
## 6004 winstonbox 0.9745647
## 70701 mktinchen 0.9741322
## 97437 tattooolli 0.9739346
## 35590 elliedi 0.9734894
## 68490 memoryman1964 0.9733653
## 16105 asanamats8 0.9730337
## 98410 the_karo 0.9729614
## 89958 septacle 0.9720531
## 66936 masteredd 0.9719230
## 61793 liquidcruelty 0.9716325
## 11275 aeris_bru 0.9715687
## 28625 damscray 0.9703431
## 77701 operabouffe 0.9686821
## 91740 sinedd17 0.9686596
## 63917 luvradio 0.9679237
## 45922 hi_black 0.9678678
## 76347 nsuziey 0.9674013
## 40364 frontman_exkpi 0.9664773
## 88994 schavira 0.9662184
## 56469 khons 0.9658602
## 46653 hotgal2 0.9657443
## 80906 pirozhok12 0.9653068
## 70080 mirksuhh 0.9647169
## 78410 pafurada 0.9645615
## 46466 homie20078 0.9644708
## 81060 pk-pk 0.9642354
## 92542 slippershuffle 0.9641271
## 55171 karenbetts 0.9640726
## 82058 princesskhym 0.9640200
## 44693 hapciumadam 0.9636527
## 22401 bustedmarauder 0.9634856
## 79458 pawoko 0.9633350
## 27647 cryohazmat 0.9630178
## 19710 bizoniy 0.9626569
## 47847 iinaki 0.9617704
## 6370 worldmusicwoman 0.9615935
## 62684 lorddream 0.9612691
## 50404 jannika89 0.9607338
## 15519 arcangelic 0.9602644
## 23052 camiloindie 0.9599802
## 68459 meltone9750 0.9597438
## 60844 leroyinchains 0.9593804
## 47170 i-sabotage 0.9592845
## 97030 takingbacksarah 0.9588937
## 74521 neologyc 0.9577485
## 9379 zkish 0.9572716
## 23902 cautiouzman 0.9570607
## 60139 lawr3nce 0.9567907
## 43974 guptakaran11 0.9563849
## 56155 kenkentaka 0.9560317
## 16684 auntsugar 0.9559899
## 47263 iambh 0.9559059
## 11307 afader 0.9554679
## 41554 geew-bu-raah 0.9551241
## 94224 spumco 0.9549745
## 13652 amore_tu 0.9546410
## 17165 b3n1h 0.9543944
## 90348 shadowdemonxyx 0.9542059
## 40278 frigginalex 0.9540181
## 10937 adamf3 0.9539476
## 81040 pjbird 0.9539413
## 91577 simcommander 0.9537164
## 81614 popazuda 0.9536070
## 9009 zegblog 0.9529490
## 40681 funkybigband 0.9529183
## 45163 heartscore 0.9527921
## 63088 lowtunedgirl 0.9524355
## 65453 manek07 0.9524165
## 85991 robertoszym 0.9521891
## 40661 funkmasterb55 0.9520876
## 35197 eldiablos 0.9509371
## 49220 ispytatel 0.9507490
## 69363 mightykendar 0.9504956
## 58884 kwagua 0.9504757
## 87933 saloti 0.9501484
## 47262 iambentobox 0.9500366
## 79459 pawy_vedder 0.9493758
## 69316 midorichan 0.9492998
## 47774 ihafkenschiel 0.9490029
## 23524 carol_eulalio 0.9489285
## 4368 vitormusachio 0.9488456
## 38136 faun28 0.9487963
## 28353 daemith 0.9487021
## 8787 zakkthorn 0.9487020
## 89148 schriklog 0.9485466
## 2554 un-cassette 0.9481697
## 49691 j_nothing 0.9479134
## 37639 fabiananemerodr 0.9478770
## 5003 war25 0.9474034
## 35460 elisagismondi 0.9470258
## 38728 figinimigini 0.9468183
## 22814 caiotribbiani 0.9465503
## 23381 carlacoutinho 0.9464695
## 88001 samar_saeedi 0.9460136
## 36992 ethnojazz 0.9458228
## 80117 petrouu 0.9457599
## 94995 stevenbobby 0.9447785
## 45218 heavytony 0.9442734
## 32890 dookiemonkey7 0.9441154
## 24548 charsier 0.9441108
## 5386 wendersonaguiar 0.9436429
## 6621 x-boy84 0.9430852
## 32781 doney60_12 0.9428681
## 69938 minimax1965 0.9426053
## 3326 vaniorsioli 0.9424138
## 72580 mugen_92 0.9418519
## 74832 nextsaturday86 0.9413761
## 17001 aylinslkgl 0.9413253
## 19249 bicibebek 0.9404574
## 73143 mymusicmydivas 0.9404292
## 44032 gustavodrigues 0.9403811
## 22234 bumble-boy 0.9399875
## 86837 rozart 0.9396686
## 72158 mrmario1 0.9394069
## 56216 keoos 0.9390996
## 1843 tunch 0.9382021
## 14288 andyisdead 0.9381588
## 60095 laurisvrm 0.9380444
## 73895 natasa_bdm 0.9378000
## 88073 sammckeetd 0.9373176
## 75761 no-false-saint 0.9370174
## 40212 frequencyfather 0.9367130
## 25736 cinemababe 0.9364407
## 6083 witchiz 0.9363441
## 5717 wickeda_sd 0.9362652
## 71966 mraben 0.9360879
## 91122 shtuka 0.9359155
## 83660 rainbowshoe 0.9355985
## 66030 marianarosa 0.9355787
## 23810 cataploft 0.9353480
## 39022 fkrispin 0.9346985
## 37945 fanour 0.9340960
## 36452 eppos 0.9337843
## 78613 pand0ra23 0.9336784
## 63056 lovesnobigtruth 0.9335416
## 97151 tamtamdam 0.9334570
## 28423 daialvarado 0.9330935
## 11305 af7017ax 0.9329261
## 77642 onur666 0.9328877
## 76002 nooneastern 0.9319374
## 26329 cocoroco 0.9318554
## 1833 tumbling-down 0.9315609
## 27862 curryser 0.9313735
## 82122 pro100pro 0.9313582
## 27509 criticalcat 0.9309973
## 9394 zloiman 0.9309874
## 50019 jaimewinn 0.9309372
## 18582 belindaj_1 0.9309264
## 85170 rhamw 0.9299235
## 97130 tamihania 0.9297293
## 76838 odysseyandoracl 0.9296684
## 61061 lfluffy 0.9296510
## 17563 balynah 0.9293999
## 62251 lo_re_da_na 0.9288882
## 53259 josephalanfears 0.9288592
## 17158 b1ackd0g 0.9283073
## 63716 lulitaeme 0.9281920
## 91039 shortbeatnik 0.9280803
## 3517 veiledsongbird 0.9280604
## 6774 xatise 0.9280274
## 48686 inrigo 0.9277600
## 13881 anaryaeledhwen 0.9277218
## 15080 antoaneta_p 0.9274767
## 21685 brokenfist 0.9274151
## 78557 pame_agostini 0.9272420
## 17829 barrettszipper 0.9267817
## 48829 inzaratha 0.9264881
## 17823 baronvontito18 0.9262970
## 74025 nattysnlove21 0.9261467
## 81673 poratliron 0.9260563
## 96911 tadix 0.9259457
## 73063 mycatisnotavirg 0.9257862
## 30626 demol88 0.9257508
## 27525 crlcan81 0.9255873
## 99833 tijuanalady 0.9254271
## 75183 niewiasta 0.9250584
## 28249 d_rox 0.9249782
## 30459 deh__ 0.9249288
## 56300 keturahkhaos 0.9243915
## 83235 r_kohakusui 0.9243117
## 29779 davidmills71 0.9241472
## 26339 codamarine 0.9241123
## 39604 foreignconcepts 0.9240616
## 38050 fast4321 0.9240406
## 62728 lore-sima 0.9238201
## 34788 eerika23 0.9237445
## 94649 stassino 0.9236484
## 38225 feblackmore 0.9235632
## 36333 enrico111 0.9229807
## 22093 buddhaa 0.9226147
## 28457 daisygraves 0.9224368
## 53523 jrencarnacion 0.9223600
## 10670 abstract2 0.9223349
## 89920 sentoul 0.9221524
## 39347 floydsounds 0.9218584
## 27274 crazy_nightmare 0.9217789
## 84678 refuse-resist 0.9211388
## 84512 redbarn23 0.9208888
## 54007 juliantorrado 0.9207539
## 93602 sosiambrisa 0.9207428
## 1987 twan313d 0.9206835
## 46871 hughesrjl 0.9206735
## 65187 malcolmv 0.9202856
## 66675 marty_key 0.9201863
## 60609 lemelek 0.9192868
## 12461 alexander_allen 0.9189281
## 70360 misspinkeyes 0.9187947
## 29900 dazzle1452 0.9186769
## 74701 netzach 0.9183905
## 81811 powpawpow 0.9182627
## 25739 cinemarae 0.9180820
## 32705 domiwie 0.9180641
## 2435 ulfhedhnar 0.9179208
## 81736 postdawn 0.9176764
## 39228 flixxxer 0.9172977
## 54119 julzianaz 0.9171387
## 67951 mdenisa 0.9169369
## 29233 dark_admin 0.9169047
## 39178 flfl10 0.9168863
## 33647 drudru 0.9166496
## 36834 eskimofo 0.9160796
## 48523 inewhite 0.9158786
## 47584 idealholokaust 0.9156848
## 22833 caitrin1985 0.9154400
## 46969 hungry_helen 0.9153920
## 40485 fubuki_ 0.9151525
## 23546 carolina48 0.9149972
## 81509 polskam 0.9147821
## 97509 tayotaz 0.9146360
## 4213 violent_phlegm 0.9146140
## 48200 imcarenas 0.9145730
## 51534 jessicalux 0.9143175
## 17964 bastetmeister 0.9141425
## 4534 voddi 0.9141232
## 34252 e-lisa 0.9140694
## 48970 irka_rump 0.9134939
## 30586 dem_oloart 0.9133588
## 69841 mimidel 0.9132366
## 21932 bryan94 0.9131157
## 41637 general_picasso 0.9130487
## 88504 sarap00jp 0.9126461
## 12726 ali824 0.9125182
## 42329 giugiu9 0.9123563
## 6175 wo_olfy 0.9122644
## 73584 nalurebell 0.9117235
## 66045 mariannaychev 0.9111280
## 5251 webspiderus 0.9110505
## 63032 lovemaximumfm 0.9110246
## 4338 vitali_teen 0.9109855
## 8901 zarubec 0.9109705
## 88348 santiago-davila 0.9106734
## 38789 filmclip 0.9105876
## 26565 combatcircus 0.9103764
## 82882 qbayne 0.9099184
## 86816 roy_cakes 0.9096364
## 99333 thiagoceles 0.9093486
## 71184 monday6am 0.9093010
## 88169 samuryi00 0.9092643
## 10373 a_luv 0.9089863
## 69725 milenuts 0.9085158
## 95790 sumiyakani1get 0.9084678
## 5906 willowbear1228 0.9083242
## 42943 goosiak1990 0.9082757
## 96822 t9rzyk 0.9081077
## 73098 mygodchris 0.9079659
## 28094 czarrocker 0.9077726
## 31247 di_west 0.9077437
## 7945 yarwan 0.9075432
## 7318 xoxkarijoxox 0.9073080
## 56446 khareen17 0.9070597
## 8696 zabusia 0.9068969
## 61419 lilhan 0.9068669
## 41328 garyloo 0.9067603
## 72562 mufasa2008 0.9063683
## 60751 leonardo_freak 0.9062504
## 26602 comparedto 0.9061884
## 67391 mauricesnabilie 0.9061067
## 30297 decibel120 0.9058808
## 72369 ms_creep 0.9057096
## 3044 vacuiforme 0.9056436
## 23263 captainfe 0.9055507
## 27093 cozynite 0.9053668
## 38376 felipinx 0.9053118
## 70204 miss0nitro0zoe 0.9050364
## 95468 stuart_davis15 0.9050171
## 61702 linkntinks 0.9049398
## 97040 takunisdope 0.9048448
## 62212 lm_link 0.9042145
## 71101 moljo 0.9041675
## 16458 asylumjesus 0.9039820
## 90447 shamansir 0.9039222
## 91409 sil_marillion 0.9038979
## 36891 essereinutile 0.9035762
## 25490 chserena 0.9033905
## 5715 wicked_one_777 0.9033352
## 22758 caffeinateme 0.9032409
## 69858 mina188 0.9031212
## 8249 yogotox 0.9028971
## 50247 jamierobertward 0.9028313
## 29184 dargard 0.9026820
## 73817 nastazjaf 0.9025998
## 30427 defk3000 0.9025763
## 22711 cactusvoice 0.9025374
## 31509 digeme 0.9024940
## 26252 cmudp 0.9024353
## 71056 mojito76 0.9024288
## 91383 sihayakara 0.9020361
## 49022 iroqk 0.9020176
## 53772 jucaazevedo 0.9019597
## 81881 praleila 0.9018618
## 16578 atreyu1982 0.9018523
## 48460 indieprince 0.9015286
## 86183 rockinbird 0.9014044
## 74302 nefrete 0.9011624
## 74911 nicast 0.9010335
## 35418 elify76 0.9009418
## 54804 kaizers0maestra 0.9007263
## 74716 neurolepsia 0.9007125
## 79031 passpassive 0.9006679
## 15481 arashisu 0.9005620
## 29101 dantyrr 0.9004478
## 82651 punkybi0tch 0.9003165
## 27468 crisrocs 0.9001372
## 43337 greendevotchka 0.8996108
## 2013 twenty-ten 0.8995143
## 48452 indiehearts 0.8994258
## 56297 ketsuk 0.8989245
## 60842 leroidusoleil 0.8989098
## 28463 dajahns 0.8988608
## 57339 klaudiakaminska 0.8987888
## 89490 seamstress 0.8987505
## 23364 carinilumpyhead 0.8987338
## 43524 grimyaku 0.8985566
## 60084 lauriette91 0.8984906
## 43147 grabhorn 0.8984294
## 89529 seanmoser 0.8982312
## 18116 bboudoux 0.8982144
## 88790 sawa1383 0.8981377
## 41290 garmanator 0.8981093
## 2418 uksteveh 0.8980010
## 1017 toulouselr 0.8979216
## 41459 gberryman 0.8977066
## 41453 gba91 0.8976058
## 876 tor_wilson 0.8976038
## 79166 patron-24 0.8975070
## 21397 breezesquad 0.8975029
## 15645 arhaeopter 0.8971468
## 83060 quetzallimusic 0.8971340
## 74062 nava_blues 0.8970449
## 20875 boogieman981 0.8968515
## 25109 chloeflux 0.8967507
## 99495 thomasjgreen 0.8965957
## 64236 maaaiden 0.8965335
## 4465 vladan88 0.8964053
## 76467 nuotarefiume 0.8963282
## 16759 autumnsweater5 0.8959181
## 55686 katvonpberg 0.8957009
## 11908 akasuna 0.8956386
## 27871 cursedzephyr 0.8956354
## 70319 missluciferr 0.8952487
## 82240 protey-corrax 0.8952454
## 46542 hootchi_cootchi 0.8948865
## 46303 hodracirk 0.8948197
## 75452 nimhead 0.8948086
## 49793 jackcosta 0.8947547
## 26467 colgatewhite 0.8947513
## 32623 dolcesangue 0.8947124
## 76523 nuttyxander 0.8945210
## 82038 princegarth 0.8941429
## 79025 passionofknife 0.8941006
## 95846 sunjazz2 0.8939684
## 15715 arka-fon 0.8936030
## 55318 karolxmmmmm 0.8933866
## 83849 randolfcarter 0.8932962
## 89964 sepulturero_666 0.8932634
## 85008 restythestar 0.8930886
## 60940 letsfocusonme 0.8930215
## 5554 wheelq 0.8928076
## 76238 novacat 0.8926832
## 91371 sigridur 0.8923535
## 79118 patquinnchin 0.8920550
## 29712 david_14 0.8920425
## 46842 hudgie 0.8915906
## 41600 gelpie 0.8915742
## 59525 lamariposaazul 0.8914897
## 6607 wysteria 0.8914249
## 60462 leftofleftfield 0.8911622
## 64056 lyrio 0.8908739
## 33613 drpop 0.8908609
## 25571 chunky_bacon 0.8907758
## 19866 blackdragon90 0.8904792
## 41871 getup14 0.8904759
## 49062 isabel86 0.8902329
## 88782 savoy-wang 0.8901451
## 79132 patriciapaiva 0.8900512
## 29228 dark-skyliner 0.8897127
## 72457 mstfcn 0.8895455
## 39131 flaviorafael 0.8891697
## 51157 jegraphy 0.8890535
## 54039 julienegunner 0.8889685
## 48765 interozitor 0.8888088
## 69558 mikel- 0.8888015
## 61474 lillek 0.8887262
## 35082 el_boris 0.8885865
## 33435 dressed 0.8885484
## 60276 leadae 0.8881638
## 84224 razmosis2 0.8879986
## 39349 floyt 0.8877097
## 97272 tar-morgul 0.8876436
## 24178 ceravene 0.8871020
## 27961 cyberaktif08 0.8870485
## 70403 missyas 0.8868359
## 90820 shimmerrose 0.8867151
## 98401 the_heathen1991 0.8864080
## 6886 xdepriiivedx 0.8861568
## 40967 gabirp 0.8861433
## 79834 peppermintdream 0.8858250
## 28304 dacevedo86 0.8857106
## 97187 tanguyvd 0.8856172
## 99064 theransu 0.8855252
## 42412 glamoour 0.8854497
## 62239 lnkjuice 0.8853962
## 34145 dylan05 0.8850125
## 38380 felix-wf 0.8848269
## 43964 guns-of-brixton 0.8847658
## 24992 chillwithschill 0.8846444
## 10177 _faceless_ 0.8845225
## 23102 cancatalyurek 0.8840326
## 72015 mrc68 0.8839558
## 59235 ladodoclem 0.8839503
## 81082 pkzlck 0.8837968
## 31833 disarm1979 0.8837557
## 70324 missmalice 0.8836902
## 90978 shnur58 0.8835862
## 7175 xknutx 0.8834502
## 21856 bruno_avilar 0.8834026
## 39045 flakoo 0.8833428
## 74299 nefer-tari 0.8832994
## 80979 piveskid 0.8832957
## 10284 a---a 0.8832478
## 15084 antolele 0.8832002
## 88306 sankt 0.8831545
## 22761 cage2005 0.8829925
## 56410 kfjotek 0.8827704
## 56235 keridven 0.8825636
## 44156 gyorffymate 0.8825066
## 71599 moriax 0.8822898
## 30729 denizm75 0.8822787
## 45805 hexenmeisterin 0.8821714
## 76872 offinkaa 0.8820519
## 70864 mnlpdr 0.8820444
## 13688 amshu 0.8816458
## 18428 beek56 0.8816415
## 41817 gerryhectic 0.8813953
## 9263 zielonykrolik22 0.8812632
## 53982 julianapena 0.8806042
## 63952 lvgame 0.8803717
## 5137 wavebyebye 0.8800781
## 64750 magda_arch 0.8797625
## 48703 insanity35 0.8794621
## 86855 rpalstra 0.8793053
## 41419 gawrosz 0.8792939
## 14446 angelinasouza 0.8792723
## 2966 uuree 0.8791964
## 24938 chiiii88 0.8791737
## 14015 andosis 0.8791643
## 17184 b_nourbakhsh 0.8791267
## 79001 pasha89 0.8788395
## 48171 imagining 0.8788044
## 27714 csumler 0.8787924
## 54700 kafcamus 0.8787771
## 28613 damonf888 0.8786329
## 80101 petramode 0.8785423
## 56338 kevinesqueche 0.8783207
## 20314 blue-desert 0.8782883
## 36968 eternalsilence- 0.8782048
## 95956 superegoistic 0.8780361
## 12563 alexisanima 0.8780198
## 27258 crazy-4ertenok 0.8779341
## 77196 olegkor 0.8779043
## 55386 kasiatek 0.8778256
## 24455 chapal 0.8778048
## 74187 ne-fer 0.8776829
## 30911 derschrader 0.8776819
## 67601 maxx_500 0.8774896
## 45115 he6o 0.8774507
## 51079 jefersombh 0.8774382
## 22190 bulcas 0.8769836
## 36141 emreg00 0.8769426
## 17403 badquanta 0.8765746
## 13659 amortencja 0.8761507
## 69030 mia_wallace92 0.8760512
## 92883 smzi 0.8760312
## 77217 olesjafan 0.8758699
## 53605 jsiminski 0.8758534
## 92005 sjhur724 0.8757165
## 61618 lind_l_tailor 0.8749665
## 99408 thinwhitedukem 0.8748818
## 7877 yamillevegan 0.8748795
## 12724 ali3n0 0.8748534
## 76073 noriko-san 0.8747191
## 55270 karmakiller_hxc 0.8745344
## 71429 mooncoma 0.8743867
## 85984 roberto_cups 0.8743611
## 63200 lu_sonn 0.8741595
## 87460 rytep 0.8740365
## 13672 amperx 0.8739193
## 79055 pataglu 0.8737785
## 44605 hanlonconnor 0.8736789
## 37704 fabsya 0.8736462
## 1839 tumsik 0.8736207
## 71476 mooondek 0.8736159
## 35789 emac7dawgs 0.8735538
## 72172 mrmojoking 0.8734702
## 96189 suzej 0.8734326
## 56210 kenyikissarmy 0.8734163
## 86724 rossshannon 0.8733209
## 43978 gurenka 0.8730994
## 16651 audreycharlot 0.8726275
## 54715 kage_enma 0.8725870
## 85520 rihouibo 0.8725269
## 83370 radiantlotus 0.8725163
## 35974 eminpascha 0.8724105
## 5903 willone 0.8722775
## 91515 silver_28 0.8721302
## 97680 technoankan 0.8716169
## 28298 dabu22 0.8715660
## 68327 melcavalera 0.8715539
## 88771 savesomeface 0.8715483
## 79794 pep3 0.8715054
## 74681 netjer-deru 0.8714030
## 98972 thennil 0.8713784
## 75652 nivagio 0.8712253
## 65744 marcelaospina 0.8711284
## 74625 nerwowy 0.8711204
## 50475 jaras1 0.8709512
## 47669 ifeelneon 0.8708699
## 59299 ladygrnningsoul 0.8707235
## 35543 ellalla 0.8706364
## 2359 ugomonio 0.8706328
## 58319 kristi_gail 0.8706093
## 2788 up_the_sounders 0.8705886
## 96178 suviheikkila 0.8705321
## 69405 mihaela_cookie 0.8705129
## 95720 suicidal_mood 0.8704602
## 59626 langsuirx 0.8703750
## 38350 felipedupeixe 0.8702747
## 4050 vilamelka 0.8700460
## 84598 redrabbitmoon 0.8699108
## 62185 llittle 0.8698984
## 17955 bast-imret 0.8694709
## 23253 captain_rum 0.8694478
## 51620 jesusvillalba 0.8693335
## 36166 emwu4real 0.8692882
## 69981 minoreth 0.8688501
## 4003 viicmiranda 0.8687376
## 99172 theskunkcatcher 0.8687266
## 50098 jakubc 0.8687111
## 36945 eszra 0.8686317
## 93080 snuffix 0.8682311
## 49972 jaguuar 0.8679383
## 41154 gallegherplus 0.8679361
## 14675 ankism 0.8677110
## 75219 nightfalldeath 0.8676641
## 73794 nasaja16 0.8676338
## 77285 olivierdv13 0.8674237
## 26199 clubsilenzio 0.8668686
## 95258 stoopidity 0.8668618
## 55292 karol_fox 0.8667115
## 41666 genevievedesro 0.8664078
## 21103 boxcar_cbu 0.8661395
## 28504 dalilines 0.8660100
## 59995 lauracat93 0.8659090
## 12983 allenhigh2000 0.8656613
## 33632 drstrange22 0.8656209
## 20618 bobububo 0.8655527
## 59814 laska_z_pol-ska 0.8654211
## 65379 manatheminx 0.8653147
## 62696 lordmeatsword 0.8652334
## 53413 jovenmisterioso 0.8652236
## 19400 bigkajamma 0.8651998
## 33181 dracovolantus 0.8650023
## 56257 kerrio 0.8649797
## 12246 ale_av 0.8647592
## 23897 cause_time 0.8646511
## 80192 pgeuder 0.8644332
## 19885 blackheartbree 0.8643795
## 99350 thiagosj 0.8642219
## 96127 susemiez 0.8639924
## 25943 clamaresan 0.8638639
## 28503 dalilaboechat 0.8637908
## 5961 wind_rose 0.8636575
## 15349 appl3jelly 0.8634014
## 91741 sinefil77 0.8633590
## 79263 paulaelenah 0.8632322
## 50553 jasmin66613 0.8629461
## 64022 lyndin 0.8629383
## 80806 pioglfr2 0.8629234
## 74767 neverwherekate 0.8629103
## 21068 bouree 0.8626445
## 10278 _www_ 0.8625234
## 96280 svirach 0.8622743
## 90114 servial 0.8622289
## 64660 madulaa 0.8620754
## 74583 nerdgirl1 0.8620464
## 21104 boxcodax 0.8620122
## 43462 gretchyn 0.8619937
## 77728 opsica 0.8619744
## 16028 artstorm 0.8618069
## 96871 tabbyface 0.8616015
## 44994 havoc8282 0.8615987
## 89654 secretspicy15 0.8615521
## 38605 fewald 0.8612938
## 58403 krol4ik 0.8612166
## 81665 popture 0.8611114
## 40429 fruitcakemofo 0.8609117
## 9777 -aceton- 0.8608598
## 27614 crunchyfrog31 0.8607222
## 22734 cadubione 0.8606878
## 54708 kafrancis 0.8606772
## 59819 lasonea 0.8606450
## 99539 thomvr 0.8606266
## 24553 chaschperli 0.8604337
## 12373 alessandrod 0.8602609
## 60719 leodinosaur 0.8593988
## 86012 robin-bad 0.8593836
## 2941 utilizatori 0.8593429
## 14904 annygahan 0.8593135
## 37580 f-uckfashion 0.8592573
## 47659 ieve 0.8591222
## 15051 antikrbic 0.8590381
## 12033 akutenshi13 0.8589606
## 53277 josevitor 0.8588245
## 33729 ds_mary 0.8586262
## 38448 fenring 0.8586038
## 2444 ulippka 0.8585109
## 21848 brunitxinhaleal 0.8584892
## 92112 skazia 0.8584022
## 71390 monument 0.8582606
## 97768 teh_moose 0.8582063
## 59738 larisa_shepitko 0.8581682
## 79793 peosmirr 0.8581359
## 41056 gabycancun 0.8579894
## 34106 dwittenberg 0.8579026
## 86118 rock-fm 0.8578167
## 64781 magependragon 0.8577747
## 19579 bingocrepuscle 0.8575858
## 30725 denizborulcesi 0.8574904
## 59307 ladykekeee 0.8571336
## 6548 wutya 0.8571211
## 34891 egotripping44 0.8570645
## 78307 pabloalvarez 0.8570515
## 36884 ess093 0.8569109
## 36403 eolienn 0.8568634
## 45791 hevinalle 0.8568135
## 11598 ahelixkate 0.8567974
## 94141 spolastre 0.8567763
## 13283 alvin-hang 0.8563749
## 9403 zmajchek 0.8561561
## 35434 elimmar 0.8559672
## 64801 maggotka 0.8559375
## 13582 amiegeek 0.8556841
## 24301 chach27592 0.8554462
## 94102 spittingcat 0.8553709
## 33130 dpxamarok 0.8552854
## 58539 ksenia_kundenko 0.8552225
## 68084 medieval86 0.8548970
## 44435 hallamainen 0.8547968
## 21370 breather 0.8547826
## 61180 lichit 0.8546512
## 45105 hdumbledore 0.8545709
## 18844 bequem 0.8543890
## 93879 spartah3ad 0.8542767
## 77610 onlysau 0.8542513
## 42573 gnatho 0.8541160
## 17897 base_tripp 0.8538843
## 86672 rosebud_rouge 0.8536027
## 41901 ggiannini 0.8535953
## 7773 xzisten 0.8535608
## 78895 paranoidbird 0.8535493
## 80316 philipesteiff 0.8534155
## 89083 schnattchen1973 0.8534028
## 40343 frogman_needy 0.8533580
## 39980 frauausathen 0.8533469
## 1751 tualatin14 0.8532748
## 4403 vivazwei 0.8531850
## 51924 jimmorrison71 0.8531622
## 81820 pozitivno_luda 0.8528327
## 81275 pluym 0.8522746
## 4296 virus_pedro 0.8519651
## 51487 jespomo 0.8517198
## 55648 katrin_kutepova 0.8516277
## 44739 harchibaldo 0.8516063
## 50138 jamardracken 0.8515510
## 10163 _domin_ 0.8513587
## 69758 milkl 0.8512385
## 39593 forbidden667 0.8511969
## 39380 fluke_ 0.8511148
## 83502 rafaelfroner 0.8510115
## 95404 striges 0.8510049
## 35154 elbarbon 0.8508627
## 88389 sapuara 0.8508538
## 88920 scarletbright 0.8508288
## 17995 bathory191919 0.8508176
## 45709 herome87 0.8507022
## 87603 saaur 0.8506837
## 14876 annita_blake 0.8506144
## 74388 nekrofelia_ 0.8505925
## 33312 drbankman 0.8505908
## 37068 euronymous_pl 0.8504388
## 57716 koki4a 0.8504131
## 81016 pixievondust 0.8502970
## 59002 kyodes 0.8502574
## 61775 liperox 0.8498678
## 750 tonightstheday 0.8498356
## 62412 loldokwadratu 0.8497610
## 93159 sodium_haze 0.8497120
## 11204 aduy 0.8495589
## 11540 agu_g 0.8495290
## 61863 lisishin 0.8495200
## 51661 jexblade 0.8494850
## 98232 thanatos2380 0.8494332
## 84620 redtalula 0.8492147
## 81570 pony999 0.8491413
## 56671 kill_da_queen 0.8491407
## 45551 hennebrecht 0.8487682
## 28794 daniel-aqwa 0.8485775
## 76704 obiwankenobi73 0.8485222
## 48048 illstreetbluez 0.8484823
## 26106 clevercarrot 0.8484368
## 8752 zagadeczka 0.8483318
## 71614 morkervasen 0.8482717
## 18087 bb_128 0.8482410
## 43222 grand_denial 0.8481872
## 2758 unsteadypuppy 0.8481390
## 29580 datadata 0.8481170
## 73725 napalm-death 0.8481032
## 56040 kelikandzer1626 0.8480777
## 39579 for-w-art 0.8480568
## 36201 encore_mad 0.8480188
## 35216 electoral 0.8478739
## 96766 szumekmm 0.8478739
## 21197 bragilmassoud 0.8476972
## 38323 felasriff 0.8476894
## 1782 tuere7 0.8476781
## 76142 noshelflife 0.8474445
## 24730 chen-jie 0.8474016
## 80531 piekarz1997 0.8473991
## 34347 eartle 0.8471646
## 25054 chipchopchip 0.8467827
## 20620 boby-kun 0.8466851
## 38865 finomosec 0.8463513
## 93647 soulicitrock 0.8462476
## 16939 axeminister 0.8461729
## 5879 williamt 0.8460856
## 99358 thiarles 0.8459568
## 53592 jseppanen 0.8459499
## 47934 iksnaics 0.8459197
## 28355 daemnian 0.8458483
## 55987 keitadaigo 0.8457514
## 23506 carnizalove 0.8455901
## 22752 cafeacto 0.8455789
## 7911 yankokitanov 0.8455127
## 80222 phalanx595 0.8455103
## 2196 typicalgirl 0.8453661
## 8341 yositosi 0.8453576
## 13835 anamnetikbl3v 0.8452262
## 92866 smultronsylt 0.8452205
## 13686 amromo 0.8452066
## 59285 ladybuggin 0.8451955
## 18293 beau_de_jour 0.8451456
## 15118 antonshangin 0.8450692
## 62842 lostkaeto 0.8450260
## 2416 ukrbrutaller 0.8448753
## 34130 dydimo 0.8448398
## 24845 chezzabot 0.8447564
## 56749 killme_kate 0.8447513
## 56714 killerclown58 0.8447233
## 19827 black_sister 0.8446994
## 84196 rays-in-space 0.8446827
## 79453 pawliczenko 0.8446407
## 72436 mspaiige1 0.8445902
## 67712 mazokunomiko 0.8444733
## 90124 sesler 0.8444205
## 34045 dustx85 0.8443793
## 85778 rjdejong 0.8443409
## 12948 alla-in-music 0.8443078
## 99231 thetrousers 0.8440830
## 37998 faridab8 0.8440351
## 83620 raiat 0.8439062
## 37786 failaz2 0.8438367
## 42328 giugiu88 0.8437188
## 84669 refikcaglayan 0.8436842
## 47505 icexqueen_k 0.8435303
## 90822 shimn 0.8433768
## 62817 lost_lullabies 0.8432979
## 75621 nita_ 0.8432130
## 10733 accidentalchoic 0.8431850
## 34866 eggrole1 0.8430402
## 53151 jordanf92 0.8430161
## 68346 melidagor 0.8429359
## 32811 donnaloia 0.8428339
## 58378 krki_krki 0.8427884
## 44339 haindede 0.8427358
## 63951 lverth 0.8427186
## 69169 michalz82 0.8426221
## 29591 datenkind 0.8426181
## 48024 ill_eyggro 0.8424409
## 31370 didymos234 0.8423781
## 80708 pingumusic 0.8423335
## 75672 niyamaimirin 0.8423044
## 19599 bio-tex 0.8422399
## 73797 nasedil_genio 0.8422132
## 23346 cardiolock 0.8421019
## 20316 blue-green 0.8418475
## 87207 ruscvorn 0.8414360
## 33225 dragonkite 0.8414049
## 70856 mnilu 0.8413268
## 20799 bonequinha_ 0.8413208
## 43367 greenoutsider 0.8412738
## 9933 1manguy 0.8412340
## 46970 hungryzombie 0.8407700
## 18231 bearberni 0.8406797
## 93666 soulshow 0.8405037
## 48815 invert_girl 0.8403590
## 74237 necmettin 0.8401993
## 89635 seclusion3 0.8401859
## 32597 doitbetter 0.8400836
## 43848 guilakatos 0.8400512
## 68607 merduk89 0.8399397
## 99166 thesincommittee 0.8397503
## 19281 biestblut1 0.8397162
## 61551 lilyfears 0.8396597
## 77947 ostategrl08 0.8396577
## 33215 dragonfliye 0.8395940
## 98567 thebranka 0.8395724
## 21407 brelsnok 0.8393933
## 90816 shimetsu 0.8393398
## 45260 heeeeeeeeeey 0.8392379
## 96206 svaerke 0.8390389
## 71963 mr_whirly 0.8389221
## 87757 sagan-indiana 0.8388851
## 71076 mokapantages 0.8386915
## 53741 juanmasg 0.8386654
## 47152 hyzzteria 0.8386539
## 65262 malkor84 0.8384967
## 18361 beckettt 0.8381083
## 4981 wangle-mcdangle 0.8380936
## 88151 samulbusuk 0.8380834
## 53300 joshdelarocha 0.8380791
## 61216 lienonrhine 0.8380719
## 88700 satschi 0.8380410
## 6810 xblaster 0.8380171
## 73071 mycodenameissid 0.8380029
## 54072 julioenge 0.8379758
## 5495 wezarscrew 0.8379756
## 43583 grooovism 0.8377735
## 53665 jtfaster 0.8376811
## 71628 morninghalo 0.8376216
## 77370 omant 0.8375943
## 68643 merostica 0.8375887
## 30232 deathvallyqueen 0.8374823
## 8970 zebool 0.8374315
## 57794 kondri 0.8374258
## 81006 pixieguts 0.8373156
## 86468 romankr 0.8372686
## 86795 roxanamaravilla 0.8371326
## 36233 endorphin_rush 0.8371264
## 98399 the_goddess4 0.8370125
## 58267 kringle20 0.8369168
## 255 tlr1920 0.8368548
## 93256 solar_smile 0.8368166
## 38034 farzer 0.8367158
## 30437 deft-ru 0.8366567
## 10851 acotam 0.8366022
## 28028 cynic_liegelord 0.8364864
## 42592 gnlpf666 0.8363741
## 26274 coalhoof 0.8363445
## 53856 juicyjuicyjuice 0.8363337
## 74504 neoblaze 0.8362693
## 48472 indigoeyes57 0.8360975
## 66850 mashadk 0.8360656
## 79388 pauluk_dunaj 0.8360345
## 8009 yazdankm 0.8358810
## 85478 ridurand 0.8356733
## 68794 metallindustrie 0.8356015
## 75913 nokon74kk 0.8355201
## 37868 fallenlui 0.8354963
## 86533 ronaldvdstad 0.8354889
## 8138 yevetteo 0.8353607
## 62252 loach 0.8349976
## 22169 buhender 0.8349500
## 13225 alterna 0.8349065
## 58869 kvazar13 0.8348022
## 38188 fdelrosario 0.8347409
## 15052 antikrist9090 0.8341475
## 43488 gribo4ok 0.8340309
## 54490 k-ndela 0.8340063
## 96668 systema714 0.8339780
## 1140 tradycyjnie 0.8339054
## 70987 moeysplaylist 0.8338729
## 68384 mell0ww 0.8337611
## 77085 okkibox 0.8336364
## 79673 peetee1979 0.8334892
## 36178 en1m4 0.8334521
## 2319 uenzdei 0.8332688
## 75788 no_surp 0.8331291
## 48256 immolationind 0.8331071