https://rpubs.com/fumi/593496 参考資料
http://alfredplpl.hatenablog.com/entry/2013/12/24/225420
https://qiita.com/nkjm/items/e751e49c7d2c619cbeab
https://momonoki2017.blogspot.com/2018/04/r007-riris.html
http://d-m-l.jp/Rbiz/task_rf.html
https://funatsu-lab.github.io/open-course-ware/machine-learning/random-forest/
http://takenaka-akio.org/doc/r_auto/chapter_03.html
http://yut.hatenablog.com/entry/20120827/1346024147
https://mjin.doshisha.ac.jp/R/Chap_23/23.html
https://qiita.com/TsutomuNakamura/items/a1a6a02cb9bb0dcbb37f 混同行列(Confusion Matrix) とは
https://shohei-doi.github.io/notes/posts/2019-05-27-cross-validation/
http://d-m-l.jp/Rbiz/task_rf.html ランダムフォレストとは機械学習のアルゴリズムの1つで、学習用のデータをランダムにサンプリングして多数の決定木を作成し、作成した決定木をもとに多数決で結果を決める方法です。精度、汎用性が高く扱いやすい分析手法です。
ランダムフォレストの特徴
手始めに使用するデータを選択する。Titanicはダメ、cordataはダミー変数のため使えない。
質的変数/カテゴリカルデータはダミー変数に似ているが全く別物である。
質的変数を量的変数として扱えるように変換するのがダミー変数化である(https://markezine.jp/article/detail/20790)。
0,1に置き換えたデータをダミー変数という。
以下の2行のおまじないで、あっという間に質的変数/カテゴリカルデータをダミー変数に加工できます!
tmp <- dummyVars(~., data=sample1)
sample1.dummy <- as.data.frame(predict(tmp, sample1))
1行目dummyVarsの“~.” ……このニョロニョロの後ろの “.”(ピリオド) は、全ての質的変数を対象とすることを意味します。
data=sample1……対象とするデータはsample1ですよ!の意味です。
2行目は1行目で作成したtmpを使って、sample1の質的変数をダミー変数に変換して、sample1.dummyに格納します。
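動作を確かめる最小例を示すと次のようになります(sample1 は本文のデータではなく、説明用に仮に作った小さなデータです)。
library(caret)
# 説明用の架空データ sample1(因子2列+数値1列)
sample1 <- data.frame(
  sex  = factor(c("male", "female", "male")),
  area = factor(c("A", "B", "A")),
  age  = c(20, 35, 41)
)
tmp <- dummyVars(~., data = sample1)                    # 全ての質的変数をダミー化する設計をつくる
sample1.dummy <- as.data.frame(predict(tmp, sample1))   # 0,1 のダミー変数に展開
sample1.dummy                                           # sex.female, sex.male, area.A, area.B, age の列になる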
#ランダムフォレストで使用するデータ
- Titanics.rpart
- Titanic
- Titanicはtrainが統計処理(集計)されたデータであり、この演習には不向き
- cordataは、グラフィック用に処理されたデータであり、trainのPclassを3区分、sexを2区分するなど一部を質的化したが、Fare・年齢は量的データのまま、氏名もそのままで、欠落のあるデータは補完してある
- ダミー変数dummy_var等はカテゴリーデータをintegerデータに置き換えたものであり、以下の論点に合わないらしいので使わない
- lldataを使っても良いが、makedummies()でダミー変数化する前のdumとnot_dumを結合したtrain2を使用する
#randomForestではCharacterは使わないようにしよう http://ushi-goroshi.hatenablog.com/entry/2019/01/30/171259
library(car)
## Loading required package: carData
library(caret)
## Loading required package: lattice
## Loading required package: ggplot2
library(cluster)
library(dummies)
## dummies-1.5.6 provided by Decision Patterns
library(data.table)
library(dplyr)
##
## Attaching package: 'dplyr'
## The following objects are masked from 'package:data.table':
##
## between, first, last
## The following object is masked from 'package:car':
##
## recode
## The following objects are masked from 'package:stats':
##
## filter, lag
## The following objects are masked from 'package:base':
##
## intersect, setdiff, setequal, union
library(e1071)
library(epitools)
library(effects)
## Registered S3 methods overwritten by 'lme4':
## method from
## cooks.distance.influence.merMod car
## influence.merMod car
## dfbeta.influence.merMod car
## dfbetas.influence.merMod car
## Use the command
## lattice::trellis.par.set(effectsTheme())
## to customize lattice options for effects plots.
## See ?effectsTheme for details.
library(ggplot2)
library(ggthemes)
library(randomForest)
## randomForest 4.6-14
## Type rfNews() to see new features/changes/bug fixes.
##
## Attaching package: 'randomForest'
## The following object is masked from 'package:dplyr':
##
## combine
## The following object is masked from 'package:ggplot2':
##
## margin
library(ranger)
##
## Attaching package: 'ranger'
## The following object is masked from 'package:randomForest':
##
## importance
library(rgl)
library(rattle)
## Rattle: A free graphical interface for data science with R.
## バージョン 5.3.0 Copyright (c) 2006-2018 Togaware Pty Ltd.
## 'rattle()' と入力して、データを多角的に分析します。
##
## Attaching package: 'rattle'
## The following object is masked from 'package:ranger':
##
## importance
## The following object is masked from 'package:randomForest':
##
## importance
library(readr)
library(rpart.plot)
## Loading required package: rpart
library(rpart)
library(readr)
library(reshape)
##
## Attaching package: 'reshape'
## The following object is masked from 'package:dplyr':
##
## rename
## The following object is masked from 'package:data.table':
##
## melt
library(rsconnect)
library(reshape2)
##
## Attaching package: 'reshape2'
## The following objects are masked from 'package:reshape':
##
## colsplit, melt, recast
## The following objects are masked from 'package:data.table':
##
## dcast, melt
library(tidyr)
##
## Attaching package: 'tidyr'
## The following object is masked from 'package:reshape2':
##
## smiths
## The following objects are masked from 'package:reshape':
##
## expand, smiths
library(xtable)
library(nnet)
library(stargazer)
##
## Please cite as:
## Hlavac, Marek (2018). stargazer: Well-Formatted Regression and Summary Statistics Tables.
## R package version 5.2.2. https://CRAN.R-project.org/package=stargazer
library(tidyverse)
## -- Attaching packages ------------------------------------------------ tidyverse 1.3.0 --
## √ tibble 3.0.0 √ stringr 1.4.0
## √ purrr 0.3.3 √ forcats 0.5.0
## -- Conflicts --------------------------------------------------- tidyverse_conflicts() --
## x dplyr::between() masks data.table::between()
## x randomForest::combine() masks dplyr::combine()
## x tidyr::expand() masks reshape::expand()
## x dplyr::filter() masks stats::filter()
## x dplyr::first() masks data.table::first()
## x dplyr::lag() masks stats::lag()
## x dplyr::last() masks data.table::last()
## x purrr::lift() masks caret::lift()
## x randomForest::margin() masks ggplot2::margin()
## x dplyr::recode() masks car::recode()
## x reshape::rename() masks dplyr::rename()
## x purrr::some() masks car::some()
## x purrr::transpose() masks data.table::transpose()
require(ranger)
library(makedummies)
# 下水道データ読み込み # 基本統計量表示(gesui) # 教科書ではlogit
gesui = read_csv("osui.csv")
## Parsed with column specification:
## cols(
## OBJECTID = col_double(),
## sys_name = col_double(),
## slope = col_double(),
## uedokaburi = col_double(),
## masuhonsuu = col_double(),
## long = col_double(),
## kubun = col_double(),
## did = col_double(),
## kouhou = col_double(),
## nendo = col_double(),
## ekijyouka = col_double(),
## kyouyounensuu = col_double(),
## kansyu = col_double(),
## kei = col_double(),
## kinkyuudo = col_double(),
## taisyo = col_double()
## )
gesui<- data.frame(gesui) # 教科書ではlogit
#OBJECTID列をデータから削除
gesui <- gesui[-1:-2]
stargazer(as.data.frame(gesui),type = "html")
| Statistic | N | Mean | St. Dev. | Min | Pctl(25) | Pctl(75) | Max |
| slope | 1,423 | 3.437 | 2.323 | -6 | 1.8 | 4.5 | 10 |
| uedokaburi | 1,423 | 4.371 | 2.475 | 0.360 | 2.727 | 4.949 | 13.863 |
| masuhonsuu | 1,423 | 1.338 | 1.808 | 0 | 0 | 2 | 13 |
| long | 1,423 | 35.129 | 18.972 | 0.970 | 21.445 | 46.510 | 196.280 |
| kubun | 1,423 | 1.204 | 0.403 | 1 | 1 | 1 | 2 |
| did | 1,423 | 0.696 | 0.460 | 0 | 0 | 1 | 1 |
| kouhou | 1,423 | 0.415 | 0.493 | 0 | 0 | 1 | 1 |
| nendo | 1,423 | 1,982.967 | 5.973 | 1,974 | 1,978 | 1,990 | 2,006 |
| ekijyouka | 1,423 | 0.396 | 0.611 | 0 | 0 | 1 | 4 |
| kyouyounensuu | 1,423 | 33.033 | 5.973 | 10 | 26 | 38 | 42 |
| kansyu | 1,423 | 1.198 | 0.399 | 1 | 1 | 1 | 2 |
| kei | 1,423 | 517.182 | 308.765 | 200 | 250 | 800 | 1,650 |
| kinkyuudo | 1,423 | 1.297 | 1.457 | 0 | 0 | 3 | 3 |
| taisyo | 1,423 | 0.448 | 0.497 | 0 | 0 | 1 | 1 |
gesui$kansyu <- as.factor(gesui$kansyu)
gesui$taisyo <- as.factor(gesui$taisyo)
gesui$kubun <- as.factor(gesui$kubun)
gesui$did <- as.factor(gesui$did)
gesui$ekijyouka <- as.factor(gesui$ekijyouka)
gesui$kouhou <- as.factor(gesui$kouhou)
http://ushi-goroshi.hatenablog.com/entry/2019/01/30/171259 目的変数がcharacterだと分類として扱ってくれない
http://zeema.hatenablog.com/entry/2017/09/04/003400
# ダミー化したい変数をセレクト
> dum <- cordata %>% select(Survived, Pclass, Sex, Embarked, Dfamsize, Title, group, Wom_chd)
# ダミー化しない変数をセレクト
> not_dum <- cordata %>% select(PassengerId, Name, Age, SibSp, Parch, Ticket, Fare, Cabin, famsize)
# makedummies()を使用してダミー変数を作成
> dummy_var <- makedummies(dum, basal_level = FALSE)
cordata <- gesui
# ダミー化したい変数をセレクト
dum <- cordata %>% select(kansyu, kubun, did, ekijyouka, kouhou)
# ダミー化しない変数をセレクト
not_dum <- cordata %>% select(slope, uedokaburi, masuhonsuu, long, kyouyounensuu, kei, taisyo)
# makedummies()を使用してダミー変数を作成
dummy_var <- makedummies(dum, basal_level = FALSE)
# 結合する
gesui <- cbind(dummy_var, not_dum)
stargazer(as.data.frame(gesui),type = "html")
| Statistic | N | Mean | St. Dev. | Min | Pctl(25) | Pctl(75) | Max |
| kansyu | 1,423 | 0.198 | 0.399 | 0 | 0 | 0 | 1 |
| kubun | 1,423 | 0.204 | 0.403 | 0 | 0 | 0 | 1 |
| did | 1,423 | 0.696 | 0.460 | 0 | 0 | 1 | 1 |
| ekijyouka_1 | 1,423 | 0.285 | 0.452 | 0 | 0 | 1 | 1 |
| ekijyouka_2 | 1,423 | 0.050 | 0.218 | 0 | 0 | 0 | 1 |
| ekijyouka_4 | 1,423 | 0.003 | 0.053 | 0 | 0 | 0 | 1 |
| kouhou | 1,423 | 0.415 | 0.493 | 0 | 0 | 1 | 1 |
| slope | 1,423 | 3.437 | 2.323 | -6 | 1.8 | 4.5 | 10 |
| uedokaburi | 1,423 | 4.371 | 2.475 | 0.360 | 2.727 | 4.949 | 13.863 |
| masuhonsuu | 1,423 | 1.338 | 1.808 | 0 | 0 | 2 | 13 |
| long | 1,423 | 35.129 | 18.972 | 0.970 | 21.445 | 46.510 | 196.280 |
| kyouyounensuu | 1,423 | 33.033 | 5.973 | 10 | 26 | 38 | 42 |
| kei | 1,423 | 517.182 | 308.765 | 200 | 250 | 800 | 1,650 |
exclude_cols = c("OBJECTID","kinkyuudo")
gesui = gesui[ !names(gesui) %in% exclude_cols ]
model = randomForest(taisyo ~ ., data = gesui, importance = TRUE)
model
##
## Call:
## randomForest(formula = taisyo ~ ., data = gesui, importance = TRUE)
## Type of random forest: classification
## Number of trees: 500
## No. of variables tried at each split: 3
##
## OOB estimate of error rate: 25.79%
## Confusion matrix:
## 0 1 class.error
## 0 638 148 0.1882952
## 1 219 418 0.3437991
predition = predict(model, gesui)
model$importance
## 0 1 MeanDecreaseAccuracy MeanDecreaseGini
## kansyu 4.233214e-02 0.0189491159 0.0318739036 17.8666437
## kubun 1.287404e-02 0.0051269519 0.0093787262 11.6992013
## did 1.242598e-02 0.0328468558 0.0215384255 18.2008846
## ekijyouka_1 2.280584e-02 0.0133320809 0.0185718149 17.5399447
## ekijyouka_2 2.542344e-03 0.0062433939 0.0042011843 8.1715758
## ekijyouka_4 -8.997764e-05 -0.0002070925 -0.0001412558 0.4825711
## kouhou 4.662527e-03 0.0218314541 0.0123295764 13.6776431
## slope 2.967735e-02 0.0247092254 0.0274431153 103.1305249
## uedokaburi 1.544906e-02 0.0505868613 0.0311946011 117.4479910
## masuhonsuu 8.917230e-03 0.0050808791 0.0071816455 39.4057850
## long 1.598637e-02 0.0314113622 0.0229602062 106.9996257
## kyouyounensuu 7.683942e-02 0.0611672393 0.0697890455 85.9681105
## kei 3.835561e-02 0.0518697734 0.0443317643 57.8421030
varImpPlot(model)
predition
## 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16
## 0 1 1 0 1 0 1 1 1 1 0 0 1 1 0 0
## 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32
## 0 1 1 0 0 1 1 1 1 0 0 1 1 0 1 1
## 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48
## 1 1 0 0 1 1 1 1 1 1 1 1 1 1 1 1
## 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64
## 1 0 1 1 1 1 0 0 0 1 0 1 1 1 0 0
## 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80
## 0 1 1 1 1 0 0 1 0 0 1 1 0 0 1 0
## 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96
## 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0
## 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112
## 0 0 1 0 0 0 0 1 0 0 0 1 0 0 1 0
## 113 114 115 116 117 118 119 120 121 122 123 124 125 126 127 128
## 1 0 1 0 0 1 0 1 0 0 0 1 0 0 0 0
## 129 130 131 132 133 134 135 136 137 138 139 140 141 142 143 144
## 1 0 0 0 0 0 1 0 1 1 1 1 0 0 0 0
## 145 146 147 148 149 150 151 152 153 154 155 156 157 158 159 160
## 0 0 0 1 0 0 0 0 0 0 0 0 1 0 0 0
## 161 162 163 164 165 166 167 168 169 170 171 172 173 174 175 176
## 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0
## 177 178 179 180 181 182 183 184 185 186 187 188 189 190 191 192
## 0 0 0 0 0 1 1 1 0 1 0 0 0 0 0 0
## 193 194 195 196 197 198 199 200 201 202 203 204 205 206 207 208
## 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 1
## 209 210 211 212 213 214 215 216 217 218 219 220 221 222 223 224
## 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0
## 225 226 227 228 229 230 231 232 233 234 235 236 237 238 239 240
## 0 0 0 0 0 0 1 1 0 1 1 1 1 1 1 0
## 241 242 243 244 245 246 247 248 249 250 251 252 253 254 255 256
## 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1
## 257 258 259 260 261 262 263 264 265 266 267 268 269 270 271 272
## 1 1 1 1 1 1 0 1 1 0 0 1 1 1 1 1
## 273 274 275 276 277 278 279 280 281 282 283 284 285 286 287 288
## 0 0 0 1 1 1 1 0 1 1 1 0 0 0 0 0
## 289 290 291 292 293 294 295 296 297 298 299 300 301 302 303 304
## 0 0 0 0 0 0 0 1 1 1 0 1 0 0 0 1
## 305 306 307 308 309 310 311 312 313 314 315 316 317 318 319 320
## 0 1 0 0 0 0 0 0 1 0 0 0 0 1 1 1
## 321 322 323 324 325 326 327 328 329 330 331 332 333 334 335 336
## 1 1 1 1 1 0 0 0 0 0 0 0 1 1 1 1
## 337 338 339 340 341 342 343 344 345 346 347 348 349 350 351 352
## 1 1 0 1 0 1 1 1 1 1 0 1 1 1 0 1
## 353 354 355 356 357 358 359 360 361 362 363 364 365 366 367 368
## 1 1 1 0 1 1 0 0 0 0 0 1 1 1 1 1
## 369 370 371 372 373 374 375 376 377 378 379 380 381 382 383 384
## 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0
## 385 386 387 388 389 390 391 392 393 394 395 396 397 398 399 400
## 0 1 1 1 1 1 1 1 0 1 1 0 1 1 1 1
## 401 402 403 404 405 406 407 408 409 410 411 412 413 414 415 416
## 1 0 1 1 1 0 1 0 0 0 0 0 0 0 0 0
## 417 418 419 420 421 422 423 424 425 426 427 428 429 430 431 432
## 0 0 0 1 0 1 1 0 1 0 1 1 1 1 1 1
## 433 434 435 436 437 438 439 440 441 442 443 444 445 446 447 448
## 1 1 1 1 0 1 1 1 1 1 1 1 1 0 1 1
## 449 450 451 452 453 454 455 456 457 458 459 460 461 462 463 464
## 0 1 0 0 1 1 1 1 1 1 1 1 1 0 0 0
## 465 466 467 468 469 470 471 472 473 474 475 476 477 478 479 480
## 0 1 1 0 0 0 1 0 1 1 0 0 0 0 0 1
## 481 482 483 484 485 486 487 488 489 490 491 492 493 494 495 496
## 1 1 1 1 1 1 1 1 0 0 1 1 1 1 1 0
## 497 498 499 500 501 502 503 504 505 506 507 508 509 510 511 512
## 1 1 1 0 1 0 1 1 1 0 1 0 1 0 1 1
## 513 514 515 516 517 518 519 520 521 522 523 524 525 526 527 528
## 1 1 0 1 1 0 0 0 0 1 1 1 1 1 1 1
## 529 530 531 532 533 534 535 536 537 538 539 540 541 542 543 544
## 1 1 0 1 1 1 1 1 1 1 1 1 1 0 1 1
## 545 546 547 548 549 550 551 552 553 554 555 556 557 558 559 560
## 0 1 0 0 1 0 0 1 0 0 0 1 0 1 0 0
## 561 562 563 564 565 566 567 568 569 570 571 572 573 574 575 576
## 0 0 0 0 0 1 1 1 0 0 1 0 0 0 0 0
## 577 578 579 580 581 582 583 584 585 586 587 588 589 590 591 592
## 0 0 1 0 0 0 1 0 1 1 0 1 1 0 0 1
## 593 594 595 596 597 598 599 600 601 602 603 604 605 606 607 608
## 0 0 1 0 0 0 0 0 0 0 0 0 1 1 1 0
## 609 610 611 612 613 614 615 616 617 618 619 620 621 622 623 624
## 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1
## 625 626 627 628 629 630 631 632 633 634 635 636 637 638 639 640
## 1 1 1 1 0 1 0 0 1 0 1 0 0 0 0 1
## 641 642 643 644 645 646 647 648 649 650 651 652 653 654 655 656
## 1 0 1 1 0 0 1 1 0 0 0 1 0 1 1 1
## 657 658 659 660 661 662 663 664 665 666 667 668 669 670 671 672
## 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0
## 673 674 675 676 677 678 679 680 681 682 683 684 685 686 687 688
## 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0
## 689 690 691 692 693 694 695 696 697 698 699 700 701 702 703 704
## 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0
## 705 706 707 708 709 710 711 712 713 714 715 716 717 718 719 720
## 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0
## 721 722 723 724 725 726 727 728 729 730 731 732 733 734 735 736
## 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0
## 737 738 739 740 741 742 743 744 745 746 747 748 749 750 751 752
## 0 0 0 1 1 0 1 0 0 0 1 0 0 0 0 0
## 753 754 755 756 757 758 759 760 761 762 763 764 765 766 767 768
## 0 0 1 1 0 0 1 0 0 0 0 0 0 0 1 0
## 769 770 771 772 773 774 775 776 777 778 779 780 781 782 783 784
## 0 0 0 1 0 0 0 0 1 0 0 0 0 0 0 1
## 785 786 787 788 789 790 791 792 793 794 795 796 797 798 799 800
## 0 1 0 0 0 0 0 0 0 0 0 1 1 1 0 1
## 801 802 803 804 805 806 807 808 809 810 811 812 813 814 815 816
## 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1
## 817 818 819 820 821 822 823 824 825 826 827 828 829 830 831 832
## 1 1 0 1 0 0 0 1 0 0 0 0 1 0 0 1
## 833 834 835 836 837 838 839 840 841 842 843 844 845 846 847 848
## 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1
## 849 850 851 852 853 854 855 856 857 858 859 860 861 862 863 864
## 0 0 0 1 1 1 0 1 1 0 0 0 0 0 0 1
## 865 866 867 868 869 870 871 872 873 874 875 876 877 878 879 880
## 0 0 0 0 1 1 0 0 0 1 0 1 1 0 0 0
## 881 882 883 884 885 886 887 888 889 890 891 892 893 894 895 896
## 0 0 1 0 1 1 0 1 1 1 1 1 1 1 0 0
## 897 898 899 900 901 902 903 904 905 906 907 908 909 910 911 912
## 0 1 0 0 1 1 1 1 0 0 1 1 1 1 1 1
## 913 914 915 916 917 918 919 920 921 922 923 924 925 926 927 928
## 1 1 1 1 1 1 1 0 1 1 1 0 1 1 0 0
## 929 930 931 932 933 934 935 936 937 938 939 940 941 942 943 944
## 1 1 1 0 1 1 1 0 0 0 1 1 1 0 1 0
## 945 946 947 948 949 950 951 952 953 954 955 956 957 958 959 960
## 1 0 0 0 1 1 0 0 0 0 0 0 0 0 0 0
## 961 962 963 964 965 966 967 968 969 970 971 972 973 974 975 976
## 0 0 0 1 0 1 1 0 1 0 0 0 0 0 0 1
## 977 978 979 980 981 982 983 984 985 986 987 988 989 990 991 992
## 0 0 0 0 0 0 0 1 0 0 0 1 1 1 0 1
## 993 994 995 996 997 998 999 1000 1001 1002 1003 1004 1005 1006 1007 1008
## 1 1 1 1 1 1 0 1 1 1 0 1 0 0 0 1
## 1009 1010 1011 1012 1013 1014 1015 1016 1017 1018 1019 1020 1021 1022 1023 1024
## 1 1 1 1 1 1 1 1 1 0 0 1 0 1 1 0
## 1025 1026 1027 1028 1029 1030 1031 1032 1033 1034 1035 1036 1037 1038 1039 1040
## 0 0 0 1 0 1 0 0 0 0 0 0 0 0 1 0
## 1041 1042 1043 1044 1045 1046 1047 1048 1049 1050 1051 1052 1053 1054 1055 1056
## 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0
## 1057 1058 1059 1060 1061 1062 1063 1064 1065 1066 1067 1068 1069 1070 1071 1072
## 0 0 0 1 0 0 1 1 0 1 0 0 0 1 1 1
## 1073 1074 1075 1076 1077 1078 1079 1080 1081 1082 1083 1084 1085 1086 1087 1088
## 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1
## 1089 1090 1091 1092 1093 1094 1095 1096 1097 1098 1099 1100 1101 1102 1103 1104
## 1 1 1 0 0 1 1 1 1 1 1 1 1 1 1 1
## 1105 1106 1107 1108 1109 1110 1111 1112 1113 1114 1115 1116 1117 1118 1119 1120
## 1 1 1 0 1 0 0 0 0 1 1 1 1 1 1 0
## 1121 1122 1123 1124 1125 1126 1127 1128 1129 1130 1131 1132 1133 1134 1135 1136
## 1 1 1 0 0 1 1 1 1 1 1 1 0 0 1 0
## 1137 1138 1139 1140 1141 1142 1143 1144 1145 1146 1147 1148 1149 1150 1151 1152
## 0 0 1 1 1 0 0 0 0 0 0 1 1 0 0 1
## 1153 1154 1155 1156 1157 1158 1159 1160 1161 1162 1163 1164 1165 1166 1167 1168
## 1 0 1 0 0 1 0 0 0 0 0 0 0 1 1 0
## 1169 1170 1171 1172 1173 1174 1175 1176 1177 1178 1179 1180 1181 1182 1183 1184
## 0 0 0 0 0 0 0 1 0 0 0 0 0 1 0 1
## 1185 1186 1187 1188 1189 1190 1191 1192 1193 1194 1195 1196 1197 1198 1199 1200
## 0 0 0 1 1 1 0 0 0 1 0 1 0 0 0 1
## 1201 1202 1203 1204 1205 1206 1207 1208 1209 1210 1211 1212 1213 1214 1215 1216
## 0 1 1 1 1 1 1 0 0 0 1 1 1 1 1 0
## 1217 1218 1219 1220 1221 1222 1223 1224 1225 1226 1227 1228 1229 1230 1231 1232
## 0 0 1 0 0 0 0 0 0 1 1 0 0 0 0 0
## 1233 1234 1235 1236 1237 1238 1239 1240 1241 1242 1243 1244 1245 1246 1247 1248
## 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1
## 1249 1250 1251 1252 1253 1254 1255 1256 1257 1258 1259 1260 1261 1262 1263 1264
## 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1
## 1265 1266 1267 1268 1269 1270 1271 1272 1273 1274 1275 1276 1277 1278 1279 1280
## 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0
## 1281 1282 1283 1284 1285 1286 1287 1288 1289 1290 1291 1292 1293 1294 1295 1296
## 1 1 0 0 0 0 0 0 0 1 0 1 0 0 0 1
## 1297 1298 1299 1300 1301 1302 1303 1304 1305 1306 1307 1308 1309 1310 1311 1312
## 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0
## 1313 1314 1315 1316 1317 1318 1319 1320 1321 1322 1323 1324 1325 1326 1327 1328
## 0 0 0 1 1 0 0 0 0 0 0 0 0 0 0 1
## 1329 1330 1331 1332 1333 1334 1335 1336 1337 1338 1339 1340 1341 1342 1343 1344
## 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0
## 1345 1346 1347 1348 1349 1350 1351 1352 1353 1354 1355 1356 1357 1358 1359 1360
## 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0
## 1361 1362 1363 1364 1365 1366 1367 1368 1369 1370 1371 1372 1373 1374 1375 1376
## 0 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0
## 1377 1378 1379 1380 1381 1382 1383 1384 1385 1386 1387 1388 1389 1390 1391 1392
## 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0
## 1393 1394 1395 1396 1397 1398 1399 1400 1401 1402 1403 1404 1405 1406 1407 1408
## 0 1 0 0 0 0 1 0 1 1 0 0 0 0 0 0
## 1409 1410 1411 1412 1413 1414 1415 1416 1417 1418 1419 1420 1421 1422 1423
## 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0
## Levels: 0 1
#gesui$kinkyuudo <- as.factor(gesui$kinkyuudo)
names(gesui)
## [1] "kansyu" "kubun" "did" "ekijyouka_1"
## [5] "ekijyouka_2" "ekijyouka_4" "kouhou" "slope"
## [9] "uedokaburi" "masuhonsuu" "long" "kyouyounensuu"
## [13] "kei" "taisyo"
train <- gesui[1:300,]
test <- gesui[301:478,]
model1 = randomForest(taisyo ~ ., data = gesui)
model1
##
## Call:
## randomForest(formula = taisyo ~ ., data = gesui)
## Type of random forest: classification
## Number of trees: 500
## No. of variables tried at each split: 3
##
## OOB estimate of error rate: 25.72%
## Confusion matrix:
## 0 1 class.error
## 0 637 149 0.1895674
## 1 217 420 0.3406593
model2 = randomForest(taisyo ~ ., data = train)
model2
##
## Call:
## randomForest(formula = taisyo ~ ., data = train)
## Type of random forest: classification
## Number of trees: 500
## No. of variables tried at each split: 3
##
## OOB estimate of error rate: 25.33%
## Confusion matrix:
## 0 1 class.error
## 0 127 34 0.2111801
## 1 42 97 0.3021583
#参考資料ではimportance(model1)で変数の重みが算出される事になっているが、実際にはmodel1$importanceでないと算出できない。
model1$importance
## MeanDecreaseGini
## kansyu 18.068534
## kubun 11.563438
## did 17.674304
## ekijyouka_1 17.577241
## ekijyouka_2 8.306091
## ekijyouka_4 0.536363
## kouhou 13.759117
## slope 104.437746
## uedokaburi 116.318594
## masuhonsuu 38.296165
## long 108.454028
## kyouyounensuu 87.220616
## kei 58.565176
model2$importance
## MeanDecreaseGini
## kansyu 4.641374
## kubun 2.687339
## did 5.118726
## ekijyouka_1 5.059292
## ekijyouka_2 4.475664
## ekijyouka_4 0.000000
## kouhou 2.647003
## slope 22.368325
## uedokaburi 24.359465
## masuhonsuu 10.020120
## long 27.845245
## kyouyounensuu 16.267887
## kei 11.603067
varImpPlot(model1)
varImpPlot(model2)
ランダムフォレストチューニング(データ検証)
http://sfchaos.hatenablog.com/entry/20150628/p1
#注1:set.seed(123)乱数発生 https://qiita.com/aich_08_/items/6d885c91c9d461514018
まずは単純にtuneRF関数を実行してみる。特別な設定を行わずにtuneRF関数を実行してみよう。tuneRF関数の第1引数には説明変数、第2引数には目的変数を指定する。また、doBest引数をTRUEに指定すると、評価が最も良いモデルを返すようになる。
dim(gesui)
## [1] 1423 14
sapply(gesui, class)
## kansyu kubun did ekijyouka_1 ekijyouka_2
## "integer" "integer" "integer" "integer" "integer"
## ekijyouka_4 kouhou slope uedokaburi masuhonsuu
## "integer" "integer" "numeric" "numeric" "numeric"
## long kyouyounensuu kei taisyo
## "numeric" "numeric" "numeric" "factor"
head(gesui)
## kansyu kubun did ekijyouka_1 ekijyouka_2 ekijyouka_4 kouhou slope uedokaburi
## 1 0 0 1 1 0 0 1 0.00 3.758000
## 2 0 1 1 1 0 0 0 3.90 3.763000
## 3 0 1 1 1 0 0 0 1.32 3.538794
## 4 1 1 1 1 0 0 0 1.22 1.054575
## 5 1 1 1 0 0 0 0 2.50 1.533001
## 6 0 0 1 1 0 0 1 3.50 4.122386
## masuhonsuu long kyouyounensuu kei taisyo
## 1 0 9.94 33 1100 0
## 2 0 15.40 40 800 1
## 3 1 14.85 40 250 1
## 4 1 3.39 12 200 0
## 5 0 7.78 28 250 1
## 6 0 9.75 40 1100 0
set.seed(123)#注1
gesui.tune <- tuneRF(gesui %>% select(-taisyo) ,# 説明変数
gesui$taisyo, # 目的変数
doBest = T) #分岐に使う変数の数(mtry)を求めるフラグ
## mtry = 3 OOB error = 26.91%
## Searching left ...
## mtry = 2 OOB error = 27.97%
## -0.03916449 0.05
## Searching right ...
## mtry = 6 OOB error = 26.63%
## 0.01044386 0.05
この結果、分岐に使う特徴量の個数(mtry)が
3個のときにOut-of-Bag誤差(OOB error)は26.91%、2個のときは27.97%、6個のときは26.63%
となった。今回の実行では6個のときのOOB誤差が最小だが、3個との差はわずかである*1
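なお、mtryごとのOOB誤差の一覧を自分で確認したい場合は、doBest = FALSE のまま実行すると mtry と OOBError の対応表(行列)が返ります。以下はその想定で書いたスケッチです。
set.seed(123)
res <- tuneRF(gesui %>% select(-taisyo),  # 説明変数
              gesui$taisyo,               # 目的変数
              ntreeTry = 500, doBest = FALSE, plot = FALSE)
res                                        # 列: mtry, OOBError
plot(res[, "mtry"], res[, "OOBError"], type = "b",
     xlab = "mtry", ylab = "OOB error")    # mtryとOOB誤差の関係を図にする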
構築する決定木の個数を増やしてみる ntreeTry引数はデフォルトでは50となっており,50個の決定木を構築することがわかる.500個の決定木を構築するように指定してみよう.
set.seed(123)#注1
gesui.tune <- tuneRF(gesui %>% select(-taisyo) ,# 説明変数
gesui$taisyo, # 目的変数
ntreeTry=500, #決定木数
trace = TRUE,
doBest = T)
## mtry = 3 OOB error = 26%
## Searching left ...
## mtry = 2 OOB error = 27.06%
## -0.04054054 0.05
## Searching right ...
## mtry = 6 OOB error = 27.2%
## -0.04594595 0.05
ntreeTry=500にすると、mtryが3個のときにOut-of-Bag誤差(OOB error)が最小(26%)となる。
チューニングで求めたmtry(tuneRF()の結果オブジェクトの$mtryに入っています)を、randomForest()の引数mtryに指定します。
gesui.rf <- randomForest( # 予測、分類器の構築
taisyo ~ ., # モデル式
data = gesui, # データ
mtry = gesui.tune$mtry) # 分岐に使う変数の数
gesui.rf
##
## Call:
## randomForest(formula = taisyo ~ ., data = gesui, mtry = gesui.tune$mtry)
## Type of random forest: classification
## Number of trees: 500
## No. of variables tried at each split: 3
##
## OOB estimate of error rate: 26.21%
## Confusion matrix:
## 0 1 class.error
## 0 632 154 0.1959288
## 1 219 418 0.3437991
x=gesui.rf$importance
出力結果の読み方 OOB estimate of error rate:誤判別率。Confusion matrix:行が実際のクラス、列が予測クラス。上の例では、実際に”0”(緊急度3以下)だった786個のうち632個が”0”、154個が”1”と予測され、実際に”1”だった637個のうち418個が”1”と予測されたと読み取れます。
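読み方の確認として、混同行列から正解率(=1−誤判別率)を計算してみると次のようになります(スケッチ)。
cm <- gesui.rf$confusion[, 1:2]     # 行=実際のクラス、列=予測クラス(class.error列は除く)
sum(diag(cm)) / sum(cm)             # 正解率
1 - sum(diag(cm)) / sum(cm)         # OOB誤判別率に対応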
#重要度順のグラフを出力
rank <- data.frame(x) # 重要度のリストをデータフレームに変換
rank$factor <- rownames(rank) # 行名になっている要因をデータフレームに追加
rank <- rank[order(rank[,1], decreasing=T),] # 重要度(偏回帰係数的なもの)順に並び替え
rownames(rank) <- 1:nrow(rank) # ランキングを行名にする
rank
## MeanDecreaseGini factor
## 1 116.4457660 uedokaburi
## 2 107.4025665 long
## 3 103.7218818 slope
## 4 87.5391129 kyouyounensuu
## 5 58.9119751 kei
## 6 38.9347577 masuhonsuu
## 7 18.2252375 kansyu
## 8 17.9408333 did
## 9 16.6045116 ekijyouka_1
## 10 12.9843072 kouhou
## 11 11.1536215 kubun
## 12 8.7862046 ekijyouka_2
## 13 0.4363132 ekijyouka_4
重要度順のグラフを出力
varImpPlot(gesui.rf)
#plot(gesui, col=c(2, 3, 4)[gesui$kionkyudo])
plot(gesui, col=c(2, 3)[gesui$taisyo])
# 下水道データ読み込み # 基本統計量表示(gesui) # 教科書ではlogit
gesui = read_csv("gesuidou.csv")
## Parsed with column specification:
## cols(
## OBJECTID = col_double(),
## slope = col_double(),
## long = col_double(),
## uedokaburi = col_double(),
## sitadokaburi = col_double(),
## masuhonsuu = col_double(),
## nendo = col_double(),
## kei = col_double(),
## kubun = col_double(),
## did = col_double(),
## kouhou = col_double(),
## ekijyouka = col_double(),
## kansyu = col_double(),
## kinkyuudo = col_double(),
## taisyo = col_double()
## )
gesui<- data.frame(gesui) # 教科書ではlogit
gesui$kansyu <- as.factor(gesui$kansyu)
gesui$taisyo <- as.factor(gesui$taisyo)
gesui$kubun <- as.factor(gesui$kubun)
gesui$did <- as.factor(gesui$did)
gesui$ekijyouka <- as.factor(gesui$ekijyouka)
#gesui <- gesui[-1] #OBJECTID列をデータから削除
exclude_cols = c("OBJECTID","sys_name")
gesui = gesui[ !names(gesui) %in% exclude_cols ]
set.seed(123)#注1
gesui.tune <- tuneRF(gesui %>% select(-kinkyuudo) ,# 説明変数
gesui$kinkyuudo, # 目的変数
ntreeTry=500, #決定木数
trace = TRUE,
doBest = T)
## Warning in randomForest.default(x, y, mtry = mtryStart, ntree = ntreeTry, :
## The response has five or fewer unique values. Are you sure you want to do
## regression?
## mtry = 4 OOB error = 1.480305
## Searching left ...
## Warning in randomForest.default(x, y, mtry = mtryCur, ntree = ntreeTry, :
## The response has five or fewer unique values. Are you sure you want to do
## regression?
## mtry = 2 OOB error = 1.486863
## -0.004430167 0.05
## Searching right ...
## Warning in randomForest.default(x, y, mtry = mtryCur, ntree = ntreeTry, :
## The response has five or fewer unique values. Are you sure you want to do
## regression?
## mtry = 8 OOB error = 1.519686
## -0.02660376 0.05
## Warning in randomForest.default(x, y, mtry = res[which.min(res[, 2]), 1], :
## The response has five or fewer unique values. Are you sure you want to do
## regression?
目的変数kinkyuudoが数値のままなので回帰として扱われ(上の警告はそのため)、mtryが4個のときにOOB誤差(ここでは平均二乗誤差)が最小(約1.48)となる。
チューニングで求めたmtry(tuneRF()の結果オブジェクトの$mtryに入っています)を、randomForest()の引数mtryに指定します。
gesui.rf <- randomForest( # 予測、分類器の構築
kinkyuudo ~ ., # モデル式
data = gesui, # データ
mtry = gesui.tune$mtry) # 分岐に使う変数の数
## Warning in randomForest.default(m, y, ...): The response has five or fewer
## unique values. Are you sure you want to do regression?
gesui.rf
##
## Call:
## randomForest(formula = kinkyuudo ~ ., data = gesui, mtry = gesui.tune$mtry)
## Type of random forest: regression
## Number of trees: 500
## No. of variables tried at each split: 4
##
## Mean of squared residuals: 1.476213
## % Var explained: 30.78
x=gesui.rf$importance
出力結果の読み方 目的変数を数値のまま扱ったため回帰となり、混同行列は表示されない。Mean of squared residuals:OOB予測の平均二乗残差。% Var explained:目的変数の分散のうちモデルで説明できた割合(上の例では約30.8%)。
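確認のため、OOB予測値から平均二乗残差と寄与率を手計算するスケッチを示します(randomForest内部の計算とは丸め等で僅かに異なる可能性があります)。
oob_pred <- gesui.rf$predicted                                      # OOB予測値
mse <- mean((gesui$kinkyuudo - oob_pred)^2)                         # Mean of squared residuals に対応
rsq <- 1 - mse / mean((gesui$kinkyuudo - mean(gesui$kinkyuudo))^2)  # % Var explained / 100 に対応
c(MSE = mse, R2 = rsq)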
#重要度順のグラフを出力
rank <- data.frame(x) # 重要度のリストをデータフレームに変換
rank$factor <- rownames(rank) # 行名になっている要因をデータフレームに追加
rank <- rank[order(rank[,1], decreasing=T),] # 重要度(偏回帰係数的なもの)順に並び替え
rownames(rank) <- 1:nrow(rank) # ランキングを行名にする
rank
## IncNodePurity factor
## 1 137.118695 long
## 2 132.486026 sitadokaburi
## 3 130.789376 slope
## 4 124.661796 uedokaburi
## 5 116.354171 kansyu
## 6 69.429390 nendo
## 7 62.659995 kei
## 8 47.334756 masuhonsuu
## 9 38.187225 ekijyouka
## 10 19.239761 did
## 11 14.344117 kouhou
## 12 9.550413 taisyo
## 13 5.626238 kubun
重要度順のグラフを出力
varImpPlot(gesui.rf)
plot(gesui, col=c(2, 3, 4)[gesui$kinkyuudo])
# 下水道データ読み込み # 基本統計量表示(gesui) # 教科書ではlogit
#gesui = read_csv("osui2.csv")
gesui = read_csv("enbi.csv")
## Parsed with column specification:
## cols(
## OBJECTID = col_double(),
## sys_name = col_double(),
## slope = col_double(),
## uedokaburi = col_double(),
## masuhonsuu = col_double(),
## long = col_double(),
## kubun = col_double(),
## did = col_double(),
## kouhou = col_double(),
## nendo = col_double(),
## ekijyouka = col_double(),
## kyouyounensuu = col_double(),
## kansyu = col_double(),
## kei = col_double(),
## kinkyuudo = col_double(),
## taisyo = col_double()
## )
gesui <- data.frame(gesui) # 教科書ではlogit
#testデータの行番号取得
#randomgesui<-sample(282,200)
#train <- gesui[randomgesui,]
#test <-gesui[-randomgesui,]
#cat(test$sys_name, file = "testrow.txt",append=FALSE)
#write.table(test,"testoutput.txt", quote=F,
# col.names=T, append=T)
gesui <- gesui[-1:-2] #OBJECTID,sys_name列をデータから削除
gesui <- gesui[-13]
gesui <- gesui[-8]
gesui <- gesui[-10]
gesui2 <- gesui
stargazer(as.data.frame(gesui),type = "html")
| Statistic | N | Mean | St. Dev. | Min | Pctl(25) | Pctl(75) | Max |
| slope | 282 | 3.309 | 2.017 | 0.000 | 1.900 | 4.100 | 9.900 |
| uedokaburi | 282 | 4.218 | 2.570 | 1.009 | 2.462 | 5.397 | 13.385 |
| masuhonsuu | 282 | 1.284 | 1.765 | 0 | 0 | 2 | 11 |
| long | 282 | 31.300 | 15.309 | 0.970 | 21.325 | 40.492 | 96.820 |
| kubun | 282 | 1.209 | 0.407 | 1 | 1 | 1 | 2 |
| did | 282 | 0.766 | 0.424 | 0 | 1 | 1 | 1 |
| kouhou | 282 | 0.337 | 0.473 | 0 | 0 | 1 | 1 |
| ekijyouka | 282 | 0.202 | 0.402 | 0 | 0 | 0 | 1 |
| kyouyounensuu | 282 | 27.514 | 5.204 | 10 | 25 | 27 | 40 |
| kei | 282 | 390.248 | 162.287 | 200 | 250 | 600 | 900 |
| taisyo | 282 | 0.312 | 0.464 | 0 | 0 | 1 | 1 |
gesui$taisyo <- as.factor(gesui$taisyo)
#gesui$kansyu <- as.factor(gesui$kansyu)
gesui$kubun <- as.factor(gesui$kubun)
gesui$did <- as.factor(gesui$did)
gesui$ekijyouka <- as.factor(gesui$ekijyouka)
#gesui$kinkyuudo <- as.factor(gesui$kinkyuudo)
sapply(gesui, class)
## slope uedokaburi masuhonsuu long kubun
## "numeric" "numeric" "numeric" "numeric" "factor"
## did kouhou ekijyouka kyouyounensuu kei
## "factor" "numeric" "factor" "numeric" "numeric"
## taisyo
## "factor"
summary(gesui)
## slope uedokaburi masuhonsuu long kubun
## Min. :0.000 Min. : 1.009 Min. : 0.000 Min. : 0.97 1:223
## 1st Qu.:1.900 1st Qu.: 2.462 1st Qu.: 0.000 1st Qu.:21.32 2: 59
## Median :2.685 Median : 3.402 Median : 1.000 Median :30.06
## Mean :3.309 Mean : 4.218 Mean : 1.284 Mean :31.30
## 3rd Qu.:4.100 3rd Qu.: 5.397 3rd Qu.: 2.000 3rd Qu.:40.49
## Max. :9.900 Max. :13.385 Max. :11.000 Max. :96.82
## did kouhou ekijyouka kyouyounensuu kei taisyo
## 0: 66 Min. :0.0000 0:225 Min. :10.00 Min. :200.0 0:194
## 1:216 1st Qu.:0.0000 1: 57 1st Qu.:25.00 1st Qu.:250.0 1: 88
## Median :0.0000 Median :25.00 Median :250.0
## Mean :0.3369 Mean :27.51 Mean :390.2
## 3rd Qu.:1.0000 3rd Qu.:27.00 3rd Qu.:600.0
## Max. :1.0000 Max. :40.00 Max. :900.0
cordata <- gesui
# ダミー化したい変数をセレクト
dum <- cordata %>% select( kubun, did, ekijyouka, kouhou)
# ダミー化しない変数をセレクト
not_dum <- cordata %>% select(slope, uedokaburi, masuhonsuu, long, kyouyounensuu, kei, taisyo)
# makedummies()を使用してダミー変数を作成
dummy_var <- makedummies(dum, basal_level = FALSE)
# 結合する
gesui <- cbind(dummy_var, not_dum)
randomgesui<-sample(282,200)
train <- gesui[randomgesui,]
test <-gesui[-randomgesui,]
gesui <- train
mean(train$taisyo == 1)#train平均値
## [1] 0.325
mean(test$taisyo == 1)#test平均値
## [1] 0.2804878
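trainとtestでtaisyoの比率が少しずれているのが気になる場合は、層化抽出で分割する方法もあります。以下は、分割前の282行のデータをfull_dataという仮の名前で持っている想定のスケッチです。
set.seed(123)
idx   <- createDataPartition(full_data$taisyo, p = 200/282, list = FALSE)  # taisyoの比率を保って抽出
train <- full_data[idx, ]
test  <- full_data[-idx, ]
mean(train$taisyo == 1)   # train/testでほぼ同じ比率になる
mean(test$taisyo == 1)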
https://shohei-doi.github.io/notes/posts/2019-05-27-cross-validation/
#変数を選択しない場合
vote_rf2 <- train(
taisyo ~ .,
data = train,
method = "rf"
)
train_rf2 <- confusionMatrix(predict(vote_rf2, train), train$taisyo)
test_rf2 <- confusionMatrix(predict(vote_rf2, test), test$taisyo)
print(str_c("Accuracy (train):", train_rf2$overall[1]))
## [1] "Accuracy (train):0.99"
print(str_c("Accuracy (test) :", test_rf2$overall[1]))
## [1] "Accuracy (test) :0.865853658536585"
gesui.rf2 <- randomForest( # 予測、分類器の構築
taisyo ~ ., # モデル式
data = train,# データ
importance = TRUE)
predrandam = predict(gesui.rf2, train)
predrandam
## 108 115 223 65 28 79 74 134 18 255 70 225 259 227 272 184 242 197 230 1
## 0 0 0 1 1 0 0 1 0 0 0 0 0 0 0 1 0 1 0 0
## 14 282 25 84 82 155 104 94 277 5 69 80 217 145 181 39 38 280 194 182
## 0 0 0 0 0 1 0 0 0 1 0 0 1 0 1 0 0 1 1 1
## 254 73 36 260 167 208 60 151 45 68 102 239 229 266 275 128 130 121 66 270
## 0 0 0 0 1 1 1 0 0 1 1 0 0 0 0 0 0 1 0 0
## 47 172 88 249 264 202 183 161 54 176 171 21 226 144 222 237 91 257 111 101
## 0 0 0 0 0 1 1 0 0 1 0 0 0 1 1 0 0 0 0 1
## 232 20 190 117 245 258 200 124 164 46 215 174 261 62 148 162 83 87 35 193
## 0 0 1 1 0 0 1 0 0 0 1 0 0 1 0 0 0 0 0 1
## 139 201 219 136 3 132 220 180 126 158 142 137 195 207 16 23 30 98 154 4
## 0 1 1 0 0 0 1 1 0 0 1 0 1 1 0 0 0 1 0 0
## 244 7 160 125 192 133 43 113 106 177 210 281 153 271 211 246 238 118 129 99
## 0 1 0 0 1 0 0 0 0 0 1 0 0 0 1 0 0 1 1 0
## 103 179 216 198 112 253 85 186 58 71 75 173 131 187 204 248 37 191 140 141
## 0 0 1 1 0 0 1 0 1 0 0 0 0 1 1 0 0 0 0 0
## 53 8 56 185 9 228 221 178 209 51 147 152 233 13 2 199 165 11 19 52
## 0 1 1 1 0 0 1 0 0 0 1 0 0 0 1 1 0 0 1 0
## 31 33 278 273 50 150 59 188 175 95 146 34 61 276 143 212 157 274 262 77
## 0 0 0 0 0 0 1 1 0 1 1 0 1 0 1 1 0 0 0 0
## Levels: 0 1
table(predrandam,train$taisyo)
##
## predrandam 0 1
## 0 135 0
## 1 0 65
x=gesui.rf2$importance
x
## 0 1 MeanDecreaseAccuracy MeanDecreaseGini
## kubun 0.010002373 0.0093517937 0.009684711 1.857444
## did 0.003650259 0.0192575109 0.008639808 2.079759
## ekijyouka 0.015888868 -0.0008110953 0.010502403 2.862351
## kouhou 0.011129744 0.0059055140 0.009584091 1.612064
## slope 0.022294892 0.0124416897 0.019072820 15.898385
## uedokaburi 0.031174303 0.0346052180 0.032335689 18.143593
## masuhonsuu -0.001824595 0.0113326682 0.002379708 5.166964
## long 0.015141604 0.0718148803 0.033212224 18.265191
## kyouyounensuu 0.026423577 0.0959856646 0.048651012 13.006310
## kei 0.005158631 0.0586434281 0.022156765 5.399041
varImpPlot(gesui.rf2)
#変数を選択した場合(3つ除去-kouhou, -did, -kubun)
vote_rf2 <- train(
taisyo ~ .,
data = train%>%
select(-kouhou, -did, -kubun),
method = "rf",
importance = TRUE
)
train_rf2 <- confusionMatrix(predict(vote_rf2, train), train$taisyo)
test_rf2 <- confusionMatrix(predict(vote_rf2, test), test$taisyo)
print(str_c("Accuracy (train):", train_rf2$overall[1]))
## [1] "Accuracy (train):1"
print(str_c("Accuracy (test) :", test_rf2$overall[1]))
## [1] "Accuracy (test) :0.853658536585366"
今回の実行では、テストデータの正解率は0.866→0.854とわずかに低下した(乱数シードやデータ分割によって結果は変わる範囲です)。ポイントとしては、特徴量は多ければいいというわけではないということ、ドメイン知識に基づく特徴選択が有効な方法であるということです。
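除去する変数の組み合わせごとの精度は、以下のようにまとめて比較すると確認しやすいです(乱数や分割で数値は変わるので、あくまで比較手順のスケッチです)。
drop_sets <- list(
  none  = character(0),                                    # 全変数を使う
  drop3 = c("kouhou", "did", "kubun"),                     # 3変数除去
  drop4 = c("kouhou", "did", "kubun", "masuhonsuu")        # 4変数除去
)
sapply(drop_sets, function(vars) {
  dat <- train[, !names(train) %in% vars]
  fit <- randomForest(taisyo ~ ., data = dat)
  mean(predict(fit, test) == test$taisyo)                  # test正解率
})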
#変数を選択した場合(4つ除去-kouhou, -did, -kubun, -masuhonsuu)
vote_rf2 <- train(
taisyo ~ .,
data = train%>%
select(-kouhou, -did, -kubun, -masuhonsuu),
method = "rf"
)
train_rf2 <- confusionMatrix(predict(vote_rf2, train), train$taisyo)
test_rf2 <- confusionMatrix(predict(vote_rf2, test), test$taisyo)
print(str_c("Accuracy (train):", train_rf2$overall[1]))
## [1] "Accuracy (train):1"
print(str_c("Accuracy (test) :", test_rf2$overall[1]))
## [1] "Accuracy (test) :0.865853658536585"
今回の実行では精度は0.854→0.866と持ち直した。
#変数を選択した場合(5つ除去-kouhou, -did, -kubun, -masuhonsuu, -ekijyouka)
vote_rf2 <- train(
taisyo ~ .,
data = train%>%
select(-kouhou, -did, -kubun, -masuhonsuu, -ekijyouka),
method = "rf",
importance = TRUE
)
train_rf2 <- confusionMatrix(predict(vote_rf2, train), train$taisyo)
test_rf2 <- confusionMatrix(predict(vote_rf2, test), test$taisyo)
print(str_c("Accuracy (train):", train_rf2$overall[1]))
## [1] "Accuracy (train):1"
print(str_c("Accuracy (test) :", test_rf2$overall[1]))
## [1] "Accuracy (test) :0.902439024390244"
さらに精度が向上した。0.866→0.902
gesui.rf2 <- randomForest( # 予測、分類器の構築
taisyo ~ slope + uedokaburi + masuhonsuu + long + kyouyounensuu + kei, # モデル式
data = train,# データ
importance = TRUE)
predrandam = predict(gesui.rf2, test)
predrandam
## 6 10 12 15 17 22 24 26 27 29 32 40 41 42 44 48 49 55 57 63
## 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 1 1
## 64 67 72 76 78 81 86 89 90 92 93 96 97 100 105 107 109 110 114 116
## 1 1 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0
## 119 120 122 123 127 135 138 149 156 159 163 166 168 169 170 189 196 203 205 206
## 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1
## 213 214 218 224 231 234 235 236 240 241 243 247 250 251 252 256 263 265 267 268
## 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0
## 269 279
## 0 0
## Levels: 0 1
table(predrandam,test$taisyo)
##
## predrandam 0 1
## 0 59 9
## 1 0 14
x=gesui.rf2$importance
varImpPlot(gesui.rf2)
plot(test$taisyo, predrandam, main = gesui.rf2$call)
curve(identity, add = TRUE)
gesui.rf2
##
## Call:
## randomForest(formula = taisyo ~ slope + uedokaburi + masuhonsuu + long + kyouyounensuu + kei, data = train, importance = TRUE)
## Type of random forest: classification
## Number of trees: 500
## No. of variables tried at each split: 2
##
## OOB estimate of error rate: 23%
## Confusion matrix:
## 0 1 class.error
## 0 124 11 0.08148148
## 1 35 30 0.53846154
rank <- data.frame(x) # 重要度のリストをデータフレームに変換
rank$factor <- rownames(rank) # 行名になっている要因をデータフレームに追加
rank <- rank[order(rank[,1], decreasing=T),] # 重要度(偏回帰係数的なもの)順に並び替え
rownames(rank) <- 1:nrow(rank) # ランキングを行名にする
rank
## X0 X1 MeanDecreaseAccuracy MeanDecreaseGini factor
## 1 0.024443328 0.093234777 0.046461890 13.987568 kyouyounensuu
## 2 0.023308806 0.015836466 0.020989665 17.688803 slope
## 3 0.017221048 0.079024413 0.036859773 21.822768 long
## 4 0.009728601 0.042189923 0.020088700 20.481903 uedokaburi
## 5 -0.002126088 0.063095670 0.018872302 5.580675 kei
## 6 -0.005742498 0.009167146 -0.001279201 5.712475 masuhonsuu
http://d-m-l.jp/Rbiz/task_rf.html
dim(gesui)
## [1] 200 11
sapply(gesui, class)
## kubun did ekijyouka kouhou slope
## "integer" "integer" "integer" "numeric" "numeric"
## uedokaburi masuhonsuu long kyouyounensuu kei
## "numeric" "numeric" "numeric" "numeric" "numeric"
## taisyo
## "factor"
head(gesui)
## kubun did ekijyouka kouhou slope uedokaburi masuhonsuu long kyouyounensuu
## 108 1 1 0 0 9.60 2.481001 3 39.08 26
## 115 1 1 0 1 4.12 4.084854 7 51.90 24
## 223 0 1 0 0 1.34 3.484858 4 20.20 25
## 65 1 1 0 0 3.60 1.572562 1 24.52 37
## 28 0 1 0 0 1.72 2.119883 0 17.00 40
## 79 1 1 1 0 1.82 1.366309 0 23.59 24
## kei taisyo
## 108 250 0
## 115 250 0
## 223 250 0
## 65 250 1
## 28 250 1
## 79 250 0
set.seed(123)#注1
gesui.tune <- tuneRF(gesui %>% select(-kouhou, -did, -kubun, -masuhonsuu, -ekijyouka,-taisyo) ,# 説明変数
gesui$taisyo, # 目的変数
doBest = T) #分岐に使う変数の数(mtry)を求めるフラグ
## mtry = 2 OOB error = 24%
## Searching left ...
## mtry = 1 OOB error = 23%
## 0.04166667 0.05
## Searching right ...
## mtry = 4 OOB error = 27%
## -0.125 0.05
set.seed(123)#注1
gesui.tune <- tuneRF(gesui %>% select(-kouhou, -did, -kubun, -masuhonsuu, -ekijyouka,-taisyo) ,# 説明変数
gesui$taisyo, # 目的変数
ntreeTry=2500, #決定木数
trace = TRUE,
doBest = T)
## mtry = 2 OOB error = 22%
## Searching left ...
## mtry = 1 OOB error = 22%
## 0 0.05
## Searching right ...
## mtry = 4 OOB error = 25.5%
## -0.1590909 0.05
gesui.rf2 <- randomForest( # 予測、分類器の構築
taisyo ~ slope + uedokaburi + masuhonsuu + long + kyouyounensuu + kei,
data = gesui, # データ
# 3/30 データから5変数を除去
mtry = gesui.tune$mtry,importance=T)
# 3/26 importance=TRUEにしないとimportance関数は使えない事が分かった。
#分岐に使う変数の数
gesui.tune$mtry
## [1] 1
gesui.rf2
##
## Call:
## randomForest(formula = taisyo ~ slope + uedokaburi + masuhonsuu + long + kyouyounensuu + kei, data = gesui, mtry = gesui.tune$mtry, importance = T)
## Type of random forest: classification
## Number of trees: 500
## No. of variables tried at each split: 1
##
## OOB estimate of error rate: 24.5%
## Confusion matrix:
## 0 1 class.error
## 0 128 7 0.05185185
## 1 42 23 0.64615385
predrandam = predict(gesui.rf2, test)
predrandam
## 6 10 12 15 17 22 24 26 27 29 32 40 41 42 44 48 49 55 57 63
## 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1
## 64 67 72 76 78 81 86 89 90 92 93 96 97 100 105 107 109 110 114 116
## 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0
## 119 120 122 123 127 135 138 149 156 159 163 166 168 169 170 189 196 203 205 206
## 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1
## 213 214 218 224 231 234 235 236 240 241 243 247 250 251 252 256 263 265 267 268
## 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0
## 269 279
## 0 0
## Levels: 0 1
summary(predrandam)
## 0 1
## 70 12
table(predrandam,test$taisyo)
##
## predrandam 0 1
## 0 58 12
## 1 1 11
stargazer(as.data.frame(gesui),type = "html")
| Statistic | N | Mean | St. Dev. | Min | Pctl(25) | Pctl(75) | Max |
| kubun | 200 | 0.200 | 0.401 | 0 | 0 | 0 | 1 |
| did | 200 | 0.725 | 0.448 | 0 | 0 | 1 | 1 |
| ekijyouka | 200 | 0.215 | 0.412 | 0 | 0 | 0 | 1 |
| kouhou | 200 | 0.355 | 0.480 | 0 | 0 | 1 | 1 |
| slope | 200 | 3.296 | 2.032 | 0.000 | 1.900 | 4.200 | 9.700 |
| uedokaburi | 200 | 4.121 | 2.418 | 1.055 | 2.476 | 5.261 | 13.385 |
| masuhonsuu | 200 | 1.270 | 1.781 | 0 | 0 | 2 | 11 |
| long | 200 | 31.977 | 15.610 | 0.970 | 21.745 | 41.785 | 96.820 |
| kyouyounensuu | 200 | 27.615 | 5.313 | 10 | 25 | 27 | 40 |
| kei | 200 | 394.500 | 164.025 | 200 | 250 | 600 | 900 |
stargazer(as.data.frame(train),type = "html")
| Statistic | N | Mean | St. Dev. | Min | Pctl(25) | Pctl(75) | Max |
| kubun | 200 | 0.200 | 0.401 | 0 | 0 | 0 | 1 |
| did | 200 | 0.725 | 0.448 | 0 | 0 | 1 | 1 |
| ekijyouka | 200 | 0.215 | 0.412 | 0 | 0 | 0 | 1 |
| kouhou | 200 | 0.355 | 0.480 | 0 | 0 | 1 | 1 |
| slope | 200 | 3.296 | 2.032 | 0.000 | 1.900 | 4.200 | 9.700 |
| uedokaburi | 200 | 4.121 | 2.418 | 1.055 | 2.476 | 5.261 | 13.385 |
| masuhonsuu | 200 | 1.270 | 1.781 | 0 | 0 | 2 | 11 |
| long | 200 | 31.977 | 15.610 | 0.970 | 21.745 | 41.785 | 96.820 |
| kyouyounensuu | 200 | 27.615 | 5.313 | 10 | 25 | 27 | 40 |
| kei | 200 | 394.500 | 164.025 | 200 | 250 | 600 | 900 |
stargazer(as.data.frame(test),type = "html")
| Statistic | N | Mean | St. Dev. | Min | Pctl(25) | Pctl(75) | Max |
| kubun | 82 | 0.232 | 0.425 | 0 | 0 | 0 | 1 |
| did | 82 | 0.866 | 0.343 | 0 | 1 | 1 | 1 |
| ekijyouka | 82 | 0.171 | 0.379 | 0 | 0 | 0 | 1 |
| kouhou | 82 | 0.293 | 0.458 | 0 | 0 | 1 | 1 |
| slope | 82 | 3.340 | 1.991 | 0.000 | 1.896 | 3.968 | 9.900 |
| uedokaburi | 82 | 4.454 | 2.910 | 1.009 | 2.455 | 5.852 | 12.289 |
| masuhonsuu | 82 | 1.317 | 1.735 | 0 | 0 | 2 | 10 |
| long | 82 | 29.647 | 14.509 | 2.710 | 19.992 | 39.252 | 65.070 |
| kyouyounensuu | 82 | 27.268 | 4.952 | 17 | 25 | 27 | 40 |
| kei | 82 | 379.878 | 158.476 | 200 | 250 | 600 | 600 |
#model = randomForest(taisyo ~ ., data = gesui)
model = randomForest(taisyo ~ ., data = train,
importance = TRUE)
#model = randomForest(kinkyuudo ~ ., data = gesui)
model
##
## Call:
## randomForest(formula = taisyo ~ ., data = train, importance = TRUE)
## Type of random forest: classification
## Number of trees: 500
## No. of variables tried at each split: 3
##
## OOB estimate of error rate: 23%
## Confusion matrix:
## 0 1 class.error
## 0 124 11 0.08148148
## 1 35 30 0.53846154
#predition = predict(model, gesui)
predition = predict(model, test)
predition
## 6 10 12 15 17 22 24 26 27 29 32 40 41 42 44 48 49 55 57 63
## 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 1 1
## 64 67 72 76 78 81 86 89 90 92 93 96 97 100 105 107 109 110 114 116
## 1 1 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0
## 119 120 122 123 127 135 138 149 156 159 163 166 168 169 170 189 196 203 205 206
## 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 1 1 1 1
## 213 214 218 224 231 234 235 236 240 241 243 247 250 251 252 256 263 265 267 268
## 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0
## 269 279
## 0 0
## Levels: 0 1
summary(predition)
## 0 1
## 67 15
table(predition,test$taisyo)
##
## predition 0 1
## 0 57 10
## 1 2 13
#sapply(gesui, class)
#summary(gesui)
model$importance
## 0 1 MeanDecreaseAccuracy MeanDecreaseGini
## kubun 0.007871812 0.009622028 0.008367010 2.002231
## did -0.002518434 0.024138238 0.006210774 1.894325
## ekijyouka 0.014100131 0.004389452 0.011004973 3.071322
## kouhou 0.008602097 0.004028499 0.007306158 1.698362
## slope 0.019207119 0.021664619 0.019802753 15.488437
## uedokaburi 0.031551001 0.041418112 0.034660871 18.357236
## masuhonsuu 0.000461169 0.006201611 0.002264023 5.164022
## long 0.016038744 0.072431528 0.034296065 18.982396
## kyouyounensuu 0.027590364 0.101130112 0.051144365 12.551415
## kei 0.002671212 0.052172352 0.018541582 4.779170
varImpPlot(model)
dim(gesui)
## [1] 200 11
sapply(gesui, class)
## kubun did ekijyouka kouhou slope
## "integer" "integer" "integer" "numeric" "numeric"
## uedokaburi masuhonsuu long kyouyounensuu kei
## "numeric" "numeric" "numeric" "numeric" "numeric"
## taisyo
## "factor"
head(gesui)
## kubun did ekijyouka kouhou slope uedokaburi masuhonsuu long kyouyounensuu
## 108 1 1 0 0 9.60 2.481001 3 39.08 26
## 115 1 1 0 1 4.12 4.084854 7 51.90 24
## 223 0 1 0 0 1.34 3.484858 4 20.20 25
## 65 1 1 0 0 3.60 1.572562 1 24.52 37
## 28 0 1 0 0 1.72 2.119883 0 17.00 40
## 79 1 1 1 0 1.82 1.366309 0 23.59 24
## kei taisyo
## 108 250 0
## 115 250 0
## 223 250 0
## 65 250 1
## 28 250 1
## 79 250 0
set.seed(123)#注1
#gesui.tune <- tuneRF(gesui %>% select(-kinkyuudo) ,# 説明変数
# gesui$kinkyuudo, # 目的変数
gesui.tune <- tuneRF(gesui %>% select(-taisyo) ,# 説明変数
gesui$taisyo, # 目的変数
doBest = T) #分岐に使う変数の数(mtry)を求めるフラグ
## mtry = 3 OOB error = 23%
## Searching left ...
## mtry = 2 OOB error = 26%
## -0.1304348 0.05
## Searching right ...
## mtry = 6 OOB error = 27.5%
## -0.1956522 0.05
この結果、特徴量の個数(mtry)が3個のときにOut-of-Bag誤差(OOB error)は23%、2個のときは26%、6個のときは27.5%
となり、特徴量の個数が3個のときにOut-of-Bag誤差が最小となるため、この個数に設定するのが良さそうであることがわかる*1
構築する決定木の個数を増やしてみる ntreeTry引数はデフォルトでは50となっており,50個の決定木を構築することがわかる.2500個の決定木を構築するように指定してみよう.
gesui <- train
train <- gesui[, !names(gesui) %in% "kouhou"]
gesui <- train
test <- test[, !names(test) %in% "kouhou"]
stargazer(as.data.frame(train),type = "html")
| Statistic | N | Mean | St. Dev. | Min | Pctl(25) | Pctl(75) | Max |
| kubun | 200 | 0.200 | 0.401 | 0 | 0 | 0 | 1 |
| did | 200 | 0.725 | 0.448 | 0 | 0 | 1 | 1 |
| ekijyouka | 200 | 0.215 | 0.412 | 0 | 0 | 0 | 1 |
| kouhou | 200 | 0.355 | 0.480 | 0 | 0 | 1 | 1 |
| slope | 200 | 3.296 | 2.032 | 0.000 | 1.900 | 4.200 | 9.700 |
| uedokaburi | 200 | 4.121 | 2.418 | 1.055 | 2.476 | 5.261 | 13.385 |
| masuhonsuu | 200 | 1.270 | 1.781 | 0 | 0 | 2 | 11 |
| long | 200 | 31.977 | 15.610 | 0.970 | 21.745 | 41.785 | 96.820 |
| kyouyounensuu | 200 | 27.615 | 5.313 | 10 | 25 | 27 | 40 |
| kei | 200 | 394.500 | 164.025 | 200 | 250 | 600 | 900 |
stargazer(as.data.frame(test),type = "html")
| Statistic | N | Mean | St. Dev. | Min | Pctl(25) | Pctl(75) | Max |
| kubun | 82 | 0.232 | 0.425 | 0 | 0 | 0 | 1 |
| did | 82 | 0.866 | 0.343 | 0 | 1 | 1 | 1 |
| ekijyouka | 82 | 0.171 | 0.379 | 0 | 0 | 0 | 1 |
| kouhou | 82 | 0.293 | 0.458 | 0 | 0 | 1 | 1 |
| slope | 82 | 3.340 | 1.991 | 0.000 | 1.896 | 3.968 | 9.900 |
| uedokaburi | 82 | 4.454 | 2.910 | 1.009 | 2.455 | 5.852 | 12.289 |
| masuhonsuu | 82 | 1.317 | 1.735 | 0 | 0 | 2 | 10 |
| long | 82 | 29.647 | 14.509 | 2.710 | 19.992 | 39.252 | 65.070 |
| kyouyounensuu | 82 | 27.268 | 4.952 | 17 | 25 | 27 | 40 |
| kei | 82 | 379.878 | 158.476 | 200 | 250 | 600 | 600 |
set.seed(123)#注1
#gesui.tune <- tuneRF(gesui %>% select(-kinkyuudo) ,# 説明変数
# gesui$kinkyuudo, # 目的変数
gesui.tune <- tuneRF(gesui %>% select(-taisyo) ,# 説明変数
gesui$taisyo, # 目的変数
ntreeTry=2500, #決定木数
trace = TRUE,
doBest = T)
## mtry = 3 OOB error = 22.5%
## Searching left ...
## mtry = 2 OOB error = 21.5%
## 0.04444444 0.05
## Searching right ...
## mtry = 6 OOB error = 23%
## -0.02222222 0.05
今回の実行では、mtryが2個のときにOut-of-Bag誤差(OOB error)が最小(21.5%)となっている。
チューニングで求めたmtry(tuneRF()の結果オブジェクトの$mtryに入っています)を、randomForest()の引数mtryに指定します。
gesui.rf2 <- randomForest( # 予測、分類器の構築
# kinkyuudo ~ ., # モデル式
taisyo ~ ., # モデル式
data = gesui, # データ
# 3/30 データから5変数を除去
mtry = gesui.tune$mtry,importance=T) # 3/26 importance =TRUEにしないとimportance関数は使えない事が分かった。
#分岐に使う変数の数
gesui.rf2
##
## Call:
## randomForest(formula = taisyo ~ ., data = gesui, mtry = gesui.tune$mtry, importance = T)
## Type of random forest: classification
## Number of trees: 500
## No. of variables tried at each split: 2
##
## OOB estimate of error rate: 22%
## Confusion matrix:
## 0 1 class.error
## 0 126 9 0.06666667
## 1 35 30 0.53846154
predrandam = predict(gesui.rf2, test)
predrandam
## 6 10 12 15 17 22 24 26 27 29 32 40 41 42 44 48 49 55 57 63
## 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 1 1
## 64 67 72 76 78 81 86 89 90 92 93 96 97 100 105 107 109 110 114 116
## 1 1 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0
## 119 120 122 123 127 135 138 149 156 159 163 166 168 169 170 189 196 203 205 206
## 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1
## 213 214 218 224 231 234 235 236 240 241 243 247 250 251 252 256 263 265 267 268
## 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0
## 269 279
## 0 0
## Levels: 0 1
summary(predrandam)
## 0 1
## 68 14
table(predrandam,test$taisyo)
##
## predrandam 0 1
## 0 58 10
## 1 1 13
# 乱数の設定
# ランダムフォレストによる分類に寄与した変数の分析
library(kernlab)
##
## Attaching package: 'kernlab'
## The following object is masked from 'package:purrr':
##
## cross
## The following object is masked from 'package:ggplot2':
##
## alpha
set.seed(1)
rf.model <- randomForest(taisyo ~ .,
data = train, ntree = 2, proximity = TRUE)
# 分類に寄与した変数を視覚化
varImpPlot(rf.model)
しかし、上記の重要度の図では、各変数が1と0のどちらのクラスに寄与したのか(どちらのクラスで特徴的に=高い頻度で現れているのか)という情報は得られません。そこで、partialPlot関数を使って部分従属プロットを描きます。
# 描画する変数の数を指定
x <- 10
# 変数名の取得
variable.names <- colnames(train)
# 寄与度の高い変数の番号を取得
rk <- order(rf.model$importance, decreasing = TRUE)[1 : x]
# 描画エリアの設定
par(mfrow = c(2, 5))
# 部分従属プロットの描画
for(i in rk)
partialPlot(rf.model, train, variable.names[i], main = variable.names[i], xlab = variable.names[i], ylab = "Partial Dependency", col = "red")
下水劣化推定変数の重要度
gesui.rf2
##
## Call:
## randomForest(formula = taisyo ~ ., data = gesui, mtry = gesui.tune$mtry, importance = T)
## Type of random forest: classification
## Number of trees: 500
## No. of variables tried at each split: 2
##
## OOB estimate of error rate: 22%
## Confusion matrix:
## 0 1 class.error
## 0 126 9 0.06666667
## 1 35 30 0.53846154
x=gesui.rf2$importance
x
## 0 1 MeanDecreaseAccuracy MeanDecreaseGini
## kubun 0.009234618 0.008252653 8.539903e-03 1.879844
## did -0.002366052 0.031621911 8.212625e-03 1.979532
## ekijyouka 0.011859796 0.003918464 9.291947e-03 2.619773
## kouhou 0.006400661 0.008880522 7.005136e-03 1.561213
## slope 0.013445629 0.013921105 1.343864e-02 13.208184
## uedokaburi 0.031935389 0.034651752 3.272941e-02 15.952420
## masuhonsuu -0.005424500 0.012144159 7.862569e-05 5.266834
## long 0.009682394 0.059949356 2.582232e-02 15.101663
## kyouyounensuu 0.026323347 0.097244579 4.895761e-02 11.657992
## kei 0.003585217 0.053000954 1.954380e-02 5.156416
http://sfchaos.hatenablog.com/entry/20150628/p1 https://tjo.hatenablog.com/entry/2013/09/02/190449
出力結果の読み方 OOB estimate of error rate:誤判別率。Confusion matrix:行が実際のクラス、列が予測クラス。上の例では、実際に”0”だった135個のうち126個が”0”、9個が”1”と予測され、実際に”1”だった65個のうち30個しか”1”と予測できていないと読み取れます。
重要度の高い順番に並び替え
rank <- data.frame(x) # 重要度のリストをデータフレームに変換
rank$factor <- rownames(rank) # 行名になっている要因をデータフレームに追加
rank <- rank[order(rank[,1], decreasing=T),] # 重要度(偏回帰係数的なもの)順に並び替え
rownames(rank) <- 1:nrow(rank) # ランキングを行名にする
rank
## X0 X1 MeanDecreaseAccuracy MeanDecreaseGini factor
## 1 0.031935389 0.034651752 3.272941e-02 15.952420 uedokaburi
## 2 0.026323347 0.097244579 4.895761e-02 11.657992 kyouyounensuu
## 3 0.013445629 0.013921105 1.343864e-02 13.208184 slope
## 4 0.011859796 0.003918464 9.291947e-03 2.619773 ekijyouka
## 5 0.009682394 0.059949356 2.582232e-02 15.101663 long
## 6 0.009234618 0.008252653 8.539903e-03 1.879844 kubun
## 7 0.006400661 0.008880522 7.005136e-03 1.561213 kouhou
## 8 0.003585217 0.053000954 1.954380e-02 5.156416 kei
## 9 -0.002366052 0.031621911 8.212625e-03 1.979532 did
## 10 -0.005424500 0.012144159 7.862569e-05 5.266834 masuhonsuu
plot(gesui.rf2)
varImpPlot(gesui.rf2)
参考 https://yolo-kiyoshi.com/2019/09/16/post-1226/ https://aotamasaki.hatenablog.com/entry/bias_in_feature_importances
# 別のサイトでのランダムフォレストによるEDAをRで実践 https://navaclass.com/random-forest-eda/
#set.seed(111)
#ランダムフォレストモデルの学習
#boston.rf <- randomForest(kinkyuudo ~ .,
#boston.rf <- randomForest(taisyo ~ .,
# data = train,
# importance=T)
#テストデータに対する予測
#pred <- predict(boston.rf, newdata = test)
#観測値と予測値をプロット
#plot(test$taisyo, pred, main = boston.rf$call)
#curve(identity, add = TRUE)
#pred = predict(gesui.rf2, test)
pred = predict(gesui.rf2, train)#報告書には使わない
#plot(test$taisyo, pred, main = gesui.rf2$call)
plot(train$taisyo, pred, main = gesui.rf2$call)#報告書には使わない
curve(identity, add = TRUE)
#3/26追加
kekka<-table(pred,train$taisyo)
kekka
##
## pred 0 1
## 0 135 2
## 1 0 63
pred = predict(gesui.rf2, test)#報告書用
kekka<-table(pred,test$taisyo)
kekka
##
## pred 0 1
## 0 58 10
## 1 1 13
require(ranger)
#観測値と予測値をプロット
plot(test$taisyo, pred, main = gesui.rf2$call)
curve(identity, add = TRUE)
#予測誤差(RMSE:二乗平均平方根誤差)
#予測誤差の推定のためには目的変数をニューメリック(数値)に変換する必要がある
rms <- function(act, pred) {
sqrt(mean((act - pred) ^ 2))
}
cat(" RMSE =", rms(test$taisyo, pred))
## Warning in Ops.factor(act, pred): '-' not meaningful for factors
## RMSE = NA
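上の警告は、目的変数が因子型のまま引き算しているために出ている。コメントのとおり数値に変換してから計算すれば値が出る(分類問題なので本来は正解率等で評価する方が自然だが、参考までに変換のスケッチを示す)。
act_num  <- as.numeric(as.character(test$taisyo))   # 因子("0","1")を数値0,1に変換
pred_num <- as.numeric(as.character(pred))
cat(" RMSE =", rms(act_num, pred_num))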
https://funatsu-lab.github.io/open-course-ware/machine-learning/random-forest/
#require(ranger):randomForestの別実装(高速版)
require(ranger)
# モデルの構築
ranger_model <- ranger(
# formula = as.factor(default.payment.next.month) ~ ., # default.payment.next.monthが目的変数になる
formula = taisyo ~ .,
data = train,
num.trees = 1000,
mtry = 5,
write.forest = TRUE,
importance = 'impurity',
probability = TRUE
)
ranger_pred <- predict(ranger_model, data=test)
ranger_pred$predictions
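probability = TRUE で学習しているため、predictionsは各クラスの確率の行列(列名は因子水準 "0","1")になる。クラスラベルが欲しい場合は、例えば確率0.5を閾値にして変換する(スケッチ)。
ranger_class <- ifelse(ranger_pred$predictions[, "1"] >= 0.5, 1, 0)  # 確率0.5を閾値にクラス化
table(ranger_class, test$taisyo)                                     # 混同行列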
#決定木
library(rpart.plot)
model = rpart(taisyo ~ ., data = train)
model
pred = predict(model, test, type = "class")#報告書用(分類木なので type="class" でクラスラベルを返す)
pred
#table(pred,test[,11])
kekka<-table(pred,test$taisyo)
kekka
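学習した決定木はrpart.plotで図にでき、混同行列kekkaから正解率も計算できる(スケッチ)。
rpart.plot(model)               # 決定木の可視化
sum(diag(kekka)) / sum(kekka)   # test正解率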
stargazer(as.data.frame(gesui),type = "html")
| Statistic | N | Mean | St. Dev. | Min | Pctl(25) | Pctl(75) | Max |
| kubun | 200 | 0.200 | 0.401 | 0 | 0 | 0 | 1 |
| did | 200 | 0.725 | 0.448 | 0 | 0 | 1 | 1 |
| ekijyouka | 200 | 0.215 | 0.412 | 0 | 0 | 0 | 1 |
| kouhou | 200 | 0.355 | 0.480 | 0 | 0 | 1 | 1 |
| slope | 200 | 3.296 | 2.032 | 0.000 | 1.900 | 4.200 | 9.700 |
| uedokaburi | 200 | 4.121 | 2.418 | 1.055 | 2.476 | 5.261 | 13.385 |
| masuhonsuu | 200 | 1.270 | 1.781 | 0 | 0 | 2 | 11 |
| long | 200 | 31.977 | 15.610 | 0.970 | 21.745 | 41.785 | 96.820 |
| kyouyounensuu | 200 | 27.615 | 5.313 | 10 | 25 | 27 | 40 |
| kei | 200 | 394.500 | 164.025 | 200 | 250 | 600 | 900 |
ビジネスに活かすデータマイニング(尾崎隆) http://yut.hatenablog.com/entry/20120827/1346024147
library(e1071)
# 予測結果と実測値の対比
d.svm<-svm(taisyo ~ .,
data = train)
print(d.svm)
##
## Call:
## svm(formula = taisyo ~ ., data = train)
##
##
## Parameters:
## SVM-Type: C-classification
## SVM-Kernel: radial
## cost: 1
##
## Number of Support Vectors: 136
predsvm<-predict(d.svm,newdata=test)
summary(predsvm)
## 0 1
## 69 13
kekka<-table(predsvm,test$taisyo)
kekka
##
## predsvm 0 1
## 0 57 12
## 1 2 11
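caretのconfusionMatrixを使うと、正解率・感度・特異度などをまとめて確認できる(スケッチ)。
confusionMatrix(predsvm, test$taisyo)   # SVMの予測と実測の比較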
ハイパーパラメータチューニング この結果(The final values used for the model were size = 5 and decay = 0.3.)より、size = 5、decay = 0.3 が一番良い事が分かる。
set.seed(0)
modelRF <- train(
# taisyo ~ .,
taisyo ~ slope + uedokaburi + masuhonsuu + long + kyouyounensuu + kei,
data = train,
method = "nnet",
preProcess = c('center', 'scale'),
trControl = trainControl(method = "cv"),
tuneGrid = expand.grid(size=c(1:10), decay=seq(0.1, 1, 0.1)),
linout = FALSE
)
## (nnet convergence trace, truncated: nnet prints an "initial value / iter ... / final value / converged" block for every cross-validation fold and every size × decay combination in the tuning grid)
## iter 10 value 93.206764
## iter 20 value 88.624330
## iter 30 value 85.045791
## iter 40 value 83.257810
## iter 50 value 83.115741
## iter 60 value 82.983122
## iter 70 value 82.218775
## iter 80 value 81.908040
## iter 90 value 81.714558
## iter 100 value 81.706810
## final value 81.706810
## stopped after 100 iterations
## # weights: 49
## initial value 129.251428
## iter 10 value 93.092483
## iter 20 value 84.363813
## iter 30 value 82.079597
## iter 40 value 81.014552
## iter 50 value 80.931932
## iter 60 value 80.927277
## iter 70 value 80.927143
## final value 80.927139
## converged
## # weights: 57
## initial value 117.111580
## iter 10 value 93.745361
## iter 20 value 88.202099
## iter 30 value 86.513338
## iter 40 value 84.938591
## iter 50 value 83.724329
## iter 60 value 82.795543
## iter 70 value 81.958149
## iter 80 value 81.572001
## iter 90 value 81.502459
## iter 100 value 81.239003
## final value 81.239003
## stopped after 100 iterations
## # weights: 65
## initial value 171.787648
## iter 10 value 94.954501
## iter 20 value 88.445656
## iter 30 value 83.418785
## iter 40 value 82.466860
## iter 50 value 81.305359
## iter 60 value 80.710718
## iter 70 value 80.473004
## iter 80 value 80.415375
## iter 90 value 80.393533
## iter 100 value 80.228534
## final value 80.228534
## stopped after 100 iterations
## # weights: 73
## initial value 117.537957
## iter 10 value 93.549898
## iter 20 value 83.265509
## iter 30 value 81.144566
## iter 40 value 80.882927
## iter 50 value 80.490988
## iter 60 value 80.074715
## iter 70 value 79.760573
## iter 80 value 78.468868
## iter 90 value 77.454614
## iter 100 value 77.101658
## final value 77.101658
## stopped after 100 iterations
## # weights: 81
## initial value 143.764262
## iter 10 value 93.198773
## iter 20 value 83.917819
## iter 30 value 80.878608
## iter 40 value 80.105423
## iter 50 value 79.833691
## iter 60 value 79.530865
## iter 70 value 78.873921
## iter 80 value 76.645873
## iter 90 value 76.015908
## iter 100 value 75.525083
## final value 75.525083
## stopped after 100 iterations
## # weights: 9
## initial value 115.298133
## iter 10 value 102.494190
## iter 20 value 101.181691
## iter 30 value 101.122641
## final value 101.109637
## converged
## # weights: 17
## initial value 121.640876
## iter 10 value 101.215704
## iter 20 value 98.290384
## iter 30 value 96.850272
## iter 40 value 96.381501
## final value 96.377883
## converged
## # weights: 25
## initial value 113.833048
## iter 10 value 97.636826
## iter 20 value 95.772853
## iter 30 value 93.255809
## iter 40 value 92.790800
## iter 50 value 92.785382
## iter 50 value 92.785381
## iter 50 value 92.785381
## final value 92.785381
## converged
## # weights: 33
## initial value 166.153313
## iter 10 value 97.427569
## iter 20 value 93.872291
## iter 30 value 92.673013
## iter 40 value 92.537020
## iter 50 value 92.531555
## final value 92.531181
## converged
## # weights: 41
## initial value 120.200071
## iter 10 value 98.027304
## iter 20 value 94.901950
## iter 30 value 93.398494
## iter 40 value 92.964370
## iter 50 value 92.510312
## iter 60 value 92.297160
## iter 70 value 92.285928
## final value 92.285916
## converged
## # weights: 49
## initial value 203.476557
## iter 10 value 97.506933
## iter 20 value 93.700186
## iter 30 value 92.564350
## iter 40 value 92.303174
## iter 50 value 92.273795
## iter 60 value 92.272631
## final value 92.272625
## converged
## # weights: 57
## initial value 131.688666
## iter 10 value 97.763856
## iter 20 value 93.199694
## iter 30 value 92.393092
## iter 40 value 92.143227
## iter 50 value 92.090333
## iter 60 value 92.084418
## final value 92.084396
## converged
## # weights: 65
## initial value 123.681821
## iter 10 value 97.696618
## iter 20 value 93.228370
## iter 30 value 92.498881
## iter 40 value 91.727314
## iter 50 value 91.645479
## iter 60 value 91.577954
## iter 70 value 91.543159
## iter 80 value 91.531303
## iter 90 value 91.529064
## final value 91.529002
## converged
## # weights: 73
## initial value 138.980269
## iter 10 value 95.605928
## iter 20 value 93.404600
## iter 30 value 92.623727
## iter 40 value 91.826566
## iter 50 value 91.720300
## iter 60 value 91.693404
## iter 70 value 91.681138
## iter 80 value 91.657841
## iter 90 value 91.633767
## iter 100 value 91.569023
## final value 91.569023
## stopped after 100 iterations
## # weights: 81
## initial value 155.757805
## iter 10 value 96.394972
## iter 20 value 92.923694
## iter 30 value 91.930078
## iter 40 value 91.768609
## iter 50 value 91.720185
## iter 60 value 91.682851
## iter 70 value 91.655770
## iter 80 value 91.648417
## iter 90 value 91.638145
## iter 100 value 91.636016
## final value 91.636016
## stopped after 100 iterations
## # weights: 9
## initial value 150.651267
## iter 10 value 102.947671
## iter 20 value 102.138539
## iter 30 value 102.015664
## final value 102.015643
## converged
## # weights: 17
## initial value 114.687657
## iter 10 value 102.599220
## iter 20 value 100.054559
## iter 30 value 99.112320
## iter 40 value 99.028981
## final value 99.028608
## converged
## # weights: 25
## initial value 129.656528
## iter 10 value 99.905438
## iter 20 value 97.441648
## iter 30 value 97.017359
## iter 40 value 97.000471
## iter 40 value 97.000470
## iter 40 value 97.000470
## final value 97.000470
## converged
## # weights: 33
## initial value 117.452778
## iter 10 value 100.300627
## iter 20 value 97.618841
## iter 30 value 97.129156
## iter 40 value 96.977282
## iter 50 value 96.975960
## final value 96.975948
## converged
## # weights: 41
## initial value 136.664262
## iter 10 value 99.452030
## iter 20 value 97.228900
## iter 30 value 96.951763
## iter 40 value 96.939313
## iter 50 value 96.938649
## iter 50 value 96.938649
## iter 50 value 96.938649
## final value 96.938649
## converged
## # weights: 49
## initial value 125.357166
## iter 10 value 99.976620
## iter 20 value 97.380286
## iter 30 value 97.263073
## iter 40 value 97.250402
## iter 50 value 97.250059
## final value 97.250055
## converged
## # weights: 57
## initial value 128.672438
## iter 10 value 98.767425
## iter 20 value 97.384295
## iter 30 value 97.278759
## iter 40 value 97.221524
## iter 50 value 97.054174
## iter 60 value 96.924568
## iter 70 value 96.919691
## final value 96.919654
## converged
## # weights: 65
## initial value 155.711276
## iter 10 value 98.096710
## iter 20 value 97.027266
## iter 30 value 96.942421
## iter 40 value 96.935064
## iter 50 value 96.933139
## final value 96.933012
## converged
## # weights: 73
## initial value 155.181569
## iter 10 value 100.092706
## iter 20 value 97.855034
## iter 30 value 97.286240
## iter 40 value 97.051656
## iter 50 value 97.031726
## iter 60 value 97.029547
## final value 97.029451
## converged
## # weights: 81
## initial value 120.586865
## iter 10 value 99.950563
## iter 20 value 97.162388
## iter 30 value 96.946619
## iter 40 value 96.931927
## iter 50 value 96.931329
## iter 60 value 96.930838
## iter 70 value 96.930606
## final value 96.930564
## converged
## # weights: 9
## initial value 154.621881
## iter 10 value 105.257030
## iter 20 value 103.020032
## final value 103.018953
## converged
## # weights: 17
## initial value 132.294527
## iter 10 value 101.663612
## iter 20 value 100.936055
## final value 100.935317
## converged
## # weights: 25
## initial value 152.423021
## iter 10 value 101.770956
## iter 20 value 100.843126
## iter 30 value 99.702624
## iter 40 value 99.660226
## final value 99.659926
## converged
## # weights: 33
## initial value 136.318630
## iter 10 value 101.732878
## iter 20 value 100.028702
## iter 30 value 99.658291
## iter 40 value 99.635941
## iter 50 value 99.631834
## iter 60 value 99.612928
## iter 70 value 99.611176
## final value 99.611173
## converged
## # weights: 41
## initial value 129.109355
## iter 10 value 101.060121
## iter 20 value 100.000657
## iter 30 value 99.631237
## iter 40 value 99.616962
## iter 50 value 99.606028
## iter 60 value 99.605479
## iter 60 value 99.605478
## iter 60 value 99.605478
## final value 99.605478
## converged
## # weights: 49
## initial value 149.770846
## iter 10 value 100.899619
## iter 20 value 99.878507
## iter 30 value 99.709451
## iter 40 value 99.633198
## iter 50 value 99.627472
## iter 60 value 99.627098
## final value 99.626931
## converged
## # weights: 57
## initial value 136.037124
## iter 10 value 101.058296
## iter 20 value 99.791770
## iter 30 value 99.639646
## iter 40 value 99.627024
## iter 50 value 99.626655
## iter 60 value 99.626522
## final value 99.626481
## converged
## # weights: 65
## initial value 234.459473
## iter 10 value 101.252445
## iter 20 value 99.848352
## iter 30 value 99.664530
## iter 40 value 99.629246
## iter 50 value 99.626186
## iter 60 value 99.615537
## iter 70 value 99.613888
## final value 99.613851
## converged
## # weights: 73
## initial value 135.345765
## iter 10 value 100.795385
## iter 20 value 99.844910
## iter 30 value 99.693825
## iter 40 value 99.628726
## iter 50 value 99.626524
## iter 60 value 99.625850
## iter 70 value 99.625608
## final value 99.625524
## converged
## # weights: 81
## initial value 120.794526
## iter 10 value 100.674008
## iter 20 value 99.777421
## iter 30 value 99.634993
## iter 40 value 99.609757
## iter 50 value 99.603490
## iter 60 value 99.602814
## final value 99.602793
## converged
## # weights: 9
## initial value 115.513631
## iter 10 value 105.619640
## iter 20 value 104.871530
## iter 20 value 104.871529
## final value 104.871529
## converged
## # weights: 17
## initial value 147.066981
## iter 10 value 102.521476
## iter 20 value 101.872914
## final value 101.868215
## converged
## # weights: 25
## initial value 142.916563
## iter 10 value 101.861738
## iter 20 value 101.490590
## iter 30 value 101.440027
## final value 101.439918
## converged
## # weights: 33
## initial value 152.617926
## iter 10 value 102.961666
## iter 20 value 101.567714
## iter 30 value 101.417865
## iter 40 value 101.406126
## final value 101.405848
## converged
## # weights: 41
## initial value 148.501146
## iter 10 value 101.800970
## iter 20 value 101.381291
## iter 30 value 101.375718
## iter 40 value 101.372610
## iter 50 value 101.370005
## final value 101.370002
## converged
## # weights: 49
## initial value 162.874155
## iter 10 value 102.203833
## iter 20 value 101.476957
## iter 30 value 101.369576
## iter 40 value 101.354889
## iter 50 value 101.353636
## final value 101.353594
## converged
## # weights: 57
## initial value 124.403762
## iter 10 value 101.693513
## iter 20 value 101.372101
## iter 30 value 101.349526
## iter 40 value 101.345966
## iter 50 value 101.345833
## iter 50 value 101.345833
## iter 50 value 101.345833
## final value 101.345833
## converged
## # weights: 65
## initial value 131.602756
## iter 10 value 101.574867
## iter 20 value 101.369236
## iter 30 value 101.343026
## iter 40 value 101.341470
## final value 101.341224
## converged
## # weights: 73
## initial value 125.527659
## iter 10 value 101.778518
## iter 20 value 101.359093
## iter 30 value 101.341221
## iter 40 value 101.338085
## iter 50 value 101.329315
## iter 60 value 101.327366
## final value 101.327361
## converged
## # weights: 81
## initial value 120.072796
## iter 10 value 101.607264
## iter 20 value 101.336484
## iter 30 value 101.327269
## iter 40 value 101.323378
## final value 101.323324
## converged
## # weights: 9
## initial value 113.411188
## iter 10 value 104.683737
## final value 104.678903
## converged
## # weights: 17
## initial value 115.677591
## iter 10 value 104.219467
## iter 20 value 103.032459
## final value 103.022370
## converged
## # weights: 25
## initial value 143.407970
## iter 10 value 102.839984
## iter 20 value 102.660086
## iter 30 value 102.653648
## final value 102.653578
## converged
## # weights: 33
## initial value 144.488790
## iter 10 value 102.793111
## iter 20 value 102.591314
## iter 30 value 102.579045
## final value 102.578705
## converged
## # weights: 41
## initial value 167.557384
## iter 10 value 102.801349
## iter 20 value 102.551526
## iter 30 value 102.533332
## iter 40 value 102.532188
## iter 50 value 102.529326
## iter 50 value 102.529326
## iter 50 value 102.529326
## final value 102.529326
## converged
## # weights: 49
## initial value 151.021551
## iter 10 value 102.649842
## iter 20 value 102.494325
## iter 30 value 102.492243
## final value 102.492242
## converged
## # weights: 57
## initial value 160.931148
## iter 10 value 102.662783
## iter 20 value 102.483450
## iter 30 value 102.470189
## final value 102.470018
## converged
## # weights: 65
## initial value 130.704866
## iter 10 value 102.900188
## iter 20 value 102.501292
## iter 30 value 102.449012
## iter 40 value 102.447388
## final value 102.447380
## converged
## # weights: 73
## initial value 152.534207
## iter 10 value 102.888952
## iter 20 value 102.448002
## iter 30 value 102.434900
## iter 40 value 102.434698
## iter 40 value 102.434697
## iter 40 value 102.434697
## final value 102.434697
## converged
## # weights: 81
## initial value 126.642822
## iter 10 value 102.743919
## iter 20 value 102.428037
## iter 30 value 102.416833
## final value 102.416690
## converged
## # weights: 9
## initial value 114.340014
## iter 10 value 105.436841
## final value 105.383047
## converged
## # weights: 17
## initial value 153.194149
## iter 10 value 104.032771
## iter 20 value 104.009566
## final value 104.009090
## converged
## # weights: 25
## initial value 118.946153
## iter 10 value 104.061973
## iter 20 value 103.597728
## iter 30 value 103.589177
## final value 103.589162
## converged
## # weights: 33
## initial value 124.568173
## iter 10 value 103.675659
## iter 20 value 103.479802
## iter 30 value 103.477453
## iter 30 value 103.477453
## iter 30 value 103.477453
## final value 103.477453
## converged
## # weights: 41
## initial value 147.477694
## iter 10 value 103.548430
## iter 20 value 103.386863
## iter 30 value 103.382894
## final value 103.382777
## converged
## # weights: 49
## initial value 165.297525
## iter 10 value 105.060164
## iter 20 value 103.414378
## iter 30 value 103.315312
## iter 40 value 103.300304
## iter 50 value 103.299007
## final value 103.299004
## converged
## # weights: 57
## initial value 164.014250
## iter 10 value 103.281279
## iter 20 value 103.234551
## final value 103.233287
## converged
## # weights: 65
## initial value 133.027863
## iter 10 value 103.592822
## iter 20 value 103.187190
## iter 30 value 103.172564
## iter 40 value 103.172144
## iter 40 value 103.172143
## iter 40 value 103.172143
## final value 103.172143
## converged
## # weights: 73
## initial value 129.883627
## iter 10 value 103.480028
## iter 20 value 103.165119
## iter 30 value 103.138749
## final value 103.138561
## converged
## # weights: 81
## initial value 171.059221
## iter 10 value 103.352895
## iter 20 value 103.091104
## iter 30 value 103.084239
## iter 40 value 103.080747
## final value 103.080737
## converged
## # weights: 9
## initial value 120.538834
## iter 10 value 106.605533
## iter 20 value 106.024423
## iter 20 value 106.024422
## final value 106.024422
## converged
## # weights: 17
## initial value 117.907933
## iter 10 value 105.225287
## iter 20 value 104.865299
## final value 104.863791
## converged
## # weights: 25
## initial value 136.929718
## iter 10 value 104.768820
## iter 20 value 104.388823
## iter 30 value 104.381216
## final value 104.378064
## converged
## # weights: 33
## initial value 129.614255
## iter 10 value 104.545018
## iter 20 value 104.337569
## iter 30 value 104.256045
## final value 104.255645
## converged
## # weights: 41
## initial value 154.381941
## iter 10 value 104.437239
## iter 20 value 104.082965
## iter 30 value 104.064519
## iter 40 value 104.058012
## final value 104.057494
## converged
## # weights: 49
## initial value 145.851210
## iter 10 value 104.231912
## iter 20 value 104.016808
## iter 30 value 104.014470
## final value 104.014457
## converged
## # weights: 57
## initial value 119.510242
## iter 10 value 104.070468
## iter 20 value 103.883325
## iter 30 value 103.870090
## final value 103.870031
## converged
## # weights: 65
## initial value 147.045592
## iter 10 value 104.019819
## iter 20 value 103.796359
## iter 30 value 103.783149
## final value 103.783114
## converged
## # weights: 73
## initial value 191.173391
## iter 10 value 104.160343
## iter 20 value 103.803250
## iter 30 value 103.752185
## iter 40 value 103.751165
## iter 40 value 103.751165
## iter 40 value 103.751165
## final value 103.751165
## converged
## # weights: 81
## initial value 123.955837
## iter 10 value 104.179956
## iter 20 value 103.762197
## iter 30 value 103.704108
## iter 40 value 103.703602
## final value 103.703595
## converged
## # weights: 9
## initial value 130.432233
## iter 10 value 108.819716
## iter 20 value 108.365541
## final value 108.365533
## converged
## # weights: 17
## initial value 129.787083
## iter 10 value 105.701198
## iter 20 value 105.610429
## final value 105.610038
## converged
## # weights: 25
## initial value 153.613792
## iter 10 value 105.780327
## iter 20 value 105.538623
## iter 30 value 105.062747
## iter 40 value 105.054190
## iter 50 value 105.045398
## final value 105.045389
## converged
## # weights: 33
## initial value 143.279810
## iter 10 value 104.888210
## iter 20 value 104.818192
## iter 30 value 104.817480
## iter 30 value 104.817480
## iter 30 value 104.817480
## final value 104.817480
## converged
## # weights: 41
## initial value 160.085727
## iter 10 value 104.785869
## iter 20 value 104.665003
## iter 30 value 104.660915
## final value 104.660895
## converged
## # weights: 49
## initial value 121.774198
## iter 10 value 105.019626
## iter 20 value 104.646381
## iter 30 value 104.624434
## iter 40 value 104.623901
## final value 104.623892
## converged
## # weights: 57
## initial value 165.397113
## iter 10 value 104.797780
## iter 20 value 104.489912
## iter 30 value 104.451973
## iter 40 value 104.450913
## final value 104.450908
## converged
## # weights: 65
## initial value 136.275168
## iter 10 value 104.755974
## iter 20 value 104.472596
## iter 30 value 104.445169
## iter 40 value 104.444717
## iter 40 value 104.444717
## iter 40 value 104.444716
## final value 104.444716
## converged
## # weights: 73
## initial value 129.687179
## iter 10 value 104.883960
## iter 20 value 104.432785
## iter 30 value 104.341919
## iter 40 value 104.338547
## final value 104.338531
## converged
## # weights: 81
## initial value 123.767597
## iter 10 value 104.491672
## iter 20 value 104.315945
## iter 30 value 104.273814
## final value 104.273295
## converged
## # weights: 9
## initial value 115.587990
## iter 10 value 107.295179
## iter 20 value 107.155290
## iter 20 value 107.155290
## iter 20 value 107.155290
## final value 107.155290
## converged
## # weights: 17
## initial value 116.391995
## iter 10 value 106.525517
## iter 20 value 106.382347
## final value 106.382212
## converged
## # weights: 25
## initial value 122.982055
## iter 10 value 105.728537
## iter 20 value 105.635785
## iter 30 value 105.634906
## final value 105.634890
## converged
## # weights: 33
## initial value 123.197148
## iter 10 value 105.464851
## iter 20 value 105.385996
## final value 105.385175
## converged
## # weights: 41
## initial value 134.262517
## iter 10 value 105.426235
## iter 20 value 105.281497
## iter 30 value 105.278258
## final value 105.278256
## converged
## # weights: 49
## initial value 172.010654
## iter 10 value 106.281829
## iter 20 value 105.243939
## iter 30 value 105.199368
## iter 40 value 105.196966
## final value 105.196898
## converged
## # weights: 57
## initial value 132.623906
## iter 10 value 105.215208
## iter 20 value 105.018535
## iter 30 value 105.011238
## final value 105.011183
## converged
## # weights: 65
## initial value 148.461189
## iter 10 value 105.152899
## iter 20 value 104.944053
## iter 30 value 104.930692
## final value 104.930546
## converged
## # weights: 73
## initial value 144.345972
## iter 10 value 105.115639
## iter 20 value 104.883949
## iter 30 value 104.875570
## final value 104.875441
## converged
## # weights: 81
## initial value 125.907530
## iter 10 value 105.020376
## iter 20 value 104.903706
## iter 30 value 104.894474
## final value 104.894249
## converged
## # weights: 9
## initial value 117.907640
## iter 10 value 100.322360
## iter 20 value 99.785702
## iter 20 value 99.785702
## final value 99.785702
## converged
## # weights: 17
## initial value 115.382550
## iter 10 value 100.330261
## iter 20 value 98.048731
## iter 30 value 95.680386
## iter 40 value 95.350979
## iter 50 value 95.332262
## final value 95.332234
## converged
## # weights: 25
## initial value 126.264611
## iter 10 value 99.248612
## iter 20 value 93.772277
## iter 30 value 91.925227
## iter 40 value 91.761793
## iter 50 value 91.549748
## iter 60 value 91.149073
## iter 70 value 91.143410
## final value 91.143335
## converged
## # weights: 33
## initial value 113.962173
## iter 10 value 99.301944
## iter 20 value 92.832664
## iter 30 value 90.088688
## iter 40 value 88.524018
## iter 50 value 88.190744
## iter 60 value 88.183012
## final value 88.182924
## converged
## # weights: 41
## initial value 129.960083
## iter 10 value 98.218619
## iter 20 value 93.044742
## iter 30 value 91.231360
## iter 40 value 90.646733
## iter 50 value 90.410525
## iter 60 value 90.287246
## iter 70 value 90.284752
## iter 70 value 90.284751
## iter 70 value 90.284751
## final value 90.284751
## converged
## # weights: 49
## initial value 114.481245
## iter 10 value 95.770471
## iter 20 value 90.682727
## iter 30 value 88.451124
## iter 40 value 87.415819
## iter 50 value 86.167869
## iter 60 value 85.875820
## iter 70 value 85.739980
## iter 80 value 85.734889
## final value 85.734884
## converged
## # weights: 57
## initial value 113.874287
## iter 10 value 97.330609
## iter 20 value 91.688272
## iter 30 value 87.241341
## iter 40 value 85.681947
## iter 50 value 85.478156
## iter 60 value 85.446249
## final value 85.445358
## converged
## # weights: 65
## initial value 122.106374
## iter 10 value 95.291706
## iter 20 value 91.413435
## iter 30 value 89.338588
## iter 40 value 88.301795
## iter 50 value 87.823378
## iter 60 value 86.453579
## iter 70 value 86.075047
## iter 80 value 85.865094
## iter 90 value 85.813387
## iter 100 value 85.490817
## final value 85.490817
## stopped after 100 iterations
## # weights: 73
## initial value 131.224185
## iter 10 value 95.028912
## iter 20 value 90.093435
## iter 30 value 88.568069
## iter 40 value 87.618543
## iter 50 value 87.346245
## iter 60 value 86.105600
## iter 70 value 85.170797
## iter 80 value 84.936425
## iter 90 value 84.836718
## iter 100 value 84.438600
## final value 84.438600
## stopped after 100 iterations
## # weights: 81
## initial value 143.286641
## iter 10 value 95.762071
## iter 20 value 88.928353
## iter 30 value 86.927234
## iter 40 value 85.336493
## iter 50 value 84.171714
## iter 60 value 83.397968
## iter 70 value 83.079381
## iter 80 value 82.953703
## iter 90 value 82.332726
## iter 100 value 81.796624
## final value 81.796624
## stopped after 100 iterations
## # weights: 9
## initial value 112.955984
## iter 10 value 101.705777
## final value 101.682911
## converged
## # weights: 17
## initial value 121.130982
## iter 10 value 101.766788
## iter 20 value 99.645243
## iter 30 value 99.516882
## final value 99.516860
## converged
## # weights: 25
## initial value 143.940165
## iter 10 value 101.645510
## iter 20 value 99.181018
## iter 30 value 99.004085
## iter 40 value 98.573092
## iter 50 value 98.529885
## iter 50 value 98.529884
## iter 50 value 98.529884
## final value 98.529884
## converged
## # weights: 33
## initial value 116.278549
## iter 10 value 101.377844
## iter 20 value 98.238849
## iter 30 value 97.361035
## iter 40 value 97.325857
## final value 97.325748
## converged
## # weights: 41
## initial value 120.722691
## iter 10 value 100.775449
## iter 20 value 97.971448
## iter 30 value 97.661458
## iter 40 value 97.442781
## iter 50 value 97.344092
## iter 60 value 97.341097
## final value 97.341085
## converged
## # weights: 49
## initial value 128.999069
## iter 10 value 100.189595
## iter 20 value 97.547756
## iter 30 value 96.984585
## iter 40 value 96.884400
## iter 50 value 96.878902
## iter 60 value 96.857575
## iter 70 value 96.855625
## final value 96.855619
## converged
## # weights: 57
## initial value 128.052726
## iter 10 value 99.662346
## iter 20 value 98.219531
## iter 30 value 97.397651
## iter 40 value 97.265292
## iter 50 value 97.227967
## iter 60 value 97.224585
## final value 97.224564
## converged
## # weights: 65
## initial value 138.796296
## iter 10 value 100.096269
## iter 20 value 97.532910
## iter 30 value 97.203139
## iter 40 value 97.176781
## iter 50 value 97.169165
## iter 60 value 97.168078
## final value 97.168054
## converged
## # weights: 73
## initial value 114.237647
## iter 10 value 99.613595
## iter 20 value 98.161927
## iter 30 value 97.496620
## iter 40 value 97.369782
## iter 50 value 97.252265
## iter 60 value 97.213376
## iter 70 value 97.212387
## final value 97.212367
## converged
## # weights: 81
## initial value 129.930568
## iter 10 value 99.581016
## iter 20 value 97.522697
## iter 30 value 97.296203
## iter 40 value 97.225903
## iter 50 value 97.206309
## iter 60 value 97.205336
## final value 97.205317
## converged
## # weights: 9
## initial value 144.672605
## iter 10 value 107.318017
## iter 20 value 103.895634
## iter 30 value 103.066628
## final value 103.066422
## converged
## # weights: 17
## initial value 126.710282
## iter 10 value 102.059106
## iter 20 value 101.581247
## final value 101.390495
## converged
## # weights: 25
## initial value 119.154418
## iter 10 value 102.117814
## iter 20 value 100.842987
## iter 30 value 100.530139
## final value 100.529678
## converged
## # weights: 33
## initial value 121.460530
## iter 10 value 101.908406
## iter 20 value 100.961565
## iter 30 value 100.758708
## iter 40 value 100.755326
## final value 100.755313
## converged
## # weights: 41
## initial value 114.931031
## iter 10 value 102.148357
## iter 20 value 101.067100
## iter 30 value 100.965609
## iter 40 value 100.954416
## iter 50 value 100.952177
## final value 100.952107
## converged
## # weights: 49
## initial value 117.107595
## iter 10 value 102.767502
## iter 20 value 100.995686
## iter 30 value 100.534203
## iter 40 value 100.516332
## iter 50 value 100.515297
## final value 100.515292
## converged
## # weights: 57
## initial value 121.065883
## iter 10 value 101.707929
## iter 20 value 100.954891
## iter 30 value 100.569227
## iter 40 value 100.524327
## iter 50 value 100.522808
## final value 100.522800
## converged
## # weights: 65
## initial value 119.342360
## iter 10 value 102.241523
## iter 20 value 101.186677
## iter 30 value 100.937645
## iter 40 value 100.901231
## iter 50 value 100.898730
## final value 100.898708
## converged
## # weights: 73
## initial value 183.760354
## iter 10 value 101.663610
## iter 20 value 100.772247
## iter 30 value 100.537384
## iter 40 value 100.519469
## iter 50 value 100.518457
## final value 100.518447
## converged
## # weights: 81
## initial value 131.287983
## iter 10 value 101.473426
## iter 20 value 100.787200
## iter 30 value 100.713618
## iter 40 value 100.712327
## final value 100.712301
## converged
## # weights: 9
## initial value 135.177270
## iter 10 value 105.270398
## iter 20 value 104.845518
## iter 20 value 104.845518
## final value 104.845518
## converged
## # weights: 17
## initial value 117.121861
## iter 10 value 103.546476
## iter 20 value 103.065976
## final value 103.053562
## converged
## # weights: 25
## initial value 145.813871
## iter 10 value 103.859379
## iter 20 value 102.945318
## iter 30 value 102.677619
## iter 40 value 102.668383
## iter 40 value 102.668382
## iter 40 value 102.668382
## final value 102.668382
## converged
## # weights: 33
## initial value 143.466935
## iter 10 value 103.517759
## iter 20 value 103.143680
## iter 30 value 102.714826
## iter 40 value 102.688104
## final value 102.686790
## converged
## # weights: 41
## initial value 168.373057
## iter 10 value 103.656868
## iter 20 value 102.754231
## iter 30 value 102.689817
## iter 40 value 102.679282
## iter 50 value 102.664432
## final value 102.664357
## converged
## # weights: 49
## initial value 187.602142
## iter 10 value 103.195366
## iter 20 value 102.725087
## iter 30 value 102.687651
## iter 40 value 102.672681
## iter 50 value 102.665419
## final value 102.665360
## converged
## # weights: 57
## initial value 118.191514
## iter 10 value 103.503892
## iter 20 value 102.711973
## iter 30 value 102.685100
## iter 40 value 102.681824
## iter 50 value 102.678438
## iter 60 value 102.664203
## final value 102.663237
## converged
## # weights: 65
## initial value 140.854470
## iter 10 value 103.591745
## iter 20 value 102.849483
## iter 30 value 102.684764
## iter 40 value 102.677582
## iter 50 value 102.672804
## iter 60 value 102.672430
## final value 102.672398
## converged
## # weights: 73
## initial value 162.432699
## iter 10 value 103.410231
## iter 20 value 102.754259
## iter 30 value 102.670314
## iter 40 value 102.667593
## final value 102.667540
## converged
## # weights: 81
## initial value 121.908320
## iter 10 value 104.058468
## iter 20 value 102.982542
## iter 30 value 102.707726
## iter 40 value 102.673111
## iter 50 value 102.667838
## iter 60 value 102.667430
## iter 70 value 102.667202
## final value 102.667170
## converged
## # weights: 9
## initial value 122.924856
## iter 10 value 105.755290
## iter 20 value 105.154855
## iter 20 value 105.154854
## final value 105.154854
## converged
## # weights: 17
## initial value 128.067100
## iter 10 value 105.436017
## iter 20 value 104.892614
## iter 30 value 104.710848
## iter 40 value 104.704570
## iter 40 value 104.704570
## iter 40 value 104.704570
## final value 104.704570
## converged
## # weights: 25
## initial value 125.299925
## iter 10 value 104.508147
## iter 20 value 104.245061
## iter 30 value 104.227635
## final value 104.227626
## converged
## # weights: 33
## initial value 122.520178
## iter 10 value 104.680097
## iter 20 value 104.216919
## iter 30 value 104.052697
## iter 40 value 104.046649
## final value 104.046634
## converged
## # weights: 41
## initial value 142.550266
## iter 10 value 104.506274
## iter 20 value 104.021953
## iter 30 value 104.002532
## final value 104.002194
## converged
## # weights: 49
## initial value 172.452786
## iter 10 value 104.440232
## iter 20 value 103.996600
## iter 30 value 103.988920
## final value 103.988853
## converged
## # weights: 57
## initial value 129.126439
## iter 10 value 104.748045
## iter 20 value 104.092721
## iter 30 value 103.968923
## iter 40 value 103.967382
## iter 50 value 103.967202
## final value 103.967199
## converged
## # weights: 65
## initial value 123.806025
## iter 10 value 104.643477
## iter 20 value 103.976099
## iter 30 value 103.955521
## iter 40 value 103.949154
## iter 50 value 103.947843
## final value 103.947841
## converged
## # weights: 73
## initial value 206.611900
## iter 10 value 104.835367
## iter 20 value 104.013040
## iter 30 value 103.955386
## iter 40 value 103.937276
## iter 50 value 103.933660
## final value 103.933619
## converged
## # weights: 81
## initial value 191.377222
## iter 10 value 104.187299
## iter 20 value 103.945655
## iter 30 value 103.929791
## iter 40 value 103.928813
## final value 103.928809
## converged
## # weights: 9
## initial value 121.990130
## iter 10 value 107.180119
## iter 20 value 107.035800
## iter 20 value 107.035800
## final value 107.035800
## converged
## # weights: 17
## initial value 127.405539
## iter 10 value 105.891208
## iter 20 value 105.662448
## final value 105.656649
## converged
## # weights: 25
## initial value 119.130131
## iter 10 value 106.010088
## iter 20 value 105.204710
## iter 30 value 105.184917
## final value 105.183987
## converged
## # weights: 33
## initial value 133.166769
## iter 10 value 105.611085
## iter 20 value 105.056871
## iter 30 value 104.981831
## final value 104.981482
## converged
## # weights: 41
## initial value 126.219647
## iter 10 value 105.201040
## iter 20 value 104.941960
## iter 30 value 104.924790
## final value 104.924527
## converged
## # weights: 49
## initial value 123.999422
## iter 10 value 105.643134
## iter 20 value 104.919570
## iter 30 value 104.906945
## final value 104.906906
## converged
## # weights: 57
## initial value 144.292651
## iter 10 value 105.313352
## iter 20 value 104.919081
## iter 30 value 104.907040
## iter 40 value 104.905002
## iter 40 value 104.905001
## iter 40 value 104.905001
## final value 104.905001
## converged
## # weights: 65
## initial value 141.044439
## iter 10 value 105.506477
## iter 20 value 104.950785
## iter 30 value 104.860766
## iter 40 value 104.859759
## final value 104.859700
## converged
## # weights: 73
## initial value 154.448505
## iter 10 value 105.208346
## iter 20 value 104.885173
## iter 30 value 104.827186
## iter 40 value 104.812108
## final value 104.811907
## converged
## # weights: 81
## initial value 131.757333
## iter 10 value 105.063296
## iter 20 value 104.802295
## iter 30 value 104.797920
## final value 104.797845
## converged
## # weights: 9
## initial value 115.562214
## iter 10 value 108.039935
## final value 107.961713
## converged
## # weights: 17
## initial value 124.773426
## iter 10 value 106.478085
## iter 20 value 106.051735
## final value 106.049320
## converged
## # weights: 25
## initial value 158.004679
## iter 10 value 106.654145
## iter 20 value 105.879867
## iter 30 value 105.828721
## final value 105.828648
## converged
## # weights: 33
## initial value 125.257098
## iter 10 value 105.909073
## iter 20 value 105.757851
## iter 30 value 105.714521
## iter 40 value 105.712102
## iter 40 value 105.712101
## iter 40 value 105.712101
## final value 105.712101
## converged
## # weights: 41
## initial value 161.966287
## iter 10 value 105.720576
## iter 20 value 105.636040
## final value 105.635216
## converged
## # weights: 49
## initial value 117.516029
## iter 10 value 105.707560
## iter 20 value 105.593548
## iter 30 value 105.592178
## final value 105.592150
## converged
## # weights: 57
## initial value 130.577488
## iter 10 value 105.745191
## iter 20 value 105.572175
## iter 30 value 105.544661
## iter 40 value 105.541599
## final value 105.541592
## converged
## # weights: 65
## initial value 201.622842
## iter 10 value 106.990911
## iter 20 value 105.678833
## iter 30 value 105.530101
## iter 40 value 105.523301
## final value 105.523294
## converged
## # weights: 73
## initial value 166.681266
## iter 10 value 105.922335
## iter 20 value 105.508925
## iter 30 value 105.496992
## final value 105.496962
## converged
## # weights: 81
## initial value 129.924751
## iter 10 value 105.943914
## iter 20 value 105.498863
## iter 30 value 105.455521
## iter 40 value 105.445941
## final value 105.445778
## converged
## # weights: 9
## initial value 119.130266
## iter 10 value 107.470789
## final value 107.394191
## converged
## # weights: 17
## initial value 119.825671
## iter 10 value 107.087036
## iter 20 value 106.732789
## final value 106.732238
## converged
## # weights: 25
## initial value 164.997841
## iter 10 value 106.989387
## iter 20 value 106.679818
## iter 30 value 106.673610
## final value 106.673606
## converged
## # weights: 33
## initial value 141.597254
## iter 10 value 106.463046
## iter 20 value 106.304805
## iter 30 value 106.302657
## final value 106.302632
## converged
## # weights: 41
## initial value 118.636036
## iter 10 value 106.535079
## iter 20 value 106.225408
## iter 30 value 106.207557
## iter 40 value 106.207141
## iter 40 value 106.207140
## iter 40 value 106.207140
## final value 106.207140
## converged
## # weights: 49
## initial value 121.578038
## iter 10 value 106.295830
## iter 20 value 106.183031
## iter 30 value 106.178090
## final value 106.178059
## converged
## # weights: 57
## initial value 120.902623
## iter 10 value 106.154524
## iter 20 value 106.085763
## iter 30 value 106.084500
## final value 106.084498
## converged
## # weights: 65
## initial value 130.237821
## iter 10 value 106.447895
## iter 20 value 106.075018
## iter 30 value 106.060606
## final value 106.060404
## converged
## # weights: 73
## initial value 129.411417
## iter 10 value 106.078407
## iter 20 value 106.004162
## iter 30 value 105.998725
## final value 105.998714
## converged
## # weights: 81
## initial value 194.760373
## iter 10 value 106.538951
## iter 20 value 105.983012
## iter 30 value 105.970209
## final value 105.970068
## converged
## # weights: 9
## initial value 124.209080
## iter 10 value 108.153514
## final value 107.990203
## converged
## # weights: 17
## initial value 135.157469
## iter 10 value 107.397747
## iter 20 value 107.346522
## final value 107.346396
## converged
## # weights: 25
## initial value 116.656681
## iter 10 value 107.591797
## iter 20 value 107.492710
## iter 30 value 107.492262
## iter 30 value 107.492262
## iter 30 value 107.492262
## final value 107.492262
## converged
## # weights: 33
## initial value 121.472438
## iter 10 value 107.042776
## iter 20 value 106.840478
## iter 30 value 106.834968
## final value 106.834962
## converged
## # weights: 41
## initial value 174.108353
## iter 10 value 106.862298
## iter 20 value 106.734569
## iter 30 value 106.731830
## final value 106.731816
## converged
## # weights: 49
## initial value 126.301833
## iter 10 value 106.821783
## iter 20 value 106.646918
## final value 106.642119
## converged
## # weights: 57
## initial value 148.255287
## iter 10 value 106.684616
## iter 20 value 106.595534
## iter 30 value 106.593532
## final value 106.593523
## converged
## # weights: 65
## initial value 154.581000
## iter 10 value 106.704122
## iter 20 value 106.569949
## final value 106.568895
## converged
## # weights: 73
## initial value 141.025400
## iter 10 value 106.893930
## iter 20 value 106.534707
## iter 30 value 106.505102
## iter 40 value 106.501378
## final value 106.501374
## converged
## # weights: 81
## initial value 128.214982
## iter 10 value 106.790260
## iter 20 value 106.483707
## iter 30 value 106.479629
## final value 106.479621
## converged
## # weights: 9
## initial value 159.851839
## iter 10 value 110.569622
## iter 20 value 110.244463
## iter 20 value 110.244463
## final value 110.244463
## converged
## # weights: 17
## initial value 117.823068
## iter 10 value 108.291480
## iter 20 value 108.076460
## final value 108.073686
## converged
## # weights: 25
## initial value 121.040288
## iter 10 value 107.579865
## iter 20 value 107.496017
## final value 107.495894
## converged
## # weights: 33
## initial value 129.274392
## iter 10 value 107.369263
## iter 20 value 107.334346
## final value 107.334273
## converged
## # weights: 41
## initial value 122.797395
## iter 10 value 107.329542
## iter 20 value 107.265497
## iter 30 value 107.264384
## final value 107.264382
## converged
## # weights: 49
## initial value 165.539022
## iter 10 value 107.354865
## iter 20 value 107.213213
## iter 30 value 107.209307
## final value 107.209287
## converged
## # weights: 57
## initial value 138.458883
## iter 10 value 107.239192
## iter 20 value 107.093070
## iter 30 value 107.091446
## final value 107.091423
## converged
## # weights: 65
## initial value 138.724102
## iter 10 value 107.418628
## iter 20 value 107.069795
## iter 30 value 107.024573
## iter 40 value 107.022736
## final value 107.022721
## converged
## # weights: 73
## initial value 136.460795
## iter 10 value 107.353513
## iter 20 value 107.012016
## iter 30 value 106.982619
## iter 40 value 106.982104
## iter 40 value 106.982103
## iter 40 value 106.982103
## final value 106.982103
## converged
## # weights: 81
## initial value 128.524763
## iter 10 value 107.353914
## iter 20 value 107.005755
## iter 30 value 106.998797
## final value 106.998762
## converged
## # weights: 9
## initial value 113.450354
## iter 10 value 101.067174
## iter 20 value 100.630050
## iter 30 value 100.623734
## final value 100.623727
## converged
## # weights: 17
## initial value 122.625146
## iter 10 value 98.405374
## iter 20 value 95.070405
## iter 30 value 95.010511
## final value 95.009954
## converged
## # weights: 25
## initial value 123.265795
## iter 10 value 97.871429
## iter 20 value 93.504477
## iter 30 value 91.447910
## iter 40 value 90.545800
## iter 50 value 90.330454
## iter 60 value 90.330305
## final value 90.330302
## converged
## # weights: 33
## initial value 132.959302
## iter 10 value 96.280399
## iter 20 value 93.168323
## iter 30 value 92.373988
## iter 40 value 91.879386
## iter 50 value 91.639010
## iter 60 value 90.804648
## iter 70 value 90.604536
## iter 80 value 90.602632
## iter 80 value 90.602631
## iter 80 value 90.602631
## final value 90.602631
## converged
## # weights: 41
## initial value 118.700366
## iter 10 value 96.141631
## iter 20 value 91.070644
## iter 30 value 90.460015
## iter 40 value 89.552198
## iter 50 value 88.840848
## iter 60 value 88.582637
## iter 70 value 88.564107
## final value 88.562694
## converged
## # weights: 49
## initial value 149.864042
## iter 10 value 97.701575
## iter 20 value 93.785707
## iter 30 value 91.821043
## iter 40 value 88.637535
## iter 50 value 88.163407
## iter 60 value 87.890886
## iter 70 value 87.537901
## iter 80 value 87.525055
## iter 90 value 87.524918
## final value 87.524916
## converged
## # weights: 57
## initial value 142.074630
## iter 10 value 94.931833
## iter 20 value 89.939354
## iter 30 value 88.807445
## iter 40 value 87.030530
## iter 50 value 86.058401
## iter 60 value 85.851860
## iter 70 value 85.827012
## iter 80 value 85.596985
## iter 90 value 85.274358
## iter 100 value 85.248219
## final value 85.248219
## stopped after 100 iterations
## # weights: 65
## initial value 113.925038
## iter 10 value 96.928431
## iter 20 value 89.375621
## iter 30 value 88.619903
## iter 40 value 88.079212
## iter 50 value 87.642774
## iter 60 value 87.473156
## iter 70 value 86.410251
## iter 80 value 86.288154
## iter 90 value 86.211462
## iter 100 value 85.783340
## final value 85.783340
## stopped after 100 iterations
## # weights: 73
## initial value 171.288388
## iter 10 value 95.772783
## iter 20 value 92.530168
## iter 30 value 90.357164
## iter 40 value 88.032853
## iter 50 value 87.260241
## iter 60 value 86.481238
## iter 70 value 85.271909
## iter 80 value 84.452555
## iter 90 value 83.887128
## iter 100 value 83.648830
## final value 83.648830
## stopped after 100 iterations
## # weights: 81
## initial value 141.440745
## iter 10 value 96.454127
## iter 20 value 89.897065
## iter 30 value 86.565662
## iter 40 value 85.110413
## iter 50 value 84.742571
## iter 60 value 84.607130
## iter 70 value 84.549528
## iter 80 value 84.535579
## iter 90 value 84.517200
## iter 100 value 84.136269
## final value 84.136269
## stopped after 100 iterations
## # weights: 9
## initial value 160.086591
## iter 10 value 102.459226
## iter 20 value 102.178936
## final value 102.178881
## converged
## # weights: 17
## initial value 115.617713
## iter 10 value 102.376073
## iter 20 value 99.028717
## iter 30 value 98.882712
## final value 98.882705
## converged
## # weights: 25
## initial value 130.996235
## iter 10 value 99.126083
## iter 20 value 98.162919
## iter 30 value 97.150435
## iter 40 value 96.863461
## final value 96.863026
## converged
## # weights: 33
## initial value 141.697461
## iter 10 value 99.833213
## iter 20 value 98.298057
## iter 30 value 98.000842
## iter 40 value 97.908878
## iter 50 value 97.854294
## final value 97.854041
## converged
## # weights: 41
## initial value 123.351075
## iter 10 value 99.111682
## iter 20 value 98.106474
## iter 30 value 97.997898
## iter 40 value 97.914393
## iter 50 value 97.873234
## iter 60 value 97.844899
## iter 70 value 97.840433
## final value 97.840401
## converged
## # weights: 49
## initial value 122.150127
## iter 10 value 101.227793
## iter 20 value 98.543391
## iter 30 value 97.976012
## iter 40 value 97.288560
## iter 50 value 96.746711
## iter 60 value 96.723834
## iter 70 value 96.720774
## iter 80 value 96.720349
## final value 96.720321
## converged
## # weights: 57
## initial value 151.286308
## iter 10 value 99.866712
## iter 20 value 98.031770
## iter 30 value 97.308923
## iter 40 value 97.260559
## iter 50 value 97.255197
## iter 60 value 97.254720
## final value 97.254685
## converged
## # weights: 65
## initial value 120.942949
## iter 10 value 98.928434
## iter 20 value 98.122163
## iter 30 value 97.566442
## iter 40 value 97.082307
## iter 50 value 96.859906
## iter 60 value 96.764180
## iter 70 value 96.755244
## final value 96.755033
## converged
## # weights: 73
## initial value 146.929312
## iter 10 value 99.749998
## iter 20 value 97.854498
## iter 30 value 97.296353
## iter 40 value 96.984309
## iter 50 value 96.902587
## iter 60 value 96.881706
## final value 96.881536
## converged
## # weights: 81
## initial value 126.966842
## iter 10 value 99.862946
## iter 20 value 98.157144
## iter 30 value 97.000663
## iter 40 value 96.809952
## iter 50 value 96.734008
## iter 60 value 96.725991
## iter 70 value 96.720320
## iter 80 value 96.719456
## iter 90 value 96.719334
## iter 100 value 96.719187
## final value 96.719187
## stopped after 100 iterations
## # weights: 9
## initial value 125.587403
## iter 10 value 103.771447
## iter 20 value 103.329334
## final value 103.329269
## converged
## # weights: 17
## initial value 155.848199
## iter 10 value 101.432031
## iter 20 value 101.005156
## final value 100.981914
## converged
## # weights: 25
## initial value 116.135075
## iter 10 value 102.247437
## iter 20 value 100.683063
## iter 30 value 100.628905
## iter 40 value 100.628390
## iter 50 value 100.608020
## iter 60 value 100.590779
## iter 70 value 100.585765
## iter 80 value 100.583583
## final value 100.583575
## converged
## # weights: 33
## initial value 151.077567
## iter 10 value 100.819147
## iter 20 value 100.283621
## iter 30 value 100.243689
## final value 100.242448
## converged
## # weights: 41
## initial value 156.133817
## iter 10 value 100.973207
## iter 20 value 100.503445
## iter 30 value 100.488968
## iter 40 value 100.415733
## iter 50 value 100.249733
## iter 60 value 100.242899
## final value 100.242209
## converged
## # weights: 49
## initial value 129.736290
## iter 10 value 101.857205
## iter 20 value 100.509670
## iter 30 value 100.411042
## iter 40 value 100.249508
## iter 50 value 100.242959
## iter 60 value 100.242089
## final value 100.242087
## converged
## # weights: 57
## initial value 118.513239
## iter 10 value 101.612976
## iter 20 value 100.516223
## iter 30 value 100.403272
## iter 40 value 100.272068
## iter 50 value 100.247017
## iter 60 value 100.243331
## iter 70 value 100.243133
## final value 100.243130
## converged
## # weights: 65
## initial value 123.310568
## iter 10 value 101.709930
## iter 20 value 100.498653
## iter 30 value 100.302164
## iter 40 value 100.244689
## iter 50 value 100.243190
## iter 60 value 100.242905
## final value 100.242892
## converged
## # weights: 73
## initial value 117.487451
## iter 10 value 101.514860
## iter 20 value 100.666566
## iter 30 value 100.594912
## iter 40 value 100.464098
## iter 50 value 100.274549
## iter 60 value 100.245038
## iter 70 value 100.242052
## final value 100.241926
## converged
## # weights: 81
## initial value 140.296222
## iter 10 value 101.483838
## iter 20 value 100.533777
## iter 30 value 100.337580
## iter 40 value 100.252604
## iter 50 value 100.247472
## iter 60 value 100.247054
## final value 100.247048
## converged
## # weights: 9
## initial value 115.148619
## iter 10 value 104.652349
## iter 20 value 104.274553
## iter 20 value 104.274553
## final value 104.274553
## converged
## # weights: 17
## initial value 115.100248
## iter 10 value 102.914578
## iter 20 value 102.339312
## final value 102.337369
## converged
## # weights: 25
## initial value 138.410332
## iter 10 value 103.156687
## iter 20 value 102.219012
## iter 30 value 102.193481
## final value 102.191682
## converged
## # weights: 33
## initial value 140.069317
## iter 10 value 103.341360
## iter 20 value 102.204047
## iter 30 value 102.194311
## iter 40 value 102.185699
## iter 50 value 102.184313
## iter 60 value 102.184137
## iter 60 value 102.184137
## iter 60 value 102.184137
## final value 102.184137
## converged
## # weights: 41
## initial value 117.464007
## iter 10 value 103.155451
## iter 20 value 102.209924
## iter 30 value 102.183260
## iter 40 value 102.175497
## final value 102.175372
## converged
## # weights: 49
## initial value 129.041308
## iter 10 value 102.499884
## iter 20 value 102.179424
## iter 30 value 102.163892
## iter 40 value 102.163626
## final value 102.163599
## converged
## # weights: 57
## initial value 142.101357
## iter 10 value 102.454914
## iter 20 value 102.188594
## iter 30 value 102.169308
## iter 40 value 102.158947
## iter 50 value 102.157401
## final value 102.157379
## converged
## # weights: 65
## initial value 139.502593
## iter 10 value 102.677423
## iter 20 value 102.194984
## iter 30 value 102.178461
## iter 40 value 102.178082
## iter 50 value 102.169728
## iter 60 value 102.158487
## iter 70 value 102.157266
## final value 102.157224
## converged
## # weights: 73
## (nnet optimisation trace, abridged: the same block repeats for every candidate
##  network size -- "# weights" 9, 17, 25, ..., 81 -- and for every tuning/resampling
##  repetition. Each fit starts from an objective value of roughly 110-280 and settles
##  between roughly 81 and 110; most fits report "converged", a handful report
##  "stopped after 100 iterations".)
## iter 10 value 103.473338
## iter 20 value 102.937641
## iter 30 value 102.864201
## iter 40 value 102.863527
## iter 40 value 102.863527
## iter 40 value 102.863527
## final value 102.863527
## converged
## # weights: 41
## initial value 134.089114
## iter 10 value 103.461923
## iter 20 value 102.772047
## iter 30 value 102.767886
## final value 102.767859
## converged
## # weights: 49
## initial value 120.733964
## iter 10 value 103.110890
## iter 20 value 102.824662
## iter 30 value 102.714110
## iter 40 value 102.706169
## final value 102.706149
## converged
## # weights: 57
## initial value 142.296033
## iter 10 value 103.022492
## iter 20 value 102.721684
## iter 30 value 102.713700
## final value 102.713646
## converged
## # weights: 65
## initial value 127.514273
## iter 10 value 103.627460
## iter 20 value 102.774489
## iter 30 value 102.688009
## iter 40 value 102.662817
## iter 50 value 102.655059
## final value 102.655025
## converged
## # weights: 73
## initial value 121.102603
## iter 10 value 103.326218
## iter 20 value 102.862984
## iter 30 value 102.652395
## iter 40 value 102.649288
## final value 102.649112
## converged
## # weights: 81
## initial value 163.839692
## iter 10 value 103.360183
## iter 20 value 102.639234
## iter 30 value 102.627995
## final value 102.627789
## converged
## # weights: 9
## initial value 114.075024
## iter 10 value 105.584671
## iter 20 value 105.301126
## final value 105.301121
## converged
## # weights: 17
## initial value 163.542716
## iter 10 value 105.230910
## iter 20 value 104.301168
## iter 30 value 104.233519
## final value 104.233368
## converged
## # weights: 25
## initial value 142.189586
## iter 10 value 106.408960
## iter 20 value 104.628331
## iter 30 value 104.520097
## iter 40 value 104.045692
## iter 50 value 103.947578
## iter 60 value 103.947222
## final value 103.947207
## converged
## # weights: 33
## initial value 133.209416
## iter 10 value 104.236277
## iter 20 value 103.760744
## iter 30 value 103.724719
## final value 103.724569
## converged
## # weights: 41
## initial value 154.685398
## iter 10 value 103.858325
## iter 20 value 103.631511
## iter 30 value 103.630121
## iter 30 value 103.630120
## iter 30 value 103.630120
## final value 103.630120
## converged
## # weights: 49
## initial value 140.846097
## iter 10 value 103.990768
## iter 20 value 103.626215
## iter 30 value 103.608436
## final value 103.608318
## converged
## # weights: 57
## initial value 140.261834
## iter 10 value 103.885546
## iter 20 value 103.555902
## iter 30 value 103.549458
## final value 103.549419
## converged
## # weights: 65
## initial value 163.297263
## iter 10 value 103.898477
## iter 20 value 103.553008
## iter 30 value 103.538862
## final value 103.538824
## converged
## # weights: 73
## initial value 118.472167
## iter 10 value 103.786192
## iter 20 value 103.515699
## iter 30 value 103.503836
## final value 103.503703
## converged
## # weights: 81
## initial value 127.477551
## iter 10 value 105.114187
## iter 20 value 103.633178
## iter 30 value 103.527792
## iter 40 value 103.501602
## iter 50 value 103.500642
## final value 103.500615
## converged
## # weights: 9
## initial value 116.357842
## iter 10 value 106.119610
## iter 20 value 105.965465
## iter 20 value 105.965465
## final value 105.965465
## converged
## # weights: 17
## initial value 119.786521
## iter 10 value 105.441226
## iter 20 value 105.107073
## iter 30 value 105.078829
## iter 30 value 105.078829
## iter 30 value 105.078829
## final value 105.078829
## converged
## # weights: 25
## initial value 130.415293
## iter 10 value 105.186158
## iter 20 value 105.061537
## final value 105.061001
## converged
## # weights: 33
## initial value 120.975997
## iter 10 value 104.735400
## iter 20 value 104.543138
## iter 30 value 104.541813
## iter 30 value 104.541812
## iter 30 value 104.541812
## final value 104.541812
## converged
## # weights: 41
## initial value 118.310179
## iter 10 value 104.572612
## iter 20 value 104.460472
## iter 30 value 104.450471
## final value 104.450462
## converged
## # weights: 49
## initial value 124.103510
## iter 10 value 104.740650
## iter 20 value 104.463837
## iter 30 value 104.368547
## final value 104.367502
## converged
## # weights: 57
## initial value 161.600286
## iter 10 value 104.396592
## iter 20 value 104.338788
## iter 30 value 104.294924
## final value 104.294099
## converged
## # weights: 65
## initial value 151.696384
## iter 10 value 104.509809
## iter 20 value 104.300114
## iter 30 value 104.296171
## final value 104.296117
## converged
## # weights: 73
## initial value 144.982537
## iter 10 value 104.368800
## iter 20 value 104.253057
## iter 30 value 104.236619
## iter 40 value 104.234762
## final value 104.234759
## converged
## # weights: 81
## initial value 124.456019
## iter 10 value 104.434560
## iter 20 value 104.240710
## iter 30 value 104.227681
## final value 104.227546
## converged
## # weights: 9
## initial value 127.509932
## iter 10 value 106.674882
## iter 20 value 106.567821
## iter 20 value 106.567821
## final value 106.567821
## converged
## # weights: 17
## initial value 117.354749
## iter 10 value 106.708739
## iter 20 value 105.925514
## iter 30 value 105.916779
## final value 105.916767
## converged
## # weights: 25
## initial value 118.287747
## iter 10 value 105.406305
## iter 20 value 105.294275
## final value 105.294045
## converged
## # weights: 33
## initial value 138.638901
## iter 10 value 105.190772
## iter 20 value 105.140815
## final value 105.140467
## converged
## # weights: 41
## initial value 157.095434
## iter 10 value 105.257222
## iter 20 value 105.086512
## iter 30 value 105.084918
## iter 40 value 105.083718
## iter 50 value 105.038994
## iter 60 value 105.030708
## final value 105.030704
## converged
## # weights: 49
## initial value 170.698740
## iter 10 value 105.231161
## iter 20 value 104.964545
## iter 30 value 104.957970
## final value 104.957913
## converged
## # weights: 57
## initial value 157.448455
## iter 10 value 105.292850
## iter 20 value 104.961211
## iter 30 value 104.924946
## final value 104.922090
## converged
## # weights: 65
## initial value 138.010390
## iter 10 value 104.960653
## iter 20 value 104.875629
## iter 30 value 104.872546
## final value 104.872516
## converged
## # weights: 73
## initial value 126.000814
## iter 10 value 105.129682
## iter 20 value 104.869932
## iter 30 value 104.844856
## iter 40 value 104.836272
## final value 104.836247
## converged
## # weights: 81
## initial value 138.858044
## iter 10 value 105.018473
## iter 20 value 104.817076
## iter 30 value 104.791865
## final value 104.791579
## converged
## # weights: 9
## initial value 153.668695
## iter 10 value 107.362909
## iter 20 value 107.116171
## final value 107.116166
## converged
## # weights: 17
## initial value 133.586588
## iter 10 value 106.598190
## iter 20 value 106.479239
## iter 30 value 106.441346
## iter 30 value 106.441346
## iter 30 value 106.441346
## final value 106.441346
## converged
## # weights: 25
## initial value 125.369429
## iter 10 value 105.999942
## iter 20 value 105.896341
## final value 105.894661
## converged
## # weights: 33
## initial value 117.370180
## iter 10 value 106.021084
## iter 20 value 105.839805
## iter 30 value 105.834073
## iter 30 value 105.834072
## iter 30 value 105.834072
## final value 105.834072
## converged
## # weights: 41
## initial value 120.241994
## iter 10 value 105.937769
## iter 20 value 105.681932
## iter 30 value 105.603960
## final value 105.603750
## converged
## # weights: 49
## initial value 177.715266
## iter 10 value 105.722331
## iter 20 value 105.495699
## iter 30 value 105.486980
## final value 105.486960
## converged
## # weights: 57
## initial value 213.631250
## iter 10 value 105.583494
## iter 20 value 105.436782
## iter 30 value 105.429567
## final value 105.429550
## converged
## # weights: 65
## initial value 125.208630
## iter 10 value 105.667226
## iter 20 value 105.458289
## iter 30 value 105.451483
## final value 105.451305
## converged
## # weights: 73
## initial value 160.959658
## iter 10 value 105.699114
## iter 20 value 105.430988
## iter 30 value 105.392605
## iter 40 value 105.392111
## iter 40 value 105.392110
## iter 40 value 105.392110
## final value 105.392110
## converged
## # weights: 81
## initial value 190.992330
## iter 10 value 105.601429
## iter 20 value 105.342122
## iter 30 value 105.315945
## iter 40 value 105.315621
## iter 40 value 105.315621
## iter 40 value 105.315621
## final value 105.315621
## converged
## # weights: 9
## initial value 147.532365
## iter 10 value 107.705964
## final value 107.617121
## converged
## # weights: 17
## initial value 125.510972
## iter 10 value 107.116509
## iter 20 value 107.006706
## final value 107.006525
## converged
## # weights: 25
## initial value 180.719016
## iter 10 value 107.362994
## iter 20 value 107.016278
## final value 107.003002
## converged
## # weights: 33
## initial value 123.225071
## iter 10 value 107.100952
## iter 20 value 106.555498
## iter 30 value 106.373268
## iter 40 value 106.372188
## final value 106.372150
## converged
## # weights: 41
## initial value 150.031035
## iter 10 value 106.291733
## iter 20 value 106.116697
## iter 30 value 106.115736
## final value 106.115734
## converged
## # weights: 49
## initial value 129.248944
## iter 10 value 106.461245
## iter 20 value 106.080161
## iter 30 value 105.996249
## final value 105.994196
## converged
## # weights: 57
## initial value 132.110937
## iter 10 value 106.170611
## iter 20 value 105.971896
## iter 30 value 105.963726
## final value 105.963665
## converged
## # weights: 65
## initial value 143.184106
## iter 10 value 106.018026
## iter 20 value 105.887585
## iter 30 value 105.885863
## final value 105.885861
## converged
## # weights: 73
## initial value 126.904233
## iter 10 value 106.070685
## iter 20 value 105.872719
## iter 30 value 105.842977
## final value 105.842348
## converged
## # weights: 81
## initial value 164.953161
## iter 10 value 106.083639
## iter 20 value 105.877751
## iter 30 value 105.872399
## final value 105.871947
## converged
## # weights: 9
## initial value 115.556924
## iter 10 value 104.986062
## iter 20 value 104.727303
## iter 30 value 104.562336
## final value 104.561782
## converged
## # weights: 17
## initial value 157.340143
## iter 10 value 101.952078
## iter 20 value 99.158170
## iter 30 value 98.616502
## iter 40 value 98.593723
## final value 98.593680
## converged
## # weights: 25
## initial value 114.196924
## iter 10 value 102.457765
## iter 20 value 97.189165
## iter 30 value 96.522859
## iter 40 value 96.513827
## final value 96.513825
## converged
## # weights: 33
## initial value 118.881709
## iter 10 value 103.019122
## iter 20 value 94.970342
## iter 30 value 92.635347
## iter 40 value 91.989862
## iter 50 value 91.595842
## iter 60 value 91.591694
## final value 91.591687
## converged
## # weights: 41
## initial value 140.370426
## iter 10 value 100.219405
## iter 20 value 94.626581
## iter 30 value 93.854221
## iter 40 value 93.009475
## iter 50 value 92.432108
## iter 60 value 92.306983
## iter 70 value 92.292273
## iter 80 value 92.233553
## iter 90 value 91.238231
## iter 100 value 90.515734
## final value 90.515734
## stopped after 100 iterations
## # weights: 49
## initial value 119.667047
## iter 10 value 98.582138
## iter 20 value 93.945663
## iter 30 value 91.457019
## iter 40 value 90.344170
## iter 50 value 88.829730
## iter 60 value 88.268492
## iter 70 value 88.227485
## iter 80 value 88.091500
## iter 90 value 88.078138
## final value 88.078009
## converged
## # weights: 57
## initial value 149.457564
## iter 10 value 100.204087
## iter 20 value 95.373601
## iter 30 value 92.163568
## iter 40 value 87.816351
## iter 50 value 86.205763
## iter 60 value 85.984836
## iter 70 value 85.952484
## iter 80 value 85.945757
## iter 90 value 85.938951
## final value 85.938695
## converged
## # weights: 65
## initial value 190.262222
## iter 10 value 99.616021
## iter 20 value 91.694847
## iter 30 value 88.862745
## iter 40 value 87.186991
## iter 50 value 86.879490
## iter 60 value 86.460303
## iter 70 value 86.071360
## iter 80 value 85.989208
## iter 90 value 85.980585
## final value 85.980398
## converged
## # weights: 73
## initial value 114.746850
## iter 10 value 98.707763
## iter 20 value 93.047461
## iter 30 value 89.738351
## iter 40 value 88.164106
## iter 50 value 87.771485
## iter 60 value 87.409314
## iter 70 value 86.764833
## iter 80 value 86.410017
## iter 90 value 86.299315
## iter 100 value 86.281788
## final value 86.281788
## stopped after 100 iterations
## # weights: 81
## initial value 177.920535
## iter 10 value 100.436578
## iter 20 value 93.286187
## iter 30 value 87.792921
## iter 40 value 86.220375
## iter 50 value 85.175061
## iter 60 value 84.841985
## iter 70 value 84.613709
## iter 80 value 84.574044
## iter 90 value 84.569529
## iter 100 value 84.516303
## final value 84.516303
## stopped after 100 iterations
## # weights: 9
## initial value 114.821013
## iter 10 value 106.298712
## iter 20 value 105.951958
## final value 105.951784
## converged
## # weights: 17
## initial value 151.449650
## iter 10 value 105.056679
## iter 20 value 102.865077
## iter 30 value 102.367491
## final value 102.364003
## converged
## # weights: 25
## initial value 128.810751
## iter 10 value 104.766479
## iter 20 value 102.479322
## iter 30 value 101.820275
## iter 40 value 101.680781
## final value 101.680506
## converged
## # weights: 33
## initial value 172.165733
## iter 10 value 103.710388
## iter 20 value 100.871714
## iter 30 value 100.103773
## iter 40 value 99.647333
## iter 50 value 99.620287
## final value 99.620278
## converged
## # weights: 41
## initial value 120.738903
## iter 10 value 103.886119
## iter 20 value 100.777526
## iter 30 value 100.213253
## iter 40 value 99.655071
## iter 50 value 99.567369
## iter 60 value 99.561931
## iter 70 value 99.560001
## final value 99.559406
## converged
## # weights: 49
## initial value 118.339625
## iter 10 value 103.708807
## iter 20 value 100.271012
## iter 30 value 100.143852
## iter 40 value 99.964829
## iter 50 value 99.583487
## iter 60 value 99.549141
## iter 70 value 99.548790
## iter 70 value 99.548790
## iter 70 value 99.548790
## final value 99.548790
## converged
## # weights: 57
## initial value 144.175129
## iter 10 value 102.315015
## iter 20 value 100.312873
## iter 30 value 99.986321
## iter 40 value 99.704755
## iter 50 value 99.563635
## iter 60 value 99.552227
## iter 70 value 99.547023
## final value 99.546907
## converged
## # weights: 65
## initial value 125.587277
## iter 10 value 104.488076
## iter 20 value 100.599554
## iter 30 value 99.635760
## iter 40 value 99.372237
## iter 50 value 98.805084
## iter 60 value 98.692182
## iter 70 value 98.688572
## iter 80 value 98.688467
## final value 98.688465
## converged
## # weights: 73
## initial value 141.905060
## iter 10 value 102.280678
## iter 20 value 100.418149
## iter 30 value 100.154916
## iter 40 value 99.649948
## iter 50 value 99.551606
## iter 60 value 99.543353
## iter 70 value 99.542492
## final value 99.542479
## converged
## # weights: 81
## initial value 155.735610
## iter 10 value 102.727120
## iter 20 value 100.994595
## iter 30 value 100.742046
## iter 40 value 100.323952
## iter 50 value 100.201576
## iter 60 value 100.167627
## iter 70 value 100.126334
## iter 80 value 99.991201
## iter 90 value 99.970130
## iter 100 value 99.969411
## final value 99.969411
## stopped after 100 iterations
## # weights: 9
## initial value 122.796804
## iter 10 value 107.417714
## final value 106.981806
## converged
## # weights: 17
## initial value 140.958252
## iter 10 value 105.678159
## iter 20 value 104.613252
## iter 30 value 104.466703
## final value 104.466685
## converged
## # weights: 25
## initial value 170.674088
## iter 10 value 105.271756
## iter 20 value 103.780243
## iter 30 value 103.705088
## final value 103.702996
## converged
## # weights: 33
## initial value 177.000496
## iter 10 value 105.785102
## iter 20 value 103.589363
## iter 30 value 103.226732
## iter 40 value 103.219023
## final value 103.218830
## converged
## # weights: 41
## initial value 115.790383
## iter 10 value 104.307228
## iter 20 value 103.270900
## iter 30 value 103.213504
## iter 40 value 103.212746
## final value 103.212603
## converged
## # weights: 49
## initial value 115.319520
## iter 10 value 104.079225
## iter 20 value 103.252666
## iter 30 value 103.213417
## iter 40 value 103.207602
## iter 50 value 103.207311
## iter 50 value 103.207310
## iter 50 value 103.207310
## final value 103.207310
## converged
## # weights: 57
## initial value 175.349458
## iter 10 value 104.431557
## iter 20 value 103.361858
## iter 30 value 103.246294
## iter 40 value 103.217960
## iter 50 value 103.210411
## iter 60 value 103.208240
## iter 70 value 103.206068
## final value 103.205957
## converged
## # weights: 65
## initial value 143.356348
## iter 10 value 104.119004
## iter 20 value 103.445147
## iter 30 value 103.249053
## iter 40 value 103.239876
## iter 50 value 103.217720
## iter 60 value 103.207058
## iter 70 value 103.206724
## iter 80 value 103.206691
## final value 103.206670
## converged
## # weights: 73
## initial value 120.807189
## iter 10 value 105.200536
## iter 20 value 103.621618
## iter 30 value 103.240703
## iter 40 value 103.220643
## iter 50 value 103.208008
## iter 60 value 103.203425
## iter 70 value 103.203047
## iter 80 value 103.202994
## final value 103.202989
## converged
## # weights: 81
## initial value 122.916054
## iter 10 value 104.661229
## iter 20 value 103.336274
## iter 30 value 103.249102
## iter 40 value 103.203687
## iter 50 value 103.201970
## iter 60 value 103.201906
## final value 103.201904
## converged
## # weights: 9
## initial value 150.694915
## iter 10 value 108.073192
## iter 20 value 106.997611
## iter 30 value 106.981188
## final value 106.981152
## converged
## # weights: 17
## initial value 116.196867
## iter 10 value 107.039858
## iter 20 value 105.903166
## iter 30 value 105.585452
## final value 105.583650
## converged
## # weights: 25
## initial value 159.420570
## iter 10 value 106.188512
## iter 20 value 105.510990
## iter 30 value 105.449123
## final value 105.449059
## converged
## # weights: 33
## initial value 161.606023
## iter 10 value 105.032164
## iter 20 value 104.972400
## iter 30 value 104.971656
## iter 30 value 104.971656
## iter 30 value 104.971656
## final value 104.971656
## converged
## # weights: 41
## initial value 143.264882
## iter 10 value 105.977724
## iter 20 value 105.410937
## iter 30 value 105.380361
## final value 105.380029
## converged
## # weights: 49
## initial value 116.959423
## iter 10 value 105.636019
## iter 20 value 105.208867
## iter 30 value 105.022915
## iter 40 value 104.975970
## iter 50 value 104.968872
## iter 60 value 104.968787
## final value 104.968773
## converged
## # weights: 57
## initial value 125.982368
## iter 10 value 105.516857
## iter 20 value 105.102966
## iter 30 value 105.058722
## iter 40 value 105.058190
## final value 105.058188
## converged
## # weights: 65
## initial value 119.198669
## iter 10 value 105.870779
## iter 20 value 105.036676
## iter 30 value 104.973973
## iter 40 value 104.968883
## iter 50 value 104.968487
## final value 104.968473
## converged
## # weights: 73
## initial value 123.243184
## iter 10 value 105.287165
## iter 20 value 105.055847
## iter 30 value 105.041884
## final value 105.041849
## converged
## # weights: 81
## initial value 126.257354
## iter 10 value 105.163370
## iter 20 value 105.002281
## iter 30 value 104.973531
## iter 40 value 104.969192
## iter 50 value 104.968418
## iter 60 value 104.968223
## iter 70 value 104.967609
## final value 104.967579
## converged
## # weights: 9
## initial value 118.645347
## iter 10 value 109.001582
## iter 20 value 108.727062
## iter 20 value 108.727062
## final value 108.727062
## converged
## # weights: 17
## initial value 116.611101
## iter 10 value 107.216434
## iter 20 value 106.826286
## final value 106.825883
## converged
## # weights: 25
## initial value 119.035232
## iter 10 value 106.878011
## iter 20 value 106.200382
## iter 30 value 106.175729
## final value 106.175725
## converged
## # weights: 33
## initial value 154.585300
## iter 10 value 106.763051
## iter 20 value 106.211957
## iter 30 value 106.178346
## iter 40 value 106.178102
## final value 106.178100
## converged
## # weights: 41
## initial value 126.615115
## iter 10 value 106.704942
## iter 20 value 106.209549
## iter 30 value 106.136309
## iter 40 value 106.134130
## final value 106.134126
## converged
## # weights: 49
## initial value 191.802103
## iter 10 value 106.708139
## iter 20 value 106.529296
## iter 30 value 106.168296
## iter 40 value 106.110590
## iter 50 value 106.107931
## final value 106.107925
## converged
## # weights: 57
## initial value 151.847584
## iter 10 value 106.669756
## iter 20 value 106.121245
## iter 30 value 106.075685
## iter 40 value 106.072311
## final value 106.072281
## converged
## # weights: 65
## initial value 151.004834
## iter 10 value 106.270128
## iter 20 value 106.117064
## iter 30 value 106.069368
## iter 40 value 106.067628
## final value 106.067625
## converged
## # weights: 73
## initial value 151.722571
## iter 10 value 106.296254
## iter 20 value 106.063959
## iter 30 value 106.036241
## final value 106.036026
## converged
## # weights: 81
## initial value 185.581397
## iter 10 value 107.172317
## iter 20 value 106.117700
## iter 30 value 106.035374
## iter 40 value 106.028556
## final value 106.028515
## converged
## # weights: 9
## initial value 120.352697
## iter 10 value 108.294912
## final value 108.216109
## converged
## # weights: 17
## initial value 144.564553
## iter 10 value 107.835770
## iter 20 value 107.572663
## final value 107.569481
## converged
## # weights: 25
## initial value 124.775953
## iter 10 value 107.331205
## iter 20 value 107.161560
## iter 30 value 107.134147
## final value 107.134124
## converged
## # weights: 33
## initial value 123.843187
## iter 10 value 107.048541
## iter 20 value 106.981004
## final value 106.980723
## converged
## # weights: 41
## initial value 116.988608
## iter 10 value 107.117696
## iter 20 value 106.907906
## final value 106.905070
## converged
## # weights: 49
## initial value 125.353849
## iter 10 value 107.333929
## iter 20 value 106.890797
## iter 30 value 106.858906
## final value 106.858454
## converged
## # weights: 57
## initial value 185.101970
## iter 10 value 107.111567
## iter 20 value 106.912970
## iter 30 value 106.847141
## iter 40 value 106.843526
## final value 106.843514
## converged
## # weights: 65
## initial value 166.167045
## iter 10 value 106.974602
## iter 20 value 106.841958
## iter 30 value 106.800548
## final value 106.800461
## converged
## # weights: 73
## initial value 150.814864
## iter 10 value 107.209140
## iter 20 value 106.833533
## iter 30 value 106.806640
## iter 40 value 106.805443
## final value 106.805331
## converged
## # weights: 81
## initial value 145.404845
## iter 10 value 106.931530
## iter 20 value 106.783885
## iter 30 value 106.777485
## final value 106.777442
## converged
## # weights: 9
## initial value 142.864192
## iter 10 value 108.903743
## iter 20 value 108.745229
## iter 20 value 108.745228
## final value 108.745228
## converged
## # weights: 17
## initial value 148.284163
## iter 10 value 108.314540
## iter 20 value 108.184309
## iter 30 value 108.147387
## final value 108.147382
## converged
## # weights: 25
## initial value 119.010485
## iter 10 value 107.920948
## iter 20 value 107.783396
## final value 107.783205
## converged
## # weights: 33
## initial value 118.472852
## iter 10 value 107.761048
## iter 20 value 107.666628
## final value 107.666580
## converged
## # weights: 41
## initial value 163.065883
## iter 10 value 107.672996
## iter 20 value 107.546174
## final value 107.528867
## converged
## # weights: 49
## initial value 128.629028
## iter 10 value 107.582137
## iter 20 value 107.473397
## iter 30 value 107.473129
## iter 30 value 107.473128
## iter 30 value 107.473128
## final value 107.473128
## converged
## # weights: 57
## initial value 125.854010
## iter 10 value 107.819727
## iter 20 value 107.524105
## iter 30 value 107.516528
## iter 40 value 107.448068
## iter 50 value 107.443064
## final value 107.443057
## converged
## # weights: 65
## initial value 160.617509
## iter 10 value 107.493033
## iter 20 value 107.425541
## final value 107.424967
## converged
## # weights: 73
## initial value 124.169125
## iter 10 value 107.628992
## iter 20 value 107.405226
## iter 30 value 107.393587
## final value 107.393572
## converged
## # weights: 81
## initial value 126.099587
## iter 10 value 107.559644
## iter 20 value 107.403475
## iter 30 value 107.376219
## final value 107.376162
## converged
## # weights: 9
## initial value 130.602756
## iter 10 value 109.528606
## iter 20 value 109.226719
## final value 109.226370
## converged
## # weights: 17
## initial value 126.843144
## iter 10 value 108.906153
## iter 20 value 108.674386
## final value 108.670945
## converged
## # weights: 25
## initial value 164.731889
## iter 10 value 108.839969
## iter 20 value 108.731497
## iter 30 value 108.710593
## final value 108.710560
## converged
## # weights: 33
## initial value 140.058621
## iter 10 value 108.421934
## iter 20 value 108.139873
## iter 30 value 108.134937
## final value 108.134932
## converged
## # weights: 41
## initial value 152.977599
## iter 10 value 108.466511
## iter 20 value 108.099690
## iter 30 value 108.065142
## final value 108.065063
## converged
## # weights: 49
## initial value 159.049377
## iter 10 value 108.152585
## iter 20 value 108.059824
## iter 30 value 108.057853
## iter 30 value 108.057853
## iter 30 value 108.057853
## final value 108.057853
## converged
## # weights: 57
## initial value 124.426122
## iter 10 value 108.102752
## iter 20 value 107.975332
## iter 30 value 107.951012
## final value 107.949999
## converged
## # weights: 65
## initial value 140.474538
## iter 10 value 108.156639
## iter 20 value 107.919638
## iter 30 value 107.907154
## final value 107.907120
## converged
## # weights: 73
## initial value 123.984957
## iter 10 value 107.975710
## iter 20 value 107.890062
## iter 30 value 107.879284
## final value 107.879235
## converged
## # weights: 81
## initial value 197.399310
## iter 10 value 107.990211
## iter 20 value 107.854499
## iter 30 value 107.838065
## iter 40 value 107.837838
## iter 40 value 107.837838
## iter 40 value 107.837838
## final value 107.837838
## converged
## # weights: 9
## initial value 120.615663
## iter 10 value 109.710235
## final value 109.665595
## converged
## # weights: 17
## initial value 128.750020
## iter 10 value 109.239634
## iter 20 value 109.154180
## iter 30 value 109.150148
## final value 109.145014
## converged
## # weights: 25
## initial value 144.607138
## iter 10 value 108.878035
## iter 20 value 108.756491
## final value 108.756182
## converged
## # weights: 33
## initial value 184.062964
## iter 10 value 108.805496
## iter 20 value 108.587277
## final value 108.585475
## converged
## # weights: 41
## initial value 149.537629
## iter 10 value 108.631099
## iter 20 value 108.517523
## final value 108.516546
## converged
## # weights: 49
## initial value 153.387289
## iter 10 value 108.598839
## iter 20 value 108.493360
## iter 30 value 108.491276
## final value 108.491194
## converged
## # weights: 57
## initial value 186.540917
## iter 10 value 108.576515
## iter 20 value 108.416006
## iter 30 value 108.396721
## final value 108.396649
## converged
## # weights: 65
## initial value 129.225597
## iter 10 value 108.545714
## iter 20 value 108.414729
## iter 30 value 108.334566
## iter 40 value 108.333798
## iter 40 value 108.333797
## iter 40 value 108.333797
## final value 108.333797
## converged
## # weights: 73
## initial value 129.351305
## iter 10 value 108.371436
## iter 20 value 108.325590
## iter 30 value 108.323549
## iter 30 value 108.323548
## iter 30 value 108.323548
## final value 108.323548
## converged
## # weights: 81
## initial value 173.823405
## iter 10 value 108.424142
## iter 20 value 108.299081
## iter 30 value 108.279825
## final value 108.279795
## converged
## # weights: 9
## initial value 119.485809
## iter 10 value 110.331495
## iter 20 value 110.067861
## iter 20 value 110.067861
## final value 110.067861
## converged
## # weights: 17
## initial value 127.646779
## iter 10 value 109.718172
## iter 20 value 109.661729
## final value 109.661464
## converged
## # weights: 25
## initial value 118.469839
## iter 10 value 109.294292
## iter 20 value 109.184756
## final value 109.184115
## converged
## # weights: 33
## initial value 117.123488
## iter 10 value 109.510233
## iter 20 value 109.450339
## iter 30 value 109.446668
## iter 30 value 109.446667
## iter 30 value 109.446667
## final value 109.446667
## converged
## # weights: 41
## initial value 128.525421
## iter 10 value 109.111839
## iter 20 value 108.945912
## iter 30 value 108.943056
## final value 108.943053
## converged
## # weights: 49
## initial value 127.157924
## iter 10 value 108.876207
## iter 20 value 108.841725
## final value 108.841662
## converged
## # weights: 57
## initial value 123.344388
## iter 10 value 109.185374
## iter 20 value 108.915733
## iter 30 value 108.828834
## iter 40 value 108.821113
## final value 108.821094
## converged
## # weights: 65
## initial value 158.651683
## iter 10 value 109.055431
## iter 20 value 108.829887
## iter 30 value 108.759059
## iter 40 value 108.755430
## final value 108.755407
## converged
## # weights: 73
## initial value 130.595098
## iter 10 value 108.877327
## iter 20 value 108.762039
## iter 30 value 108.746488
## final value 108.746406
## converged
## # weights: 81
## initial value 139.521122
## iter 10 value 108.875462
## iter 20 value 108.747586
## iter 30 value 108.705032
## iter 40 value 108.700757
## final value 108.700718
## converged
## # weights: 17
## initial value 126.955600
## iter 10 value 119.544412
## iter 20 value 119.509658
## final value 119.508880
## converged
modelRF
## Neural Network
##
## 200 samples
## 6 predictor
## 2 classes: '0', '1'
##
## Pre-processing: centered (6), scaled (6)
## Resampling: Cross-Validated (10 fold)
## Summary of sample sizes: 180, 179, 180, 180, 181, 180, ...
## Resampling results across tuning parameters:
##
## size decay Accuracy Kappa
## 1 0.1 0.7139348 0.2778244
## 1 0.2 0.7239348 0.2954442
## 1 0.3 0.7189348 0.2851015
## 1 0.4 0.7091729 0.2604631
## 1 0.5 0.7139348 0.2694631
## 1 0.6 0.7191729 0.2792961
## 1 0.7 0.7241980 0.2829721
## 1 0.8 0.7394612 0.3116459
## 1 0.9 0.7146992 0.2284113
## 1 1.0 0.6849373 0.1053667
## 2 0.1 0.7142231 0.2652861
## 2 0.2 0.7194361 0.2871218
## 2 0.3 0.7239348 0.2989101
## 2 0.4 0.7239348 0.2958809
## 2 0.5 0.7189348 0.2852600
## 2 0.6 0.7189348 0.2808058
## 2 0.7 0.7241980 0.2901243
## 2 0.8 0.7394612 0.3180682
## 2 0.9 0.7394612 0.3116459
## 2 1.0 0.7444612 0.3269967
## 3 0.1 0.7189599 0.3108699
## 3 0.2 0.7286967 0.3005579
## 3 0.3 0.7134336 0.2677325
## 3 0.4 0.7186717 0.2802594
## 3 0.5 0.7239348 0.2964442
## 3 0.6 0.7189348 0.2808058
## 3 0.7 0.7394612 0.3180682
## 3 0.8 0.7344612 0.3029116
## 3 0.9 0.7344612 0.3029116
## 3 1.0 0.7394612 0.3180682
## 4 0.1 0.6743860 0.2268235
## 4 0.2 0.7286967 0.2975288
## 4 0.3 0.7184336 0.2805410
## 4 0.4 0.7186717 0.2802594
## 4 0.5 0.7239348 0.2964442
## 4 0.6 0.7294612 0.2989570
## 4 0.7 0.7394612 0.3180682
## 4 0.8 0.7344612 0.3077255
## 4 0.9 0.7394612 0.3180682
## 4 1.0 0.7344612 0.3027173
## 5 0.1 0.7102256 0.2918787
## 5 0.2 0.7186967 0.2817563
## 5 0.3 0.7086717 0.2609201
## 5 0.4 0.7234336 0.2892594
## 5 0.5 0.7239348 0.2964442
## 5 0.6 0.7344612 0.3095779
## 5 0.7 0.7344612 0.3077255
## 5 0.8 0.7344612 0.3077255
## 5 0.9 0.7394612 0.3180682
## 5 1.0 0.7394612 0.3180682
## 6 0.1 0.7099373 0.2953565
## 6 0.2 0.7286967 0.2958364
## 6 0.3 0.7134336 0.2699201
## 6 0.4 0.7186717 0.2802594
## 6 0.5 0.7289348 0.3070652
## 6 0.6 0.7394612 0.3180682
## 6 0.7 0.7344612 0.3077255
## 6 0.8 0.7344612 0.3077255
## 6 0.9 0.7344612 0.3077255
## 6 1.0 0.7394612 0.3180682
## 7 0.1 0.7257268 0.3291472
## 7 0.2 0.7291980 0.2972823
## 7 0.3 0.7086717 0.2609201
## 7 0.4 0.7186717 0.2802594
## 7 0.5 0.7341980 0.3163837
## 7 0.6 0.7344612 0.3077255
## 7 0.7 0.7344612 0.3077255
## 7 0.8 0.7344612 0.3077255
## 7 0.9 0.7344612 0.3077255
## 7 1.0 0.7394612 0.3180682
## 8 0.1 0.7059398 0.3094532
## 8 0.2 0.7044361 0.2507258
## 8 0.3 0.7086717 0.2609201
## 8 0.4 0.7239348 0.2964442
## 8 0.5 0.7341980 0.3163837
## 8 0.6 0.7344612 0.3077255
## 8 0.7 0.7344612 0.3077255
## 8 0.8 0.7344612 0.3077255
## 8 0.9 0.7344612 0.3077255
## 8 1.0 0.7394612 0.3180682
## 9 0.1 0.7054637 0.3044139
## 9 0.2 0.7289599 0.2983537
## 9 0.3 0.7136717 0.2715410
## 9 0.4 0.7239348 0.2964442
## 9 0.5 0.7341980 0.3163837
## 9 0.6 0.7344612 0.3077255
## 9 0.7 0.7344612 0.3077255
## 9 0.8 0.7344612 0.3077255
## 9 0.9 0.7344612 0.3077255
## 9 1.0 0.7394612 0.3180682
## 10 0.1 0.7004637 0.2784819
## 10 0.2 0.7291980 0.2995380
## 10 0.3 0.7134336 0.2699201
## 10 0.4 0.7239348 0.2964442
## 10 0.5 0.7341980 0.3163837
## 10 0.6 0.7344612 0.3077255
## 10 0.7 0.7344612 0.3077255
## 10 0.8 0.7344612 0.3077255
## 10 0.9 0.7344612 0.3077255
## 10 1.0 0.7394612 0.3180682
##
## Accuracy was used to select the optimal model using the largest value.
## The final values used for the model were size = 2 and decay = 1.
predRF <- predict(modelRF, test)
From the results above it was concluded that size = 5 with decay = 0.1 performs well, so nnet is refit below with those settings (note that caret's printed selection above was size = 2 and decay = 1).
nn<-nnet(taisyo ~ slope + uedokaburi + masuhonsuu + long + kyouyounensuu + kei, data=train,size = 5, rang = .1, decay = 0.1, maxit = 200 )
## # weights: 41
## initial value 141.095885
## iter 10 value 109.847857
## iter 20 value 104.294848
## iter 30 value 101.243860
## iter 40 value 100.564868
## iter 50 value 98.421948
## iter 60 value 95.584704
## iter 70 value 90.716160
## iter 80 value 88.893495
## iter 90 value 87.915759
## iter 100 value 87.722571
## iter 110 value 86.951616
## iter 120 value 83.985251
## iter 130 value 82.763204
## iter 140 value 82.696915
## final value 82.695073
## converged
nn
## a 6-5-1 network with 41 weights
## inputs: slope uedokaburi masuhonsuu long kyouyounensuu kei
## output(s): taisyo
## options were - entropy fitting decay=0.1
nn_predict<-predict(nn,test,type="class")
table(nn_predict, test$taisyo)
##
## nn_predict 0 1
## 0 54 8
## 1 5 15
cat(test$taisyo, file = "testtaisyo2.txt", append =FALSE)
cat(nn_predict, file = "nnresult2.txt", append =FALSE)
nn_predict<-predict(nn,test,type="raw")
nn_predict # raw predicted values (fitted probabilities): https://mjin.doshisha.ac.jp/R/Chap_23/23.html
## [,1]
## 6 0.0902583776
## 10 0.2825692107
## 12 0.2816178162
## 15 0.2817721995
## 17 0.2283533750
## 22 0.0713185268
## 24 0.0084773901
## 26 0.0097348266
## 27 0.2823669688
## 29 0.0145462481
## 32 0.0198413404
## 40 0.0097726387
## 41 0.0084189701
## 42 0.5516321073
## 44 0.0756648445
## 48 0.0083687002
## 49 0.0083642624
## 55 0.0083654393
## 57 0.8194100535
## 63 0.8194101100
## 64 0.8194105334
## 67 0.8194108903
## 72 0.7963677879
## 76 0.0944674281
## 78 0.0775733378
## 81 0.1041343221
## 86 0.2216093026
## 89 0.2699625086
## 90 0.1521672046
## 92 0.0110329971
## 93 0.7668066998
## 96 0.7147450408
## 97 0.2970252352
## 100 0.7662735197
## 105 0.1882610975
## 107 0.0004860288
## 109 0.0653694032
## 110 0.0004917860
## 114 0.0524938297
## 116 0.2967111140
## 119 0.2980276746
## 120 0.2967093125
## 122 0.2905311083
## 123 0.2541655957
## 127 0.2408592893
## 135 0.0721202985
## 138 0.2975943128
## 149 0.2966806970
## 156 0.0894672218
## 159 0.2978716733
## 163 0.3018964998
## 166 0.7834701268
## 168 0.7205485029
## 169 0.4554337631
## 170 0.2980880015
## 189 0.8194108903
## 196 0.8194108903
## 203 0.7907843820
## 205 0.8039301682
## 206 0.8061896735
## 213 0.8193706605
## 214 0.8194101379
## 218 0.8098664630
## 224 0.0192225832
## 231 0.0935571351
## 234 0.0057161279
## 235 0.1946542724
## 236 0.2001896600
## 240 0.4540448406
## 241 0.5591900977
## 243 0.1390000919
## 247 0.0158439431
## 250 0.1155366035
## 251 0.3856909729
## 252 0.0969467816
## 256 0.2177204261
## 263 0.0695230210
## 265 0.1800130526
## 267 0.1771321855
## 268 0.0179655677
## 269 0.0215635205
## 279 0.1783311556
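For this binary fit with a single output unit, the class labels produced next correspond to thresholding these raw values at 0.5; a minimal sketch of that equivalence (run before nn_predict is overwritten with the class labels below):
nn_class_manual <- ifelse(nn_predict > 0.5, "1", "0") # same rule that predict(..., type = "class") applies
table(nn_class_manual)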
nn_predict<-predict(nn,test,type="class")# predicted class labels
# write the predicted classes to a text file: http://takenaka-akio.org/doc/r_auto/chapter_03.html
nn_predict
## [1] "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "1" "0" "0" "0" "0" "1"
## [20] "1" "1" "1" "1" "0" "0" "0" "0" "0" "0" "0" "1" "1" "0" "1" "0" "0" "0" "0"
## [39] "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "0" "1" "1" "0" "0" "1" "1"
## [58] "1" "1" "1" "1" "1" "1" "0" "0" "0" "0" "0" "0" "1" "0" "0" "0" "0" "0" "0"
## [77] "0" "0" "0" "0" "0" "0"
cat(test$taisyo, file = "testtaisyo.txt", append =FALSE)
cat(predrandam, file = "lfresult.txt", append =FALSE)
#cat(predsvm, file = "svmresult.txt", append =FALSE)
cat(nn_predict, file = "nnresult.txt", append =FALSE)
kekka<-table(nn_predict, test$taisyo)
kekka
##
## nn_predict 0 1
## 0 54 8
## 1 5 15
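Read off this table directly, the nnet test accuracy is (54 + 15) / 82 ≈ 0.841. The same summary can be obtained with caret::confusionMatrix; a minimal sketch (the explicit factor() conversion of nn_predict is an assumption about how it is stored):
cm_nn <- confusionMatrix(factor(nn_predict, levels = c("0", "1")), test$taisyo)
cm_nn$overall["Accuracy"] # (54 + 15) / 82
cm_nn$byClass[c("Sensitivity", "Specificity")]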
Random forest hyperparameter tuning
# Prediction with a random forest
# uses the randomForest package (via caret)
set.seed(0)
modelRF <- train(
taisyo ~ .,
data = train,
method = "rf",
tuneLength = 4,
preProcess = c('center', 'scale'),
trControl = trainControl(method = "cv")
)
modelRF
## Random Forest
##
## 200 samples
## 10 predictor
## 2 classes: '0', '1'
##
## Pre-processing: centered (10), scaled (10)
## Resampling: Cross-Validated (10 fold)
## Summary of sample sizes: 180, 179, 180, 180, 181, 180, ...
## Resampling results across tuning parameters:
##
## mtry Accuracy Kappa
## 2 0.7989850 0.4776347
## 4 0.7694612 0.4150349
## 7 0.7799875 0.4477948
## 10 0.7654887 0.4108131
##
## Accuracy was used to select the optimal model using the largest value.
## The final value used for the model was mtry = 2.
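caret keeps the model refit on the full training set, so the mtry = 2 forest can also be taken directly from the train object instead of refitting by hand; a minimal sketch using standard train fields:
modelRF$finalModel # the underlying randomForest fit with mtry = 2
head(predict(modelRF, test)) # predictions through the caret wrapper (re-applies centering/scaling)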
4/2: refit the prediction model
gesui.rf2 <- randomForest( # build the classifier
taisyo ~ .,# model formula
data = gesui, # data
# 3/30: five variables were removed from the data
mtry = 2,
importance=T)
predrandam = predict(gesui.rf2, test)
predrandam
## 6 10 12 15 17 22 24 26 27 29 32 40 41 42 44 48 49 55 57 63
## 0 0 0 0 0 0 0 1 1 0 0 0 0 0 0 0 0 0 1 1
## 64 67 72 76 78 81 86 89 90 92 93 96 97 100 105 107 109 110 114 116
## 1 1 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0
## 119 120 122 123 127 135 138 149 156 159 163 166 168 169 170 189 196 203 205 206
## 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1
## 213 214 218 224 231 234 235 236 240 241 243 247 250 251 252 256 263 265 267 268
## 1 1 1 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0
## 269 279
## 0 0
## Levels: 0 1
table(predrandam,test$taisyo)
##
## predrandam 0 1
## 0 57 9
## 1 2 14
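As a quick check, the test accuracy implied by this table is (57 + 14) / 82 ≈ 0.866, which matches the confusionMatrix value printed further below; a one-line version:
sum(diag(table(predrandam, test$taisyo))) / nrow(test) # ≈ 0.866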
x=gesui.rf2$importance
rank <- data.frame(x) # convert the importance matrix to a data frame
rank$factor <- rownames(rank) # add the variable names (row names) as a column
rank <- rank[order(rank[,1], decreasing=T),] # sort in decreasing order of importance
rownames(rank) <- 1:nrow(rank) # use the rank as the row names
rank
## X0 X1 MeanDecreaseAccuracy MeanDecreaseGini
## 1 0.0320441881 0.038513134 0.034018094 15.996390
## 2 0.0241577340 0.097838890 0.047895341 12.237502
## 3 0.0169456085 0.010123678 0.014788816 13.002394
## 4 0.0126846853 0.001666126 0.009170203 1.914125
## 5 0.0124997737 0.003727802 0.009794852 2.492283
## 6 0.0088487005 0.060842525 0.025419283 14.655921
## 7 0.0050257782 0.055814644 0.021296138 5.215724
## 8 0.0040184113 0.012299272 0.006440426 1.649560
## 9 0.0009980311 0.024628800 0.008580328 1.814428
## 10 -0.0024070993 0.008653988 0.001065232 5.391825
## factor
## 1 uedokaburi
## 2 kyouyounensuu
## 3 slope
## 4 kubun
## 5 ekijyouka
## 6 long
## 7 kei
## 8 kouhou
## 9 did
## 10 masuhonsuu
varImpPlot(gesui.rf2)
plot(test$taisyo, predrandam, main = gesui.rf2$call)# plot of observed vs. predicted classes
curve(identity, add = TRUE)
# These results seem to be worse than the 4/1 re-estimation (505 records)??
train_rf3 <- confusionMatrix(predict(gesui.rf2, train), train$taisyo)
test_rf3 <- confusionMatrix(predict(gesui.rf2, test), test$taisyo)
print(str_c("Accuracy (train):", train_rf3$overall[1]))
## [1] "Accuracy (train):0.98"
print(str_c("Accuracy (test):", test_rf3$overall[1]))
## [1] "Accuracy (test):0.865853658536585"
library(xgboost)
##
## Attaching package: 'xgboost'
## The following object is masked from 'package:rattle':
##
## xgboost
## The following object is masked from 'package:dplyr':
##
## slice
set.seed(0)
modelXgboostTree <-
#train(taisyo ~ slope + uedokaburi + masuhonsuu + long + kyouyounensuu + kei,# model formula (alternative)
train(taisyo ~ .,# model formula
data = train,
method = "xgbTree",
preProcess = c('center', 'scale'),
trControl = trainControl(method = "cv"),
tuneLength = 4)
predxgBoostTree <- predict(modelXgboostTree, test)
table(predxgBoostTree,test$taisyo)
##
## predxgBoostTree 0 1
## 0 53 8
## 1 6 15
train_rf2 <- confusionMatrix(predict(modelXgboostTree, train), train$taisyo)
test_rf2 <- confusionMatrix(predict(modelXgboostTree,test), test$taisyo)
print(str_c("Accuracy (train):", train_rf2$overall[1]))
## [1] "Accuracy (train):1"
print(str_c("Accuracy (test) :", test_rf2$overall[1]))
## [1] "Accuracy (test) :0.829268292682927"
# These results are considerably worse than the random forest.
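To see which boosting settings caret actually selected, and how close the alternatives were, the tuning results are stored on the fitted object; a minimal sketch using standard train fields:
modelXgboostTree$bestTune # selected hyperparameter combination
head(modelXgboostTree$results[order(-modelXgboostTree$results$Accuracy), ]) # best settings by CV accuracy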
https://shohei-doi.github.io/notes/posts/2019-05-27-cross-validation/ Following this reference, a random forest is trained on the principal components.
stargazer(as.data.frame(train),type = "html")
| Variable | N | Mean | St. Dev. | Min | Pctl(25) | Pctl(75) | Max |
|---|---|---|---|---|---|---|---|
| kubun | 200 | 0.200 | 0.401 | 0 | 0 | 0 | 1 |
| did | 200 | 0.725 | 0.448 | 0 | 0 | 1 | 1 |
| ekijyouka | 200 | 0.215 | 0.412 | 0 | 0 | 0 | 1 |
| kouhou | 200 | 0.355 | 0.480 | 0 | 0 | 1 | 1 |
| slope | 200 | 3.296 | 2.032 | 0.000 | 1.900 | 4.200 | 9.700 |
| uedokaburi | 200 | 4.121 | 2.418 | 1.055 | 2.476 | 5.261 | 13.385 |
| masuhonsuu | 200 | 1.270 | 1.781 | 0 | 0 | 2 | 11 |
| long | 200 | 31.977 | 15.610 | 0.970 | 21.745 | 41.785 | 96.820 |
| kyouyounensuu | 200 | 27.615 | 5.313 | 10 | 25 | 27 | 40 |
| kei | 200 | 394.500 | 164.025 | 200 | 250 | 600 | 900 |
pca <- preProcess(train, method = "pca", pcaComp = 6)
#pca <- preProcess(train, method = "pca", pcaComp = 10)
data_train_pca <- predict(pca, train)
data_test_pca <- predict(pca, test)
data_train_pca %>%
head()
## taisyo PC1 PC2 PC3 PC4 PC5 PC6
## 108 0 2.2545959 -0.3997834 2.4484087 0.1682442 0.4397396 2.10271484
## 115 0 0.9592254 0.8882532 2.8801324 1.5954494 -0.2977598 -0.68000995
## 223 0 1.1641085 -0.9221369 0.1407157 0.7660756 0.2735742 -1.81181755
## 65 1 2.4553824 0.1849302 -0.8515177 -0.1864929 -1.0184509 0.82381067
## 28 1 1.5179251 -0.7266024 -1.9222494 -0.5078729 -1.7378817 -0.18142810
## 79 0 2.0358587 2.0953776 -0.9079450 -0.4472103 1.6497911 -0.02765803
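The loadings that define these components can be inspected on the preProcess object; a minimal sketch (for method = "pca" caret stores the loading matrix in the rotation field; check with str(pca) if in doubt):
round(pca$rotation, 3) # contribution of each numeric variable to PC1..PC6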
vote_rf3 <- train(
  taisyo ~ .,
  data = data_train_pca,
  method = "rf"
)
train_rf3 <- confusionMatrix(predict(vote_rf3, data_train_pca), data_train_pca$taisyo)
test_rf3 <- confusionMatrix(predict(vote_rf3, data_test_pca), data_test_pca$taisyo)
print(str_c("Accuracy (train):", train_rf3$overall[1]))
## [1] "Accuracy (train):1"
print(str_c("Accuracy (test):", test_rf3$overall[1]))
## [1] "Accuracy (test):0.817073170731707"
rf_predict<-predict(vote_rf3, data_test_pca)
table(rf_predict, test$taisyo)
##
## rf_predict 0 1
## 0 56 12
## 1 3 11
With 5 principal components: "Accuracy (test): 0.841463414634146". With 6 principal components: "Accuracy (test): 0.853658536585366". With 7 principal components: "Accuracy (test): 0.829268292682927". With 7 principal components, treating the qualitative variables as quantitative: "Accuracy (test): 0.841463414634146". With 6 principal components: "Accuracy (test): 0.804878048780488". With 10 principal components, treating the qualitative variables as quantitative: "Accuracy (test): 0.804878048780488".
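The comparison above can be reproduced with a single loop over the number of retained components; a minimal sketch under the same train/test split (accuracies will differ slightly depending on the random seed):
for (k in 5:10) {
  pp <- preProcess(train, method = "pca", pcaComp = k)
  tr_pca <- predict(pp, train)
  te_pca <- predict(pp, test)
  fit_k <- train(taisyo ~ ., data = tr_pca, method = "rf", trControl = trainControl(method = "cv"))
  acc_k <- confusionMatrix(predict(fit_k, te_pca), te_pca$taisyo)$overall["Accuracy"]
  cat("pcaComp =", k, ": Accuracy (test) =", round(acc_k, 4), "\n")
}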
vote_logit3 <- train(
taisyo ~ .,
data = gesui,
method = "rf",
trControl = trainControl(method = "cv")
)
vote_logit3
## Random Forest
##
## 200 samples
## 10 predictor
## 2 classes: '0', '1'
##
## No pre-processing
## Resampling: Cross-Validated (10 fold)
## Summary of sample sizes: 181, 181, 181, 180, 180, 180, ...
## Resampling results across tuning parameters:
##
## mtry Accuracy Kappa
## 2 0.7904386 0.4597052
## 6 0.7801754 0.4499320
## 10 0.7751504 0.4395279
##
## Accuracy was used to select the optimal model using the largest value.
## The final value used for the model was mtry = 2.
https://shohei-doi.github.io/notes/posts/2019-05-27-cross-validation/
vote_logit3 <- train(
taisyo ~ .,
data = data_train_pca,
method = "rf",
trControl = trainControl(method = "cv")
)
vote_logit3
## Random Forest
##
## 200 samples
## 6 predictor
## 2 classes: '0', '1'
##
## No pre-processing
## Resampling: Cross-Validated (10 fold)
## Summary of sample sizes: 181, 181, 180, 180, 179, 180, ...
## Resampling results across tuning parameters:
##
## mtry Accuracy Kappa
## 2 0.7796491 0.4434353
## 4 0.7501003 0.3770825
## 6 0.7556015 0.3866642
##
## Accuracy was used to select the optimal model using the largest value.
## The final value used for the model was mtry = 2.
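To put the raw-variable and principal-component cross-validation runs side by side, caret::resamples can be used, provided the two train objects are kept under distinct names (rf_raw and rf_pca below are hypothetical names; in the code above both fits were assigned to vote_logit3):
rf_raw <- train(taisyo ~ ., data = gesui, method = "rf", trControl = trainControl(method = "cv"))
rf_pca <- train(taisyo ~ ., data = data_train_pca, method = "rf", trControl = trainControl(method = "cv"))
res <- resamples(list(raw = rf_raw, pca = rf_pca))
summary(res)$statistics$Accuracy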
cordata <- gesui
# select the variables to be dummy-coded (taisyo included)
dum <- cordata %>% select(kubun, did, ekijyouka, kouhou, taisyo)
# select the variables to leave as they are
not_dum <- cordata %>% select(slope, uedokaburi, masuhonsuu, long, kyouyounensuu, kei)
# create dummy variables with makedummies()
dummy_var <- makedummies(dum, basal_level = FALSE)
# combine them back together
gesui <- cbind(dummy_var, not_dum)
https://blog.statsbeginner.net/entry/2016/01/06/042458
cordata <- gesui
# select the variables to be dummy-coded
dum <- cordata %>% select(kubun, did, ekijyouka, kouhou, taisyo)
# select the variables to leave as they are
not_dum <- cordata %>% select(slope, uedokaburi, masuhonsuu, long, kyouyounensuu, kei)
# create dummy variables with makedummies()
dummy_var <- makedummies(dum, basal_level = FALSE)
# combine them back together
gesui <- cbind(dummy_var, not_dum)
library(pequod)
model1 <- lmres(taisyo~kyouyounensuu*uedokaburi, centered =c("kyouyounensuu", "uedokaburi"), data =gesui)
summary(model1)
## Formula:
## taisyo ~ kyouyounensuu + uedokaburi + kyouyounensuu.XX.uedokaburi
## <environment: 0x0000000025739308>
##
## Models
## R R^2 Adj. R^2 F df1 df2 p.value
## Model 0.376 0.141 0.128 10.741 3.000 196 1.4e-06 ***
## ---
## Signif. codes: 0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
##
## Residuals
## Min. 1st Qu. Median Mean 3rd Qu. Max.
## -0.8088 -0.2635 -0.2413 0.0000 0.3229 0.7994
##
## Coefficients
## Estimate StdErr t.value beta p.value
## (Intercept) 0.30698 0.03256 9.42677 < 2e-16 ***
## kyouyounensuu 0.02033 0.00767 2.64954 0.2300 0.00872 **
## uedokaburi -0.02340 0.01373 -1.70391 -0.1205 0.08998 .
## kyouyounensuu.XX.uedokaburi -0.00642 0.00355 -1.80897 -0.1539 0.07199 .
## ---
## Signif. codes: 0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
##
## Collinearity
## VIF Tolerance
## kyouyounensuu 1.7200 0.5814
## uedokaburi 1.1416 0.8760
## kyouyounensuu.XX.uedokaburi 1.6519 0.6054
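As a cross-check on the lmres coefficients above, the same centered interaction model can be fit with base lm(); a minimal sketch (it assumes taisyo is the numeric 0/1 column created earlier, so this is a linear probability model like the lmres fit):
gesui_c <- transform(gesui,
                     kyouyounensuu_c = kyouyounensuu - mean(kyouyounensuu),
                     uedokaburi_c = uedokaburi - mean(uedokaburi))
summary(lm(taisyo ~ kyouyounensuu_c * uedokaburi_c, data = gesui_c)) # estimates should closely mirror the table above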
# model2 <- simpleSlope(model1, pred = "talk", mod1 = "per"); summary(model2) # placeholder variable names from the reference example; the actual call follows
model2 <- simpleSlope(model1, pred="kyouyounensuu", mod1 = "uedokaburi")
summary(model2)
##
## ** Estimated points of taisyo **
##
## Low kyouyounensuu (-1 SD) High kyouyounensuu (+1 SD)
## Low uedokaburi (-1 SD) 0.1731 0.5540
## High uedokaburi (+1 SD) 0.2249 0.2759
##
##
##
## ** Simple Slopes analysis ( df= 196 ) **
##
## simple slope standard error t-value p.value
## Low uedokaburi (-1 SD) 0.0359 0.0071 5.05 <2e-16 ***
## High uedokaburi (+1 SD) 0.0048 0.0147 0.33 0.74
## ---
## Signif. codes: 0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
##
##
##
## ** Bauer & Curran 95% CI **
##
## lower CI upper CI
## uedokaburi -55.08 0.4737
model11 <- lmres(taisyo~kyouyounensuu*long, centered =c("kyouyounensuu", "long"), data =gesui)
model3 <- simpleSlope(model11, pred="kyouyounensuu", mod1 = "long")
model12 <- lmres(taisyo~kyouyounensuu*kei, centered =c("kyouyounensuu", "kei"), data =gesui)
model312 <- simpleSlope(model12, pred="kyouyounensuu", mod1 = "kei")
model4<- lmres(taisyo~kyouyounensuu*slope, centered =c("kyouyounensuu", "slope"), data =gesui)
summary(model12)
## Formula:
## taisyo ~ kyouyounensuu + kei + kyouyounensuu.XX.kei
## <environment: 0x0000000022ee68a0>
##
## Models
## R R^2 Adj. R^2 F df1 df2 p.value
## Model 0.391 0.153 0.140 11.796 3.000 196 3.9e-07 ***
## ---
## Signif. codes: 0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
##
## Residuals
## Min. 1st Qu. Median Mean 3rd Qu. Max.
## -0.7852 -0.2697 -0.2362 0.0000 0.3246 0.8004
##
## Coefficients
## Estimate StdErr t.value beta p.value
## (Intercept) 0.29565 0.03267 9.05064 < 2e-16 ***
## kyouyounensuu 0.01744 0.00754 2.31301 0.1974 0.02176 *
## kei -0.00025 0.00020 -1.24912 -0.0877 0.21311
## kyouyounensuu.XX.kei -0.00013 0.00005 -2.69097 -0.2223 0.00774 **
## ---
## Signif. codes: 0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
##
## Collinearity
## VIF Tolerance
## kyouyounensuu 1.6845 0.5937
## kei 1.1394 0.8776
## kyouyounensuu.XX.kei 1.5788 0.6334
model41 <- simpleSlope(model4, pred="kyouyounensuu", mod1 = "slope")
PlotSlope(model2)
PlotSlope(model3)
PlotSlope(model312)
PlotSlope(model41 )
library(pequod)
model41 <- lmres(taisyo~kyouyounensuu*uedokaburi*kei, centered =c("kyouyounensuu", "uedokaburi", "kei"), data =gesui)
model42 <- lmres(taisyo~kyouyounensuu*uedokaburi*long, centered =c("kyouyounensuu", "uedokaburi", "long"), data =gesui)
model43 <- lmres(taisyo~slope*uedokaburi*long, centered =c("slope", "uedokaburi", "long"), data =gesui)
model411 <- simpleSlope(model41, pred="kyouyounensuu", mod1 = "uedokaburi", mod2 = "kei")
model422 <- simpleSlope(model42, pred="kyouyounensuu", mod1 = "uedokaburi", mod2 ="long" )
model431 <- simpleSlope(model43, pred="uedokaburi", mod1 = "slope", mod2 ="long" )
PlotSlope(model411)
PlotSlope(model422)
PlotSlope(model431)