Minimum set of libraries
require(car)
## Loading required package: car
## Loading required package: carData
require(CauseAndCorrelation)
## Loading required package: CauseAndCorrelation
## Warning in library(package, lib.loc = lib.loc, character.only = TRUE,
## logical.return = TRUE, : there is no package called 'CauseAndCorrelation'
require(dplyr)
## Loading required package: dplyr
##
## Attaching package: 'dplyr'
## The following object is masked from 'package:car':
##
## recode
## The following objects are masked from 'package:stats':
##
## filter, lag
## The following objects are masked from 'package:base':
##
## intersect, setdiff, setequal, union
require(factoextra)
## Loading required package: factoextra
## Loading required package: ggplot2
## Welcome! Want to learn more? See two factoextra-related books at https://goo.gl/ve3WBa
require(ggcorrplot)
## Loading required package: ggcorrplot
require(kableExtra)
## Loading required package: kableExtra
## Error: package or namespace load failed for 'kableExtra':
## .onLoad failed in loadNamespace() for 'kableExtra', details:
## call: !is.null(rmarkdown::metadata$output) && rmarkdown::metadata$output %in%
## error: 'length = 3' in coercion to 'logical(1)'
require(knitr)
## Loading required package: knitr
require(lavaan)
## Loading required package: lavaan
## This is lavaan 0.6-16
## lavaan is FREE software! Please report any bugs.
require(lavaanPlot)
## Loading required package: lavaanPlot
require(magrittr)
## Loading required package: magrittr
require(psych)
## Loading required package: psych
##
## Attaching package: 'psych'
## The following object is masked from 'package:lavaan':
##
## cor2cov
## The following objects are masked from 'package:ggplot2':
##
## %+%, alpha
## The following object is masked from 'package:car':
##
## logit
require(ResourceSelection)
## Loading required package: ResourceSelection
## ResourceSelection 0.3-6 2023-06-27
require(semPlot)
## Loading required package: semPlot
require(tidySEM)
## Loading required package: tidySEM
## Loading required package: OpenMx
##
## Attaching package: 'OpenMx'
## The following object is masked from 'package:psych':
##
## tr
## Registered S3 method overwritten by 'tidySEM':
## method from
## predict.MxModel OpenMx
require(tidyverse)
## Loading required package: tidyverse
## ── Attaching core tidyverse packages ──────────────────────── tidyverse 2.0.0 ──
## ✔ forcats 1.0.0 ✔ stringr 1.5.1
## ✔ lubridate 1.9.3 ✔ tibble 3.2.1
## ✔ purrr 1.0.2 ✔ tidyr 1.3.0
## ✔ readr 2.1.4
## ── Conflicts ────────────────────────────────────────── tidyverse_conflicts() ──
## ✖ psych::%+%() masks ggplot2::%+%()
## ✖ psych::alpha() masks ggplot2::alpha()
## ✖ tidyr::extract() masks magrittr::extract()
## ✖ dplyr::filter() masks stats::filter()
## ✖ dplyr::lag() masks stats::lag()
## ✖ dplyr::recode() masks car::recode()
## ✖ purrr::set_names() masks magrittr::set_names()
## ✖ purrr::some() masks car::some()
## ℹ Use the conflicted package (<http://conflicted.r-lib.org/>) to force all conflicts to become errors
require(performance)
## Loading required package: performance
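Two of the loads above fail: CauseAndCorrelation is not on CRAN, and this kableExtra build hits a known .onLoad error on recent R. A possible repair, assuming Bill Shipley's GitHub repository for the former and that a current kableExtra release carries the fix:
#remotes::install_github('BillShipley/CauseAndCorrelation') # repository name assumed
#install.packages('kableExtra') # newer build assumed to avoid the .onLoad error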
mydata=read.csv('jasp4.csv') # MSA-level indicator data
Correlation, printing, and citing
corfunction=function(d, m='pearson', sz=2){
  # correlation matrix and p-values for every column of d
  mycorr=cor(d[, 1:ncol(d)], method=m)
  p.mat=ggcorrplot::cor_pmat(d[, 1:ncol(d)])
  # lower-triangle heat map; cells with p > 0.01 are crossed out (pch=4)
  myplot=ggcorrplot(mycorr, hc.order=T, type="lower",
                    colors=c("red", "white", "green"), tl.cex=8,
                    tl.col="black", lab=TRUE, lab_size=sz, p.mat=p.mat,
                    sig.level=0.01, insig="pch", pch=4)
  print(myplot)
}
myprint=function(x){print(x)}   # thin wrappers used throughout
mycite=function(x){citation(x)}
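mycite() is a thin wrapper around citation(); for example, to print the entry for the SEM engine used throughout:
#mycite('lavaan')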
Descriptives prior to normalizing
myprint(describe(mydata))
## vars n mean sd median trimmed mad min
## GeoName* 1 382 191.50 110.42 191.50 191.50 141.59 1.00
## New_MEFI_Score 2 382 6.77 0.75 6.79 6.79 0.76 4.00
## MEFI_Score 3 382 6.74 0.87 6.76 6.77 0.91 3.82
## Govt_Consumption 4 382 6.93 1.69 7.18 7.08 1.28 0.00
## Govt_Xfers_Subsidies 5 382 8.84 0.74 9.08 8.97 0.46 5.56
## Govt_Retirement 6 382 4.24 2.02 4.53 4.33 2.12 0.00
## Income_Payroll_Tax 7 382 4.81 2.81 4.28 4.69 1.89 0.00
## Sales_Tax 8 382 5.39 1.71 5.44 5.43 1.24 0.00
## Property_Tax 9 382 8.27 1.13 8.40 8.36 0.83 0.00
## Minimum_Wage_Over_Income 10 382 7.45 1.23 7.59 7.53 1.10 2.98
## Percent_Govt_Employment 11 382 8.35 0.91 8.48 8.43 0.71 0.00
## Percent_Private_Union_Density 12 382 6.37 2.76 7.04 6.59 3.02 0.00
## Minimum_Wage_Over_10._Income 13 382 6.68 1.79 7.14 6.91 1.50 0.84
## max range skew kurtosis se
## GeoName* 382.00 381.00 0.00 -1.21 5.65
## New_MEFI_Score 8.62 4.62 -0.37 0.33 0.04
## MEFI_Score 8.81 4.99 -0.29 -0.19 0.04
## Govt_Consumption 10.00 10.00 -1.47 4.21 0.09
## Govt_Xfers_Subsidies 9.74 4.17 -1.81 3.23 0.04
## Govt_Retirement 8.12 8.12 -0.42 -0.56 0.10
## Income_Payroll_Tax 10.00 10.00 0.50 -0.53 0.14
## Sales_Tax 9.81 9.81 -0.28 1.30 0.09
## Property_Tax 10.00 10.00 -3.27 20.97 0.06
## Minimum_Wage_Over_Income 10.00 7.02 -0.58 0.31 0.06
## Percent_Govt_Employment 10.00 10.00 -2.52 18.23 0.05
## Percent_Private_Union_Density 10.00 10.00 -0.54 -0.81 0.14
## Minimum_Wage_Over_10._Income 9.28 8.44 -1.08 0.51 0.09
#colnames(mydata)[4:13]=c('X1A','X1B','X1C','X2A','X2B','X2C','X3A', 'X3B','X3C', 'New3A')
corfunction(mydata[, -c(1:3)], sz=3) # drop GeoName and the two composite scores
KDE pairs plots of the data
kdepairs(mydata[, 2:3])
## Warning in par(usr): argument 1 does not name a graphical parameter
## Warning in par(usr): argument 1 does not name a graphical parameter
kdepairs(mydata[,4:6])
## Warning in par(usr): argument 1 does not name a graphical parameter
## Warning in par(usr): argument 1 does not name a graphical parameter
## Warning in par(usr): argument 1 does not name a graphical parameter
kdepairs(mydata[,7:9])
## Warning in par(usr): argument 1 does not name a graphical parameter
## Warning in par(usr): argument 1 does not name a graphical parameter
## Warning in par(usr): argument 1 does not name a graphical parameter
kdepairs(mydata[,10:13])
## Warning in par(usr): argument 1 does not name a graphical parameter
## Warning in par(usr): argument 1 does not name a graphical parameter
## Warning in par(usr): argument 1 does not name a graphical parameter
## Warning in par(usr): argument 1 does not name a graphical parameter
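The par(usr) warnings come from kdepairs' panel functions and appear harmless. If they are a nuisance, psych::pairs.panels (already loaded) draws a comparable scatterplot matrix with kernel densities; a sketch:
#pairs.panels(mydata[, 4:6], density=TRUE, ellipses=FALSE)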
Z-scores to adjust for differing magnitudes
tempname=mydata$GeoName # stash the MSA names
mydata[,4:13]=as.data.frame(apply(mydata[, 4:13], 2, scale)) # column-wise z-scores
mydata$GeoName=tempname # restore the names
myprint(describe(mydata))
## vars n mean sd median trimmed mad
## GeoName* 1 382 191.50 110.42 191.50 191.50 141.59
## New_MEFI_Score 2 382 6.77 0.75 6.79 6.79 0.76
## MEFI_Score 3 382 6.74 0.87 6.76 6.77 0.91
## Govt_Consumption 4 382 0.00 1.00 0.15 0.09 0.76
## Govt_Xfers_Subsidies 5 382 0.00 1.00 0.32 0.18 0.62
## Govt_Retirement 6 382 0.00 1.00 0.15 0.05 1.05
## Income_Payroll_Tax 7 382 0.00 1.00 -0.19 -0.04 0.67
## Sales_Tax 8 382 0.00 1.00 0.03 0.02 0.72
## Property_Tax 9 382 0.00 1.00 0.12 0.08 0.74
## Minimum_Wage_Over_Income 10 382 0.00 1.00 0.11 0.06 0.90
## Percent_Govt_Employment 11 382 0.00 1.00 0.15 0.09 0.78
## Percent_Private_Union_Density 12 382 0.00 1.00 0.24 0.08 1.09
## Minimum_Wage_Over_10._Income 13 382 0.00 1.00 0.25 0.13 0.84
## min max range skew kurtosis se
## GeoName* 1.00 382.00 381.00 0.00 -1.21 5.65
## New_MEFI_Score 4.00 8.62 4.62 -0.37 0.33 0.04
## MEFI_Score 3.82 8.81 4.99 -0.29 -0.19 0.04
## Govt_Consumption -4.10 1.82 5.92 -1.47 4.21 0.05
## Govt_Xfers_Subsidies -4.40 1.20 5.60 -1.81 3.23 0.05
## Govt_Retirement -2.09 1.92 4.01 -0.42 -0.56 0.05
## Income_Payroll_Tax -1.71 1.85 3.56 0.50 -0.53 0.05
## Sales_Tax -3.15 2.58 5.73 -0.28 1.30 0.05
## Property_Tax -7.33 1.53 8.87 -3.27 20.97 0.05
## Minimum_Wage_Over_Income -3.64 2.07 5.72 -0.58 0.31 0.05
## Percent_Govt_Employment -9.22 1.82 11.05 -2.52 18.23 0.05
## Percent_Private_Union_Density -2.31 1.32 3.63 -0.54 -0.81 0.05
## Minimum_Wage_Over_10._Income -3.26 1.45 4.71 -1.08 0.51 0.05
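For reference, a tidyverse equivalent of the column-wise z-scoring above (a sketch; assumes dplyr's across(), available since dplyr 1.0):
#mydata=mydata%>%mutate(across(4:13, ~as.numeric(scale(.x)))) # same z-scores, GeoName untouched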
Not hierarchically clustered
corfunction(mydata[,4:13])
## Vanishing Tetrads
#print("Tetrad Testing, M1")
#vanishing.tetrads(mydata[, 4:12])
#print("Tetrad Testing, M2")
#vanishing.tetrads(mydata[,4:11,13])
Baseline model
A negative variance estimate arises for the forced additive measure of Economic Freedom (P2 and P3).
colnames(mydata)[4:13]=c('X1A','X1B','X1C','X2A','X2B','X2C','X3A', 'X3B','X3C', 'New3A')
mod='
# loadings fixed to 1/3 to force the equally weighted additive structure
Economic_Freedom=~1/3*Government_Spending+1/3*Taxation+1/3*Labor_Market_Freedom
Government_Spending=~1/3*X1A+1/3*X1B+1/3*X1C
Taxation=~1/3*X2A+1/3*X2B+1/3*X2C
Labor_Market_Freedom=~1/3*X3A+1/3*X3B+1/3*X3C
'
fit=cfa(mod, data=mydata, do.fit=T, estimator='DWLS')
## Warning in lav_object_post_check(object): lavaan WARNING: some estimated lv
## variances are negative
(mys1=summary(fit, standardized=T,fit.measures = TRUE))
## lavaan 0.6.16 ended normally after 50 iterations
##
## Estimator DWLS
## Optimization method NLMINB
## Number of model parameters 13
##
## Number of observations 382
##
## Model Test User Model:
##
## Test statistic 463.355
## Degrees of freedom 32
## P-value (Chi-square) 0.000
##
## Model Test Baseline Model:
##
## Test statistic 819.555
## Degrees of freedom 36
## P-value 0.000
##
## User Model versus Baseline Model:
##
## Comparative Fit Index (CFI) 0.449
## Tucker-Lewis Index (TLI) 0.381
##
## Root Mean Square Error of Approximation:
##
## RMSEA 0.188
## 90 Percent confidence interval - lower 0.173
## 90 Percent confidence interval - upper 0.203
## P-value H_0: RMSEA <= 0.050 0.000
## P-value H_0: RMSEA >= 0.080 1.000
##
## Standardized Root Mean Square Residual:
##
## SRMR 0.172
##
## Parameter Estimates:
##
## Standard errors Standard
## Information Expected
## Information saturated (h1) model Unstructured
##
## Latent Variables:
## Estimate Std.Err z-value P(>|z|) Std.lv Std.all
## Economic_Freedom =~
## Gvrnmnt_Spndng 0.333 0.830 0.830
## Taxation 0.333 NA NA
## Labr_Mrkt_Frdm 0.333 0.839 0.839
## Government_Spending =~
## X1A 0.333 0.480 0.480
## X1B 0.333 0.480 0.480
## X1C 0.333 0.480 0.480
## Taxation =~
## X2A 0.333 NA NA
## X2B 0.333 NA NA
## X2C 0.333 NA NA
## Labor_Market_Freedom =~
## X3A 0.333 0.474 0.474
## X3B 0.333 0.474 0.474
## X3C 0.333 0.474 0.474
##
## Variances:
## Estimate Std.Err z-value P(>|z|) Std.lv Std.all
## .X1A 0.770 0.132 5.846 0.000 0.770 0.770
## .X1B 0.770 0.122 6.334 0.000 0.770 0.770
## .X1C 0.770 0.070 11.075 0.000 0.770 0.770
## .X2A 1.176 0.071 16.598 0.000 1.176 1.176
## .X2B 1.176 0.099 11.869 0.000 1.176 1.176
## .X2C 1.176 0.248 4.744 0.000 1.176 1.176
## .X3A 0.775 0.084 9.232 0.000 0.775 0.775
## .X3B 0.775 0.233 3.334 0.001 0.775 0.775
## .X3C 0.775 0.064 12.080 0.000 0.775 0.775
## Economic_Fredm 12.820 0.851 15.057 0.000 1.000 1.000
## .Gvrnmnt_Spndng 0.646 0.304 2.122 0.034 0.312 0.312
## .Taxation -3.006 0.319 -9.437 0.000 NA NA
## .Labr_Mrkt_Frdm 0.597 0.298 2.003 0.045 0.295 0.295
lavaanPlot(model = fit, edge_options = list(color = "grey"),coefs = TRUE, covs=F)
temp2=lavPredict(fit)[,1]
## Warning in lav_object_post_check(object): lavaan WARNING: some estimated lv
## variances are negative
#mydata$ScoreM1=10*(temp2-min(temp2))/(max(temp2)-min(temp2)) #scale between 0 and 10
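The warning flags a Heywood case: the Taxation disturbance is estimated with negative variance. Standard lavaan accessors isolate it without rereading the full summary (a sketch):
#fitMeasures(fit, c('chisq','df','cfi','tli','rmsea','srmr')) # pull the key indices directly
#subset(parameterEstimates(fit), op=='~~' & est<0)            # list the negative variance estimates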
A negative variance estimate again arises for the forced additive measure of Economic Freedom (P2 and P3), here with New3A replacing X3A.
mod='
Economic_Freedom=~1/3*Government_Spending+1/3*Taxation+1/3*Labor_Market_Freedom
Government_Spending=~1/3*X1A+1/3*X1B+1/3*X1C
Taxation=~1/3*X2A+1/3*X2B+1/3*X2C
Labor_Market_Freedom=~1/3*New3A+1/3*X3B+1/3*X3C
'
fit=cfa(mod, data=mydata, do.fit=T, estimator='DWLS')
## Warning in lav_object_post_check(object): lavaan WARNING: some estimated lv
## variances are negative
(mys2=summary(fit, standardized=T,fit.measures = TRUE))
## lavaan 0.6.16 ended normally after 50 iterations
##
## Estimator DWLS
## Optimization method NLMINB
## Number of model parameters 13
##
## Number of observations 382
##
## Model Test User Model:
##
## Test statistic 591.511
## Degrees of freedom 32
## P-value (Chi-square) 0.000
##
## Model Test Baseline Model:
##
## Test statistic 983.750
## Degrees of freedom 36
## P-value 0.000
##
## User Model versus Baseline Model:
##
## Comparative Fit Index (CFI) 0.410
## Tucker-Lewis Index (TLI) 0.336
##
## Root Mean Square Error of Approximation:
##
## RMSEA 0.214
## 90 Percent confidence interval - lower 0.199
## 90 Percent confidence interval - upper 0.230
## P-value H_0: RMSEA <= 0.050 0.000
## P-value H_0: RMSEA >= 0.080 1.000
##
## Standardized Root Mean Square Residual:
##
## SRMR 0.194
##
## Parameter Estimates:
##
## Standard errors Standard
## Information Expected
## Information saturated (h1) model Unstructured
##
## Latent Variables:
## Estimate Std.Err z-value P(>|z|) Std.lv Std.all
## Economic_Freedom =~
## Gvrnmnt_Spndng 0.333 0.856 0.856
## Taxation 0.333 NA NA
## Labr_Mrkt_Frdm 0.333 0.951 0.951
## Government_Spending =~
## X1A 0.333 0.480 0.480
## X1B 0.333 0.480 0.480
## X1C 0.333 0.480 0.480
## Taxation =~
## X2A 0.333 NA NA
## X2B 0.333 NA NA
## X2C 0.333 NA NA
## Labor_Market_Freedom =~
## New3A 0.333 0.432 0.432
## X3B 0.333 0.432 0.432
## X3C 0.333 0.432 0.432
##
## Variances:
## Estimate Std.Err z-value P(>|z|) Std.lv Std.all
## .X1A 0.770 0.132 5.846 0.000 0.770 0.770
## .X1B 0.770 0.122 6.334 0.000 0.770 0.770
## .X1C 0.770 0.070 11.075 0.000 0.770 0.770
## .X2A 1.176 0.071 16.598 0.000 1.176 1.176
## .X2B 1.176 0.099 11.869 0.000 1.176 1.176
## .X2C 1.176 0.248 4.744 0.000 1.176 1.176
## .New3A 0.814 0.086 9.447 0.000 0.814 0.814
## .X3B 0.814 0.232 3.503 0.000 0.814 0.814
## .X3C 0.814 0.063 12.938 0.000 0.814 0.814
## Economic_Fredm 13.666 0.829 16.480 0.000 1.000 1.000
## .Gvrnmnt_Spndng 0.551 0.303 1.818 0.069 0.266 0.266
## .Taxation -3.100 0.318 -9.755 0.000 NA NA
## .Labr_Mrkt_Frdm 0.159 0.274 0.580 0.562 0.095 0.095
lavaanPlot(model = fit, edge_options = list(color = "grey"),coefs = TRUE,covs=T)
temp2=lavPredict(fit)[,1]
## Warning in lav_object_post_check(object): lavaan WARNING: some estimated lv
## variances are negative
#mydata$ScoreM2=10*(temp2-min(temp2))/(max(temp2)-min(temp2)) #scale between 0 and 10
A statistically significant negative variance estimate arises for the forced additive measure of Economic Freedom (P1).
lavOptions("verbose")
## $verbose
## [1] FALSE
mod='
Economic_Freedom=~Government_Spending+Taxation+Labor_Market_Freedom
Government_Spending=~X1A+X1B+X1C
Taxation=~X2A+X2B+X2C
Labor_Market_Freedom=~X3A+X3B+X3C
'
fit=cfa(mod, data=mydata, do.fit=T, estimator='DWLS')
## Warning in lav_object_post_check(object): lavaan WARNING: some estimated ov
## variances are negative
## Warning in lav_object_post_check(object): lavaan WARNING: some estimated lv
## variances are negative
(mys3=summary(fit, standardized=T, fit.measures = TRUE))
## lavaan 0.6.16 ended normally after 77 iterations
##
## Estimator DWLS
## Optimization method NLMINB
## Number of model parameters 21
##
## Number of observations 382
##
## Model Test User Model:
##
## Test statistic 217.474
## Degrees of freedom 24
## P-value (Chi-square) 0.000
##
## Model Test Baseline Model:
##
## Test statistic 819.555
## Degrees of freedom 36
## P-value 0.000
##
## User Model versus Baseline Model:
##
## Comparative Fit Index (CFI) 0.753
## Tucker-Lewis Index (TLI) 0.630
##
## Root Mean Square Error of Approximation:
##
## RMSEA 0.145
## 90 Percent confidence interval - lower 0.128
## 90 Percent confidence interval - upper 0.163
## P-value H_0: RMSEA <= 0.050 0.000
## P-value H_0: RMSEA >= 0.080 1.000
##
## Standardized Root Mean Square Residual:
##
## SRMR 0.147
##
## Parameter Estimates:
##
## Standard errors Standard
## Information Expected
## Information saturated (h1) model Unstructured
##
## Latent Variables:
## Estimate Std.Err z-value P(>|z|) Std.lv Std.all
## Economic_Freedom =~
## Gvrnmnt_Spndng 1.000 1.172 1.172
## Taxation 1.501 0.302 4.979 0.000 0.357 0.357
## Labr_Mrkt_Frdm 0.765 0.185 4.139 0.000 0.845 0.845
## Government_Spending =~
## X1A 1.000 0.336 0.336
## X1B 0.951 0.164 5.813 0.000 0.319 0.319
## X1C 2.090 0.308 6.795 0.000 0.702 0.702
## Taxation =~
## X2A 1.000 1.657 1.657
## X2B -0.192 0.074 -2.603 0.009 -0.318 -0.318
## X2C 0.045 0.024 1.867 0.062 0.075 0.075
## Labor_Market_Freedom =~
## X3A 1.000 0.356 0.356
## X3B 0.789 0.170 4.640 0.000 0.281 0.281
## X3C 1.892 0.303 6.237 0.000 0.674 0.674
##
## Variances:
## Estimate Std.Err z-value P(>|z|) Std.lv Std.all
## .X1A 0.887 0.131 6.761 0.000 0.887 0.887
## .X1B 0.898 0.120 7.491 0.000 0.898 0.898
## .X1C 0.507 0.115 4.413 0.000 0.507 0.507
## .X2A -1.745 1.092 -1.597 0.110 -1.745 -1.745
## .X2B 0.899 0.102 8.852 0.000 0.899 0.899
## .X2C 0.994 0.246 4.049 0.000 0.994 0.994
## .X3A 0.873 0.085 10.251 0.000 0.873 0.873
## .X3B 0.921 0.232 3.969 0.000 0.921 0.921
## .X3C 0.545 0.117 4.652 0.000 0.545 0.545
## Economic_Fredm 0.155 0.047 3.271 0.001 1.000 1.000
## .Gvrnmnt_Spndng -0.042 0.036 -1.171 0.242 -0.374 -0.374
## .Taxation 2.395 1.083 2.212 0.027 0.873 0.873
## .Labr_Mrkt_Frdm 0.036 0.028 1.301 0.193 0.286 0.286
lavaanPlot(model = fit, edge_options = list(color = "grey"),coefs = TRUE, covs=F)
temp2=lavPredict(fit)[,1]
## Warning in lav_object_post_check(object): lavaan WARNING: some estimated ov
## variances are negative
## Warning in lav_object_post_check(object): lavaan WARNING: some estimated lv
## variances are negative
mydata$ScoreM3=10*(temp2-min(temp2))/(max(temp2)-min(temp2)) #scale between 0 and 10
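The same 0-10 rescaling recurs for every model score below; a small helper would remove the repetition (a sketch; rescale10 is a name introduced here):
#rescale10=function(x){10*(x-min(x))/(max(x)-min(x))}
#mydata$ScoreM3=rescale10(temp2)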
A statistically significant negative variance estimate arises for the forced additive measure of Economic Freedom (P2).
mod='
Economic_Freedom=~Government_Spending+Taxation+Labor_Market_Freedom
Government_Spending=~X1A+X1B+X1C
Taxation=~X2A+X2B+X2C
Labor_Market_Freedom=~New3A+X3B+X3C
'
fit=cfa(mod, data=mydata, do.fit=T, estimator='DWLS')
(mys4=summary(fit, standardized=T, fit.measures=TRUE))
## lavaan 0.6.16 ended normally after 82 iterations
##
## Estimator DWLS
## Optimization method NLMINB
## Number of model parameters 21
##
## Number of observations 382
##
## Model Test User Model:
##
## Test statistic 198.480
## Degrees of freedom 24
## P-value (Chi-square) 0.000
##
## Model Test Baseline Model:
##
## Test statistic 983.750
## Degrees of freedom 36
## P-value 0.000
##
## User Model versus Baseline Model:
##
## Comparative Fit Index (CFI) 0.816
## Tucker-Lewis Index (TLI) 0.724
##
## Root Mean Square Error of Approximation:
##
## RMSEA 0.138
## 90 Percent confidence interval - lower 0.121
## 90 Percent confidence interval - upper 0.156
## P-value H_0: RMSEA <= 0.050 0.000
## P-value H_0: RMSEA >= 0.080 1.000
##
## Standardized Root Mean Square Residual:
##
## SRMR 0.145
##
## Parameter Estimates:
##
## Standard errors Standard
## Information Expected
## Information saturated (h1) model Unstructured
##
## Latent Variables:
## Estimate Std.Err z-value P(>|z|) Std.lv Std.all
## Economic_Freedom =~
## Gvrnmnt_Spndng 1.000 0.728 0.728
## Taxation 3.418 0.769 4.446 0.000 0.732 0.732
## Labr_Mrkt_Frdm 2.690 0.663 4.057 0.000 0.743 0.743
## Government_Spending =~
## X1A 1.000 0.259 0.259
## X1B 0.820 0.194 4.215 0.000 0.212 0.212
## X1C 3.661 0.759 4.825 0.000 0.949 0.949
## Taxation =~
## X2A 1.000 0.881 0.881
## X2B -0.497 0.077 -6.469 0.000 -0.438 -0.438
## X2C 0.331 0.056 5.897 0.000 0.292 0.292
## Labor_Market_Freedom =~
## New3A 1.000 0.683 0.683
## X3B 0.076 0.053 1.432 0.152 0.052 0.052
## X3C 1.463 0.172 8.488 0.000 1.000 1.000
##
## Variances:
## Estimate Std.Err z-value P(>|z|) Std.lv Std.all
## .X1A 0.933 0.130 7.197 0.000 0.933 0.933
## .X1B 0.955 0.118 8.073 0.000 0.955 0.955
## .X1C 0.100 0.228 0.437 0.662 0.100 0.100
## .X2A 0.224 0.155 1.447 0.148 0.224 0.224
## .X2B 0.808 0.102 7.895 0.000 0.808 0.808
## .X2C 0.915 0.247 3.709 0.000 0.915 0.915
## .New3A 0.533 0.105 5.090 0.000 0.533 0.533
## .X3B 0.997 0.230 4.327 0.000 0.997 0.997
## .X3C 0.000 0.152 0.003 0.997 0.000 0.000
## Economic_Fredm 0.036 0.015 2.433 0.015 1.000 1.000
## .Gvrnmnt_Spndng 0.032 0.014 2.227 0.026 0.470 0.470
## .Taxation 0.360 0.133 2.714 0.007 0.464 0.464
## .Labr_Mrkt_Frdm 0.209 0.052 3.990 0.000 0.449 0.449
lavaanPlot(model = fit, edge_options = list(color = "grey"),coefs = TRUE,covs=F)
temp2=lavPredict(fit)[,1]
mydata$ScoreM4=10*(temp2-min(temp2))/(max(temp2)-min(temp2)) #scale between 0 and 10
fit=efa(data=mydata[, c(4:6, 10:12)], do.fit=T, estimator='DWLS',
        rotation='varimax', nfactors=2, sample.cov=cor(mydata[, c(4:6, 10:12)]),
        rotation.args=list(geomin.epsilon=0.05, rstarts=1, orthogonal=T))
summary(fit, standardized=T, fit.measures = TRUE, cutoff=0.4)
## This is lavaan 0.6.16 -- running exploratory factor analysis
##
## Estimator DWLS
## Rotation method VARIMAX ORTHOGONAL
## Rotation algorithm (rstarts) GPA (1)
## Standardized metric TRUE
## Row weights Kaiser
##
## Number of observations 382
##
## Fit measures:
## chisq df pvalue cfi rmsea
## nfactors = 2 5.581 4 0.233 0.996 0.032
##
## Eigenvalues correlation matrix:
##
## ev1 ev2 ev3 ev4 ev5 ev6
## 2.370 1.356 0.847 0.690 0.400 0.338
##
## Standardized loadings: (* = significant at 1% level)
##
## f1 f2 unique.var communalities
## X1A 0.774* .* 0.390 0.610
## X1B .* .* 0.832 0.168
## X1C .* 0.528* 0.687 0.313
## X3A 0.455* .* 0.753 0.247
## X3B 0.851* 0.276 0.724
## X3C 0.999* 0.000 1.000
##
## f1 f2 total
## Sum of sq (ortho) loadings 1.723 1.338 3.061
## Proportion of total 0.563 0.437 1.000
## Proportion var 0.287 0.223 0.510
## Cumulative var 0.287 0.510 0.510
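The two-factor choice can be cross-checked against the eigenvalues above with a parallel analysis from psych, already loaded (a sketch):
#fa.parallel(mydata[, c(4:6, 10:12)], fa='fa') # compares observed eigenvalues to resampled ones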
The model fits. (The negative variance estimate is statistically insignificant.)
mod='
#Score~EF
Economic_Freedom=~F1+F2+F3
F1=~X1A+X3A+X3B
F2=~X2A+X2B+X2C
F3=~X1C+X3C
'
#optimal covariance structure
fit=sem(mod, data=mydata, do.fit=T, estimator='DWLS')
## Warning in lav_object_post_check(object): lavaan WARNING: some estimated ov
## variances are negative
## Warning in lav_object_post_check(object): lavaan WARNING: some estimated lv
## variances are negative
(mys5=summary(fit, standardized=T, fit.measures=T))
## lavaan 0.6.16 ended normally after 86 iterations
##
## Estimator DWLS
## Optimization method NLMINB
## Number of model parameters 19
##
## Number of observations 382
##
## Model Test User Model:
##
## Test statistic 71.100
## Degrees of freedom 17
## P-value (Chi-square) 0.000
##
## Model Test Baseline Model:
##
## Test statistic 688.904
## Degrees of freedom 28
## P-value 0.000
##
## User Model versus Baseline Model:
##
## Comparative Fit Index (CFI) 0.918
## Tucker-Lewis Index (TLI) 0.865
##
## Root Mean Square Error of Approximation:
##
## RMSEA 0.091
## 90 Percent confidence interval - lower 0.070
## 90 Percent confidence interval - upper 0.114
## P-value H_0: RMSEA <= 0.050 0.001
## P-value H_0: RMSEA >= 0.080 0.819
##
## Standardized Root Mean Square Residual:
##
## SRMR 0.088
##
## Parameter Estimates:
##
## Standard errors Standard
## Information Expected
## Information saturated (h1) model Unstructured
##
## Latent Variables:
## Estimate Std.Err z-value P(>|z|) Std.lv Std.all
## Economic_Freedom =~
## F1 1.000 0.216 0.216
## F2 2.821 0.551 5.119 0.000 0.425 0.425
## F3 6.575 3.325 1.978 0.048 1.347 1.347
## F1 =~
## X1A 1.000 0.734 0.734
## X3A 0.766 0.166 4.612 0.000 0.562 0.562
## X3B 0.982 0.173 5.672 0.000 0.721 0.721
## F2 =~
## X2A 1.000 1.054 1.054
## X2B -0.436 0.088 -4.973 0.000 -0.459 -0.459
## X2C 0.173 0.048 3.607 0.000 0.183 0.183
## F3 =~
## X1C 1.000 0.774 0.774
## X3C 0.896 0.125 7.195 0.000 0.694 0.694
##
## Variances:
## Estimate Std.Err z-value P(>|z|) Std.lv Std.all
## .X1A 0.462 0.189 2.443 0.015 0.462 0.462
## .X3A 0.684 0.112 6.113 0.000 0.684 0.684
## .X3B 0.481 0.267 1.802 0.072 0.481 0.481
## .X2A -0.110 0.257 -0.429 0.668 -0.110 -0.110
## .X2B 0.789 0.106 7.457 0.000 0.789 0.789
## .X2C 0.967 0.246 3.929 0.000 0.967 0.967
## .X1C 0.401 0.112 3.577 0.000 0.401 0.401
## .X3C 0.519 0.094 5.542 0.000 0.519 0.519
## Economic_Fredm 0.025 0.014 1.824 0.068 1.000 1.000
## .F1 0.513 0.136 3.780 0.000 0.953 0.953
## .F2 0.910 0.260 3.502 0.000 0.820 0.820
## .F3 -0.489 0.566 -0.863 0.388 -0.815 -0.815
lavaanPlot(model = fit, edge_options = list(color = "grey"),coefs = TRUE, covs=F)
temp2=lavPredict(fit)[,1]
## Warning in lav_object_post_check(object): lavaan WARNING: some estimated ov
## variances are negative
## Warning in lav_object_post_check(object): lavaan WARNING: some estimated lv
## variances are negative
mydata$ScoreM5=10*(temp2-min(temp2))/(max(temp2)-min(temp2)) #scale between 0 and 10
fit=efa(data=mydata[, c(4:6, 11:13)], do.fit=T, estimator='DWLS',
        rotation='varimax', nfactors=2, sample.cov=cor(mydata[, c(4:6, 11:13)]),
        rotation.args=list(geomin.epsilon=0.05, rstarts=1, orthogonal=T))
summary(fit, standardized=T, fit.measures = TRUE, cutoff=.4)
## This is lavaan 0.6.16 -- running exploratory factor analysis
##
## Estimator DWLS
## Rotation method VARIMAX ORTHOGONAL
## Rotation algorithm (rstarts) GPA (1)
## Standardized metric TRUE
## Row weights Kaiser
##
## Number of observations 382
##
## Fit measures:
## chisq df pvalue cfi rmsea
## nfactors = 2 2.651 4 0.618 1 0
##
## Eigenvalues correlation matrix:
##
## ev1 ev2 ev3 ev4 ev5 ev6
## 2.238 1.756 0.769 0.621 0.361 0.254
##
## Standardized loadings: (* = significant at 1% level)
##
## f1 f2 unique.var communalities
## X1A 0.824* * 0.312 0.688
## X1B 0.440* 0.803 0.197
## X1C .* 0.523* 0.671 0.329
## X3B 0.710* 0.496 0.504
## X3C 0.999* 0.000 1.000
## New3A 0.690* 0.520 0.480
##
## f2 f1 total
## Sum of sq (ortho) loadings 1.759 1.441 3.199
## Proportion of total 0.550 0.450 1.000
## Proportion var 0.293 0.240 0.533
## Cumulative var 0.293 0.533 0.533
The model fits. (The negative variance estimate is statistically insignificant.)
mod='
Economic_Freedom=~F1+F2+F3
F1=~X1A+X1B+X3B
F2=~X2A+X2B+X2C
F3=~X1C+New3A+X3C
'
#optimal covariance structure for fit
fit=sem(mod, data=mydata, do.fit=T, estimator='DWLS')
(mys6=summary(fit, standardized=T, fit.measures=T))
## lavaan 0.6.16 ended normally after 76 iterations
##
## Estimator DWLS
## Optimization method NLMINB
## Number of model parameters 21
##
## Number of observations 382
##
## Model Test User Model:
##
## Test statistic 159.957
## Degrees of freedom 24
## P-value (Chi-square) 0.000
##
## Model Test Baseline Model:
##
## Test statistic 983.750
## Degrees of freedom 36
## P-value 0.000
##
## User Model versus Baseline Model:
##
## Comparative Fit Index (CFI) 0.857
## Tucker-Lewis Index (TLI) 0.785
##
## Root Mean Square Error of Approximation:
##
## RMSEA 0.122
## 90 Percent confidence interval - lower 0.104
## 90 Percent confidence interval - upper 0.140
## P-value H_0: RMSEA <= 0.050 0.000
## P-value H_0: RMSEA >= 0.080 1.000
##
## Standardized Root Mean Square Residual:
##
## SRMR 0.103
##
## Parameter Estimates:
##
## Standard errors Standard
## Information Expected
## Information saturated (h1) model Unstructured
##
## Latent Variables:
## Estimate Std.Err z-value P(>|z|) Std.lv Std.all
## Economic_Freedom =~
## F1 1.000 0.232 0.232
## F2 4.170 1.005 4.148 0.000 0.945 0.945
## F3 2.332 0.625 3.731 0.000 0.712 0.712
## F1 =~
## X1A 1.000 0.827 0.827
## X1B 0.611 0.152 4.007 0.000 0.505 0.505
## X3B 0.705 0.161 4.368 0.000 0.583 0.583
## F2 =~
## X2A 1.000 0.845 0.845
## X2B -0.507 0.079 -6.444 0.000 -0.428 -0.428
## X2C 0.371 0.059 6.270 0.000 0.314 0.314
## F3 =~
## X1C 1.000 0.627 0.627
## New3A 1.064 0.101 10.552 0.000 0.667 0.667
## X3C 1.408 0.146 9.651 0.000 0.883 0.883
##
## Variances:
## Estimate Std.Err z-value P(>|z|) Std.lv Std.all
## .X1A 0.316 0.236 1.340 0.180 0.316 0.316
## .X1B 0.745 0.140 5.321 0.000 0.745 0.745
## .X3B 0.660 0.256 2.580 0.010 0.660 0.660
## .X2A 0.286 0.144 1.978 0.048 0.286 0.286
## .X2B 0.816 0.102 7.993 0.000 0.816 0.816
## .X2C 0.902 0.247 3.653 0.000 0.902 0.902
## .X1C 0.607 0.079 7.708 0.000 0.607 0.607
## .New3A 0.555 0.101 5.483 0.000 0.555 0.555
## .X3C 0.221 0.108 2.046 0.041 0.221 0.221
## Economic_Fredm 0.037 0.014 2.679 0.007 1.000 1.000
## .F1 0.647 0.192 3.366 0.001 0.946 0.946
## .F2 0.076 0.225 0.339 0.735 0.107 0.107
## .F3 0.194 0.069 2.813 0.005 0.492 0.492
lavaanPlot(model = fit, edge_options = list(color = "grey"),coefs = TRUE, covs=F)
temp2=lavPredict(fit)[,1]
mydata$ScoreM6=10*(temp2-min(temp2))/(max(temp2)-min(temp2)) #scale between 0 and 10
fit=efa(data=mydata[, 4:12], do.fit=T, estimator='DWLS',
        rotation='varimax', nfactors=3, sample.cov=cor(mydata[, 4:12]),
        rotation.args=list(geomin.epsilon=0.05, rstarts=1, orthogonal=T))
summary(fit, standardized=T, fit.measures = TRUE, cutoff=0.4)
## This is lavaan 0.6.16 -- running exploratory factor analysis
##
## Estimator DWLS
## Rotation method VARIMAX ORTHOGONAL
## Rotation algorithm (rstarts) GPA (1)
## Standardized metric TRUE
## Row weights Kaiser
##
## Number of observations 382
##
## Fit measures:
## chisq df pvalue cfi rmsea
## nfactors = 3 20.664 12 0.056 0.989 0.044
##
## Eigenvalues correlation matrix:
##
## ev1 ev2 ev3 ev4 ev5 ev6 ev7 ev8 ev9
## 2.644 1.877 1.249 0.945 0.763 0.599 0.364 0.312 0.247
##
## Standardized loadings: (* = significant at 1% level)
##
## f1 f2 f3 unique.var communalities
## X1A 0.695* 0.516 0.484
## X1B .* 0.497* 0.710 0.290
## X1C .* .* 0.433* 0.631 0.369
## X2A 0.955* .* .* 0.000 1.000
## X2B -0.556* .* 0.650 0.350
## X2C . .* 0.826 0.174
## X3A . 0.514* .* 0.700 0.300
## X3B 0.762* 0.408 0.592
## X3C .* .* 0.966* 0.000 1.000
##
## f2 f1 f3 total
## Sum of sq (ortho) loadings 1.787 1.424 1.349 4.560
## Proportion of total 0.392 0.312 0.296 1.000
## Proportion var 0.199 0.158 0.150 0.507
## Cumulative var 0.199 0.357 0.507 0.507
The model fits. (The negative variance estimate is statistically insignificant.)
mod='
Economic_Freedom=~F1+F2+F3
F1=~X2A+X2B
F2=~X1A+X1B+X3A+X3B
F3=~X1C+X3C
'
#optimal covariance structure for fit
fit=sem(mod, data=mydata, do.fit=T, estimator='DWLS')
## Warning in lav_object_post_check(object): lavaan WARNING: some estimated ov
## variances are negative
(mys7=summary(fit, standardized=T, fit.measures=T))
## lavaan 0.6.16 ended normally after 61 iterations
##
## Estimator DWLS
## Optimization method NLMINB
## Number of model parameters 19
##
## Number of observations 382
##
## Model Test User Model:
##
## Test statistic 57.094
## Degrees of freedom 17
## P-value (Chi-square) 0.000
##
## Model Test Baseline Model:
##
## Test statistic 748.582
## Degrees of freedom 28
## P-value 0.000
##
## User Model versus Baseline Model:
##
## Comparative Fit Index (CFI) 0.944
## Tucker-Lewis Index (TLI) 0.908
##
## Root Mean Square Error of Approximation:
##
## RMSEA 0.079
## 90 Percent confidence interval - lower 0.057
## 90 Percent confidence interval - upper 0.102
## P-value H_0: RMSEA <= 0.050 0.017
## P-value H_0: RMSEA >= 0.080 0.488
##
## Standardized Root Mean Square Residual:
##
## SRMR 0.076
##
## Parameter Estimates:
##
## Standard errors Standard
## Information Expected
## Information saturated (h1) model Unstructured
##
## Latent Variables:
## Estimate Std.Err z-value P(>|z|) Std.lv Std.all
## Economic_Freedom =~
## F1 1.000 0.403 0.403
## F2 0.381 0.066 5.751 0.000 0.378 0.378
## F3 0.972 0.214 4.532 0.000 0.846 0.846
## F1 =~
## X2A 1.000 1.761 1.761
## X2B -0.173 0.079 -2.204 0.028 -0.305 -0.305
## F2 =~
## X1A 1.000 0.715 0.715
## X1B 0.737 0.122 6.066 0.000 0.527 0.527
## X3A 0.732 0.125 5.868 0.000 0.523 0.523
## X3B 0.914 0.137 6.686 0.000 0.654 0.654
## F3 =~
## X1C 1.000 0.815 0.815
## X3C 0.808 0.116 6.981 0.000 0.659 0.659
##
## Variances:
## Estimate Std.Err z-value P(>|z|) Std.lv Std.all
## .X2A -2.101 1.446 -1.453 0.146 -2.101 -2.101
## .X2B 0.907 0.103 8.828 0.000 0.907 0.907
## .X1A 0.488 0.167 2.917 0.004 0.488 0.488
## .X1B 0.722 0.132 5.465 0.000 0.722 0.722
## .X3A 0.726 0.098 7.416 0.000 0.726 0.726
## .X3B 0.572 0.249 2.295 0.022 0.572 0.572
## .X1C 0.335 0.123 2.732 0.006 0.335 0.335
## .X3C 0.566 0.089 6.352 0.000 0.566 0.566
## Economic_Fredm 0.504 0.115 4.370 0.000 1.000 1.000
## .F1 2.597 1.437 1.808 0.071 0.838 0.838
## .F2 0.439 0.098 4.470 0.000 0.857 0.857
## .F3 0.189 0.125 1.514 0.130 0.285 0.285
lavaanPlot(model = fit, edge_options = list(color = "grey"),coefs = TRUE, covs=F)
temp2=lavPredict(fit)[,1]
## Warning in lav_object_post_check(object): lavaan WARNING: some estimated ov
## variances are negative
mydata$ScoreM7=10*(temp2-min(temp2))/(max(temp2)-min(temp2)) #scale between 0 and 10
fit=efa(data=mydata[, c(4:9, 11:13)], do.fit=T, estimator='DWLS',
        rotation='varimax', nfactors=3, sample.cov=cor(mydata[, c(4:9, 11:13)]),
        rotation.args=list(geomin.epsilon=0.05, rstarts=1, orthogonal=T))
summary(fit, standardized=T, fit.measures = TRUE, cutoff=0.4)
## This is lavaan 0.6.16 -- running exploratory factor analysis
##
## Estimator DWLS
## Rotation method VARIMAX ORTHOGONAL
## Rotation algorithm (rstarts) GPA (1)
## Standardized metric TRUE
## Row weights Kaiser
##
## Number of observations 382
##
## Fit measures:
## chisq df pvalue cfi rmsea
## nfactors = 3 16.988 12 0.15 0.995 0.033
##
## Eigenvalues correlation matrix:
##
## ev1 ev2 ev3 ev4 ev5 ev6 ev7 ev8 ev9
## 2.759 1.869 1.333 1.001 0.631 0.522 0.334 0.317 0.233
##
## Standardized loadings: (* = significant at 1% level)
##
## f1 f2 f3 unique.var communalities
## X1A 0.687* 0.523 0.477
## X1B 0.557* .* 0.664 0.336
## X1C .* .* 0.460* 0.623 0.377
## X2A .* 0.926* .* 0.000 1.000
## X2B .* -0.599* 0.606 0.394
## X2C . .* 0.832 0.168
## X3B 0.650* . 0.565 0.435
## X3C .* . 0.978* 0.003 0.997
## New3A . 0.690* 0.511 0.489
##
## f3 f1 f2 total
## Sum of sq (ortho) loadings 1.858 1.451 1.364 4.672
## Proportion of total 0.398 0.311 0.292 1.000
## Proportion var 0.206 0.161 0.152 0.519
## Cumulative var 0.206 0.368 0.519 0.519
The model fits. (The negative variance estimate is statistically insignificant.)
mod='
Economic_Freedom=~F1+F2+F3
F1=~X2A+X2B
F2=~X1A+X1B+X3B
F3=~X1C+X3C+New3A
'
#optimal covariance structure for fit
fit=sem(mod, data=mydata, do.fit=T, estimator='DWLS')
## Warning in lav_object_post_check(object): lavaan WARNING: some estimated ov
## variances are negative
(mys8=summary(fit, standardized=T, fit.measures=T))
## lavaan 0.6.16 ended normally after 88 iterations
##
## Estimator DWLS
## Optimization method NLMINB
## Number of model parameters 19
##
## Number of observations 382
##
## Model Test User Model:
##
## Test statistic 74.800
## Degrees of freedom 17
## P-value (Chi-square) 0.000
##
## Model Test Baseline Model:
##
## Test statistic 857.848
## Degrees of freedom 28
## P-value 0.000
##
## User Model versus Baseline Model:
##
## Comparative Fit Index (CFI) 0.930
## Tucker-Lewis Index (TLI) 0.885
##
## Root Mean Square Error of Approximation:
##
## RMSEA 0.094
## 90 Percent confidence interval - lower 0.073
## 90 Percent confidence interval - upper 0.117
## P-value H_0: RMSEA <= 0.050 0.000
## P-value H_0: RMSEA >= 0.080 0.873
##
## Standardized Root Mean Square Residual:
##
## SRMR 0.093
##
## Parameter Estimates:
##
## Standard errors Standard
## Information Expected
## Information saturated (h1) model Unstructured
##
## Latent Variables:
## Estimate Std.Err z-value P(>|z|) Std.lv Std.all
## Economic_Freedom =~
## F1 1.000 0.603 0.603
## F2 0.224 0.055 4.110 0.000 0.337 0.337
## F3 0.360 0.094 3.839 0.000 0.553 0.553
## F1 =~
## X2A 1.000 1.647 1.647
## X2B -0.198 0.076 -2.615 0.009 -0.326 -0.326
## F2 =~
## X1A 1.000 0.661 0.661
## X1B 0.975 0.216 4.516 0.000 0.645 0.645
## X3B 0.730 0.158 4.628 0.000 0.482 0.482
## F3 =~
## X1C 1.000 0.646 0.646
## X3C 1.370 0.155 8.818 0.000 0.885 0.885
## New3A 0.976 0.099 9.824 0.000 0.630 0.630
##
## Variances:
## Estimate Std.Err z-value P(>|z|) Std.lv Std.all
## .X2A -1.713 1.078 -1.589 0.112 -1.713 -1.713
## .X2B 0.894 0.102 8.740 0.000 0.894 0.894
## .X1A 0.563 0.176 3.196 0.001 0.563 0.563
## .X1B 0.585 0.159 3.665 0.000 0.585 0.585
## .X3B 0.767 0.244 3.144 0.002 0.767 0.767
## .X1C 0.583 0.083 7.051 0.000 0.583 0.583
## .X3C 0.218 0.114 1.901 0.057 0.218 0.218
## .New3A 0.603 0.100 6.010 0.000 0.603 0.603
## Economic_Fredm 0.986 0.260 3.798 0.000 1.000 1.000
## .F1 1.727 1.081 1.597 0.110 0.636 0.636
## .F2 0.388 0.112 3.474 0.001 0.887 0.887
## .F3 0.289 0.051 5.643 0.000 0.694 0.694
lavaanPlot(model = fit, edge_options = list(color = "grey"),coefs = TRUE, covs=F)
temp2=lavPredict(fit)[,1]
## Warning in lav_object_post_check(object): lavaan WARNING: some estimated ov
## variances are negative
mydata$ScoreM8=10*(temp2-min(temp2))/(max(temp2)-min(temp2)) #scale between 0 and 10
newdata=subset(mydata, select=c('GeoName','MEFI_Score', 'New_MEFI_Score', 'ScoreM3','ScoreM4','ScoreM5','ScoreM6', 'ScoreM7','ScoreM8'))
MEFI=newdata[order(-newdata$MEFI_Score),]
MEFI$RankOriginal=seq_len(nrow(MEFI))
MEFI=MEFI[order(-MEFI$New_MEFI_Score),]
MEFI$RankNewScore=seq_len(nrow(MEFI))
MEFI=MEFI[order(-MEFI$ScoreM3),]
MEFI$RankM3=seq_len(nrow(MEFI))
MEFI=MEFI[order(-MEFI$ScoreM4),]
MEFI$RankM4=seq_len(nrow(MEFI))
MEFI=MEFI[order(-MEFI$ScoreM5),]
MEFI$RankM5=seq_len(nrow(MEFI))
MEFI=MEFI[order(-MEFI$ScoreM6),]
MEFI$RankM6=seq_len(nrow(MEFI))
MEFI=MEFI[order(-MEFI$ScoreM7),]
MEFI$RankM7=seq_len(nrow(MEFI))
MEFI=MEFI[order(-MEFI$ScoreM8),]
MEFI$RankM8=seq_len(nrow(MEFI))
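The eight sort-and-rank steps above follow one pattern; an equivalent loop over the same column names (a sketch):
#rankcols=c(MEFI_Score='RankOriginal', New_MEFI_Score='RankNewScore',
#           ScoreM3='RankM3', ScoreM4='RankM4', ScoreM5='RankM5',
#           ScoreM6='RankM6', ScoreM7='RankM7', ScoreM8='RankM8')
#for(s in names(rankcols)){
#  MEFI=MEFI[order(-MEFI[[s]]),]             # sort descending on the score
#  MEFI[[rankcols[[s]]]]=seq_len(nrow(MEFI)) # rank 1 = most free
#}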
write.csv(MEFI,'MEFI.csv', row.names=F)
cor(MEFI[, c(10:17)], method='spearman')
## RankOriginal RankNewScore RankM3 RankM4 RankM5 RankM6
## RankOriginal 1.0000000 0.9691024 0.9317558 0.8712406 0.7772446 0.7342913
## RankNewScore 0.9691024 1.0000000 0.8979671 0.8000575 0.6998202 0.6906990
## RankM3 0.9317558 0.8979671 1.0000000 0.9027356 0.8459990 0.7387254
## RankM4 0.8712406 0.8000575 0.9027356 1.0000000 0.8936045 0.8814783
## RankM5 0.7772446 0.6998202 0.8459990 0.8936045 1.0000000 0.6101163
## RankM6 0.7342913 0.6906990 0.7387254 0.8814783 0.6101163 1.0000000
## RankM7 0.9001536 0.8686890 0.9342997 0.9033216 0.7542611 0.8370515
## RankM8 0.6004661 0.6321198 0.6123308 0.6052751 0.3042064 0.7437503
## RankM7 RankM8
## RankOriginal 0.9001536 0.6004661
## RankNewScore 0.8686890 0.6321198
## RankM3 0.9342997 0.6123308
## RankM4 0.9033216 0.6052751
## RankM5 0.7542611 0.3042064
## RankM6 0.8370515 0.7437503
## RankM7 1.0000000 0.8028731
## RankM8 0.8028731 1.0000000
corfunction(MEFI[,c(10:17)], 'spearman', 3)
require(tidyverse)
mydf=as.data.frame(round(rbind(mys1$fit, mys2$fit, mys3$fit,mys4$fit,mys5$fit,mys6$fit, mys7$fit,mys8$fit),5))
rownames(mydf)=c('M1 w/Cov','M2 w/Cov', 'M3','M4','M5','M6', 'M7', 'M8')
mydf$Model=rownames(mydf)
print(mydf[,6:8])
## baseline.chisq baseline.df baseline.pvalue
## M1 w/Cov 819.5549 36 0
## M2 w/Cov 983.7500 36 0
## M3 819.5549 36 0
## M4 983.7500 36 0
## M5 688.9045 28 0
## M6 983.7500 36 0
## M7 748.5824 28 0
## M8 857.8481 28 0
mydf[,c(11:13)]
## rmsea rmsea.ci.lower rmsea.ci.upper
## M1 w/Cov 0.18810 0.17313 0.20346
## M2 w/Cov 0.21422 0.19930 0.22951
## M3 0.14546 0.12808 0.16349
## M4 0.13814 0.12071 0.15624
## M5 0.09139 0.07002 0.11388
## M6 0.12194 0.10438 0.14022
## M7 0.07868 0.05674 0.10166
## M8 0.09447 0.07319 0.11686
mysub=mydf[,c(9,10)]%>%pivot_longer(everything(), names_to='metric', values_to='value')
mysub$Model=c(rep('M1',2),rep('M2',2),rep('M3',2),rep('M4',2),rep('M5',2),rep('M6',2), rep('M7',2), rep('M8',2))
myplot=ggplot(mysub, aes(x=metric, y=value, fill=Model))+geom_bar(stat='identity', alpha=.5, position='dodge')+
geom_text(aes(label = round(value, 3)),
position = position_dodge(.9),
color="black",vjust = .5,hjust =1, angle = 90)+xlab("")+ylab("Metric Value")
myplot
path1 <- data.frame(x=c(.5,1.5),y=c(.95,.95), Model=c('Benchmark'))
path2 <- data.frame(x=c(1.5,2.5),y=c(.9,.9), Model=c('Benchmark'))
q <- myplot + geom_path(data=path1,aes(x=x, y=y, col=Model))
q <- q + geom_path(data=path2,aes(x=x, y=y, col=Model))+ylim(0, 1.2)
q
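The two path layers mark commonly cited benchmarks for good fit (CFI >= .95, TLI >= .90); annotate('segment', ...) would draw the same cutoffs without the helper data frames, though without a legend entry (a sketch):
#q=myplot+annotate('segment', x=.5, xend=1.5, y=.95, yend=.95)+
#  annotate('segment', x=1.5, xend=2.5, y=.90, yend=.90)+ylim(0, 1.2)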
k=5
BaseTop=MEFI[MEFI$RankOriginal<=k,]
BaseTop=select(BaseTop, c('GeoName','RankOriginal'))
BaseTop=BaseTop[order(BaseTop$RankOriginal),"GeoName"]
SubTop=MEFI[MEFI$RankNewScore<=k,]
SubTop=select(SubTop, c('GeoName','RankNewScore'))
SubTop=SubTop[order(SubTop$RankNewScore),"GeoName"]
M3Top=MEFI[MEFI$RankM3<=k,]
M3Top=select(M3Top, c('GeoName','RankM3'))
M3Top=M3Top[order(M3Top$RankM3),"GeoName"]
M4Top=MEFI[MEFI$RankM4<=k,]
M4Top=select(M4Top, c('GeoName','RankM4'))
M4Top=M4Top[order(M4Top$RankM4),"GeoName"]
M5Top=MEFI[MEFI$RankM5<=k,]
M5Top=select(M5Top, c('GeoName','RankM5'))
M5Top=M5Top[order(M5Top$RankM5),"GeoName"]
M6Top=MEFI[MEFI$RankM6<=k,]
M6Top=select(M6Top, c('GeoName','RankM6'))
M6Top=M6Top[order(M6Top$RankM6),"GeoName"]
M7Top=MEFI[MEFI$RankM7<=k,]
M7Top=select(M7Top, c('GeoName','RankM7'))
M7Top=M7Top[order(M7Top$RankM7),"GeoName"]
M8Top=MEFI[MEFI$RankM8<=k,]
M8Top=select(M8Top, c('GeoName','RankM8'))
M8Top=M8Top[order(M8Top$RankM8),"GeoName"]
newdf=as.data.frame(cbind(BaseTop, SubTop, M3Top, M4Top, M5Top,M6Top, M7Top, M8Top))
myprint(newdf)
## BaseTop SubTop
## 1 Naples-Immokalee-Marco Island, FL MSA Naples-Immokalee-Marco Island, FL MSA
## 2 Sebastian-Vero Beach, FL MSA Sebastian-Vero Beach, FL MSA
## 3 Midland, TX MSA The Villages, FL MSA
## 4 The Villages, FL MSA Midland, TX MSA
## 5 Port St. Lucie, FL MSA Tyler, TX MSA
## M3Top
## 1 Naples-Immokalee-Marco Island, FL MSA
## 2 The Villages, FL MSA
## 3 Sebastian-Vero Beach, FL MSA
## 4 Homosassa Springs, FL MSA
## 5 Crestview-Fort Walton Beach-Destin, FL MSA
## M4Top
## 1 The Villages, FL MSA
## 2 Homosassa Springs, FL MSA
## 3 Naples-Immokalee-Marco Island, FL MSA
## 4 Punta Gorda, FL MSA
## 5 Crestview-Fort Walton Beach-Destin, FL MSA
## M5Top M6Top
## 1 Grand Island, NE MSA Tyler, TX MSA
## 2 Blacksburg-Christiansburg-Radford, VA MSA Abilene, TX MSA
## 3 Lynchburg, VA MSA San Angelo, TX MSA
## 4 Harrisonburg, VA MSA Midland, TX MSA
## 5 Staunton-Waynesboro, VA MSA Killeen-Temple, TX MSA
## M7Top M8Top
## 1 Naples-Immokalee-Marco Island, FL MSA Fairbanks, AK MSA
## 2 Manchester-Nashua, NH MSA Anchorage, AK MSA
## 3 The Villages, FL MSA Manchester-Nashua, NH MSA
## 4 Sebastian-Vero Beach, FL MSA Killeen-Temple, TX MSA
## 5 Homosassa Springs, FL MSA Sherman-Denison, TX MSA
myt=table(as.matrix(newdf))
myprint(myt[order(myt, decreasing=T)])
##
## Naples-Immokalee-Marco Island, FL MSA
## 5
## The Villages, FL MSA
## 5
## Sebastian-Vero Beach, FL MSA
## 4
## Homosassa Springs, FL MSA
## 3
## Midland, TX MSA
## 3
## Crestview-Fort Walton Beach-Destin, FL MSA
## 2
## Killeen-Temple, TX MSA
## 2
## Manchester-Nashua, NH MSA
## 2
## Tyler, TX MSA
## 2
## Abilene, TX MSA
## 1
## Anchorage, AK MSA
## 1
## Blacksburg-Christiansburg-Radford, VA MSA
## 1
## Fairbanks, AK MSA
## 1
## Grand Island, NE MSA
## 1
## Harrisonburg, VA MSA
## 1
## Lynchburg, VA MSA
## 1
## Port St. Lucie, FL MSA
## 1
## Punta Gorda, FL MSA
## 1
## San Angelo, TX MSA
## 1
## Sherman-Denison, TX MSA
## 1
## Staunton-Waynesboro, VA MSA
## 1
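The same select-and-sort pattern repeats below for the bottom five; one helper could serve both tables (a sketch; topnames is a name introduced here):
#topnames=function(rankcol, k=5, top=TRUE){
#  keep=if(top) MEFI[[rankcol]]<=k else MEFI[[rankcol]]>nrow(MEFI)-k
#  d=MEFI[keep, c('GeoName', rankcol)]
#  d[order(d[[rankcol]]), 'GeoName']
#}
#newdf=as.data.frame(sapply(paste0('Rank', c('Original','NewScore','M3','M4','M5','M6','M7','M8')), topnames))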
k=nrow(MEFI)-5
BaseBottom=MEFI[MEFI$RankOriginal>k,]
BaseBottom=select(BaseBottom, c('GeoName','RankOriginal'))
BaseBottom=BaseBottom[order(BaseBottom$RankOriginal),"GeoName"]
SubBottom=MEFI[MEFI$RankNewScore>k,]
SubBottom=select(SubBottom, c('GeoName','RankNewScore'))
SubBottom=SubBottom[order(SubBottom$RankNewScore),"GeoName"]
M3Bottom=MEFI[MEFI$RankM3>k,]
M3Bottom=select(M3Bottom, c('GeoName','RankM3'))
M3Bottom=M3Bottom[order(M3Bottom$RankM3),"GeoName"]
M4Bottom=MEFI[MEFI$RankM4>k,]
M4Bottom=select(M4Bottom, c('GeoName','RankM4'))
M4Bottom=M4Bottom[order(M4Bottom$RankM4),"GeoName"]
M5Bottom=MEFI[MEFI$RankM5>k,]
M5Bottom=select(M5Bottom, c('GeoName','RankM5'))
M5Bottom=M5Bottom[order(M5Bottom$RankM5),"GeoName"]
M6Bottom=MEFI[MEFI$RankM6>k,]
M6Bottom=select(M6Bottom, c('GeoName','RankM6'))
M6Bottom=M6Bottom[order(M6Bottom$RankM6),"GeoName"]
M7Bottom=MEFI[MEFI$RankM7>k,]
M7Bottom=select(M7Bottom, c('GeoName','RankM7'))
M7Bottom=M7Bottom[order(M7Bottom$RankM7),"GeoName"]
M8Bottom=MEFI[MEFI$RankM8>k,]
M8Bottom=select(M8Bottom, c('GeoName','RankM8'))
M8Bottom=M8Bottom[order(M8Bottom$RankM8),"GeoName"]
newdf=as.data.frame(cbind(BaseBottom, SubBottom, M3Bottom, M4Bottom, M5Bottom,M6Bottom, M7Bottom, M8Bottom))
myprint(newdf)
## BaseBottom SubBottom
## 1 Merced, CA MSA Merced, CA MSA
## 2 Visalia-Porterville, CA MSA Visalia-Porterville, CA MSA
## 3 Rapid City, SD MSA Bakersfield, CA MSA
## 4 Bakersfield, CA MSA Rapid City, SD MSA
## 5 El Centro, CA MSA El Centro, CA MSA
## M3Bottom M4Bottom
## 1 Stockton-Lodi, CA MSA Eugene, OR MSA
## 2 Visalia-Porterville, CA MSA Los Angeles-Long Beach-Anaheim, CA MSA
## 3 Merced, CA MSA Fresno, CA MSA
## 4 Bakersfield, CA MSA Bakersfield, CA MSA
## 5 El Centro, CA MSA New York-Newark-Jersey City, NY-NJ-PA MSA
## M5Bottom
## 1 Providence-Warwick-Pawtucket, RI MSA
## 2 Chicago-Naperville-Elgin, IL-IN-WI MSA
## 3 New York-Newark-Jersey City, NY-NJ-PA MSA
## 4 Anchorage, AK MSA
## 5 Fairbanks, AK MSA
## M6Bottom
## 1 Bend-Redmond, OR MSA
## 2 Salem, OR MSA
## 3 Eugene, OR MSA
## 4 Albany, OR MSA
## 5 New York-Newark-Jersey City, NY-NJ-PA MSA
## M7Bottom M8Bottom
## 1 Merced, CA MSA Mankato-North Mankato, MN MSA
## 2 Visalia-Porterville, CA MSA Rochester, MN MSA
## 3 New York-Newark-Jersey City, NY-NJ-PA MSA Kahului-Wailuku-Lahaina, HI MSA
## 4 Bakersfield, CA MSA Lexington-Fayette, KY MSA
## 5 El Centro, CA MSA Urban Honolulu, HI MSA
myt=table(as.matrix(newdf))
myprint(myt[order(myt, decreasing=T)])
##
## Bakersfield, CA MSA
## 5
## El Centro, CA MSA
## 4
## Merced, CA MSA
## 4
## New York-Newark-Jersey City, NY-NJ-PA MSA
## 4
## Visalia-Porterville, CA MSA
## 4
## Eugene, OR MSA
## 2
## Rapid City, SD MSA
## 2
## Albany, OR MSA
## 1
## Anchorage, AK MSA
## 1
## Bend-Redmond, OR MSA
## 1
## Chicago-Naperville-Elgin, IL-IN-WI MSA
## 1
## Fairbanks, AK MSA
## 1
## Fresno, CA MSA
## 1
## Kahului-Wailuku-Lahaina, HI MSA
## 1
## Lexington-Fayette, KY MSA
## 1
## Los Angeles-Long Beach-Anaheim, CA MSA
## 1
## Mankato-North Mankato, MN MSA
## 1
## Providence-Warwick-Pawtucket, RI MSA
## 1
## Rochester, MN MSA
## 1
## Salem, OR MSA
## 1
## Stockton-Lodi, CA MSA
## 1
## Urban Honolulu, HI MSA
## 1
tmp=data.frame(MEFI[order(MEFI$RankM6, decreasing=F),]) # full listing, ordered by the M6 ranking
tmp
## GeoName
## 352 Tyler, TX MSA
## 1 Abilene, TX MSA
## 308 San Angelo, TX MSA
## 229 Midland, TX MSA
## 183 Killeen-Temple, TX MSA
## 372 Wichita Falls, TX MSA
## 10 Amarillo, TX MSA
## 209 Longview, TX MSA
## 361 Waco, TX MSA
## 345 The Villages, FL MSA
## 30 Beaumont-Port Arthur, TX MSA
## 324 Sherman-Denison, TX MSA
## 268 Pensacola-Ferry Pass-Brent, FL MSA
## 87 Dallas-Fort Worth-Arlington, TX MSA
## 76 College Station-Bryan, TX MSA
## 83 Corpus Christi, TX MSA
## 213 Lubbock, TX MSA
## 328 Sioux Falls, SD MSA
## 48 Brownsville-Harlingen, TX MSA
## 309 San Antonio-New Braunfels, TX MSA
## 357 Victoria, TX MSA
## 321 Sebastian-Vero Beach, FL MSA
## 160 Houston-The Woodlands-Sugar Land, TX MSA
## 110 El Paso, TX MSA
## 256 Odessa, TX MSA
## 222 McAllen-Edinburg-Mission, TX MSA
## 322 Sebring, FL MSA
## 85 Crestview-Fort Walton Beach-Destin, FL MSA
## 245 Naples-Immokalee-Marco Island, FL MSA
## 254 Ocala, FL MSA
## 193 Lakeland-Winter Haven, FL MSA
## 342 Tampa-St. Petersburg-Clearwater, FL MSA
## 157 Homosassa Springs, FL MSA
## 261 Orlando-Kissimmee-Sanford, FL MSA
## 265 Palm Bay-Melbourne-Titusville, FL MSA
## 22 Austin-Round Rock, TX MSA
## 196 Laredo, TX MSA
## 283 Punta Gorda, FL MSA
## 170 Jacksonville, FL MSA
## 266 Panama City, FL MSA
## 95 Deltona-Daytona Beach-Ormond Beach, FL MSA
## 198 Las Vegas-Henderson-Paradise, NV MSA
## 252 North Port-Sarasota-Bradenton, FL MSA
## 278 Port St. Lucie, FL MSA
## 289 Reno, NV MSA
## 130 Gainesville, FL MSA
## 341 Tallahassee, FL MSA
## 226 Miami-Fort Lauderdale-West Palm Beach, FL MSA
## 58 Carson City, NV MSA
## 55 Cape Coral-Fort Myers, FL MSA
## 73 Cleveland, TN MSA
## 174 Johnson City, TN MSA
## 246 Nashville-Davidson--Murfreesboro--Franklin, TN MSA
## 186 Knoxville, TN MSA
## 239 Morristown, TN MSA
## 344 Texarkana, TX-AR MSA
## 169 Jackson, TN MSA
## 148 Hammond, LA MSA
## 235 Monroe, LA MSA
## 191 Lake Charles, LA MSA
## 224 Memphis, TN-MS-AR MSA
## 27 Baton Rouge, LA MSA
## 189 Lafayette, LA MSA
## 7 Alexandria, LA MSA
## 67 Chattanooga, TN-GA MSA
## 184 Kingsport-Bristol-Bristol, TN-VA MSA
## 249 New Orleans-Metairie, LA MSA
## 325 Shreveport-Bossier City, LA MSA
## 120 Flagstaff, AZ MSA
## 286 Rapid City, SD MSA
## 72 Clarksville, TN-KY MSA
## 378 Yakima, WA MSA
## 37 Bismarck, ND MSA
## 279 Prescott, AZ MSA
## 32 Bellingham, WA MSA
## 331 Spokane-Spokane Valley, WA MSA
## 320 Seattle-Tacoma-Bellevue, WA MSA
## 46 Bremerton-Silverdale, WA MSA
## 159 Houma-Thibodaux, LA MSA
## 259 Olympia-Tumwater, WA MSA
## 271 Phoenix-Mesa-Scottsdale, AZ MSA
## 362 Walla Walla, WA MSA
## 182 Kennewick-Richland-Pasco, WA MSA
## 369 Wenatchee, WA MSA
## 192 Lake Havasu City-Kingman, AZ MSA
## 349 Tucson, AZ MSA
## 326 Sierra Vista-Douglas, AZ MSA
## 382 Yuma, AZ MSA
## 210 Longview, WA MSA
## 240 Mount Vernon-Anacortes, WA MSA
## 111 Enid, OK MSA
## 197 Las Cruces, NM MSA
## 350 Tulsa, OK MSA
## 258 Oklahoma City, OK MSA
## 200 Lawton, OK MSA
## 219 Manhattan, KS MSA
## 6 Albuquerque, NM MSA
## 168 Jackson, MS MSA
## 315 Santa Fe, NM MSA
## 339 Sumter, SC MSA
## 126 Fort Smith, AR-OK MSA
## 122 Florence, SC MSA
## 158 Hot Springs, AR MSA
## 199 Lawrence, KS MSA
## 371 Wichita, KS MSA
## 233 Mobile, AL MSA
## 116 Fargo, ND-MN MSA
## 64 Charleston-North Charleston, SC MSA
## 202 Lewiston, ID-WA MSA
## 347 Topeka, KS MSA
## 90 Daphne-Fairhope-Foley, AL MSA
## 218 Manchester-Nashua, NH MSA
## 119 Fayetteville-Springdale-Rogers, AR-MO MSA
## 14 Anniston-Oxford-Jacksonville, AL MSA
## 237 Montgomery, AL MSA
## 93 Decatur, AL MSA
## 153 Hattiesburg, MS MSA
## 207 Little Rock-North Little Rock-Conway, AR MSA
## 21 Augusta-Richmond County, GA-SC MSA
## 99 Dothan, AL MSA
## 272 Pine Bluff, AR MSA
## 123 Florence-Muscle Shoals, AL MSA
## 190 Lafayette-West Lafayette, IN MSA
## 363 Warner Robins, GA MSA
## 155 Hilton Head Island-Bluffton-Beaufort, SC MSA
## 40 Bloomington, IN MSA
## 146 Gulfport-Biloxi-Pascagoula, MS MSA
## 176 Jonesboro, AR MSA
## 187 Kokomo, IN MSA
## 243 Myrtle Beach-Conway-North Myrtle Beach, SC-NC MSA
## 117 Farmington, NM MSA
## 145 Greenville-Anderson-Mauldin, SC MSA
## 131 Gainesville, GA MSA
## 162 Huntsville, AL MSA
## 227 Michigan City-La Porte, IN MSA
## 241 Muncie, IN MSA
## 171 Jacksonville, NC MSA
## 79 Columbia, SC MSA
## 36 Birmingham-Hoover, AL MSA
## 80 Columbus, GA-AL MSA
## 351 Tuscaloosa, AL MSA
## 297 Rome, GA MSA
## 135 Grand Forks, ND-MN MSA
## 88 Dalton, GA MSA
## 163 Idaho Falls, ID MSA
## 215 Macon, GA MSA
## 156 Hinesville, GA MSA
## 114 Evansville, IN-KY MSA
## 318 Savannah, GA MSA
## 18 Atlanta-Sandy Springs-Roswell, GA MSA
## 355 Valdosta, GA MSA
## 108 Elkhart-Goshen, IN MSA
## 20 Auburn-Opelika, AL MSA
## 330 Spartanburg, SC MSA
## 81 Columbus, IN MSA
## 49 Brunswick, GA MSA
## 42 Boise City-Nampa, ID MSA
## 17 Athens-Clarke County, GA MSA
## 3 Albany, GA MSA
## 134 Goldsboro, NC MSA
## 164 Indianapolis-Carmel-Anderson, IN MSA
## 275 Pocatello, ID MSA
## 177 Joplin, MO MSA
## 129 Gadsden, AL MSA
## 51 Burlington, NC MSA
## 376 Winston-Salem, NC MSA
## 118 Fayetteville, NC MSA
## 56 Cape Girardeau, MO-IL MSA
## 143 Greensboro-High Point, NC MSA
## 343 Terre Haute, IN MSA
## 285 Raleigh, NC MSA
## 173 Jefferson City, MO MSA
## 16 Asheville, NC MSA
## 65 Charlotte-Concord-Gastonia, NC-SC MSA
## 154 Hickory-Lenoir-Morganton, NC MSA
## 127 Fort Wayne, IN MSA
## 334 Springfield, MO MSA
## 208 Logan, UT-ID MSA
## 302 St. Joseph, MO-KS MSA
## 103 Durham-Chapel Hill, NC MSA
## 247 New Bern, NC MSA
## 281 Provo-Orem, UT MSA
## 296 Rocky Mount, NC MSA
## 301 St. George, UT MSA
## 257 Ogden-Clearfield, UT MSA
## 181 Kansas City, MO-KS MSA
## 78 Columbia, MO MSA
## 75 Coeur d'Alene, ID MSA
## 307 Salt Lake City, UT MSA
## 374 Wilmington, NC MSA
## 115 Fairbanks, AK MSA
## 12 Anchorage, AK MSA
## 96 Denver-Aurora-Lakewood, CO MSA
## 144 Greenville, NC MSA
## 337 Staunton-Waynesboro, VA MSA
## 214 Lynchburg, VA MSA
## 327 Sioux City, IA-NE-SD MSA
## 329 South Bend-Mishawaka, IN-MI MSA
## 77 Colorado Springs, CO MSA
## 206 Lincoln, NE MSA
## 282 Pueblo, CO MSA
## 137 Grand Junction, CO MSA
## 303 St. Louis, MO-IL MSA
## 125 Fort Collins-Loveland, CO MSA
## 141 Greeley, CO MSA
## 151 Harrisonburg, VA MSA
## 290 Richmond, VA MSA
## 38 Blacksburg-Christiansburg-Radford, VA MSA
## 101 Dubuque, IA MSA
## 205 Lima, OH MSA
## 359 Virginia Beach-Norfolk-Newport News, VA-NC MSA
## 375 Winchester, VA-WV MSA
## 66 Charlottesville, VA MSA
## 54 Canton-Massillon, OH MSA
## 370 Wheeling, WV-OH MSA
## 44 Boulder, CO MSA
## 292 Roanoke, VA MSA
## 260 Omaha-Council Bluffs, NE-IA MSA
## 136 Grand Island, NE MSA
## 335 Springfield, OH MSA
## 365 Waterloo-Cedar Falls, IA MSA
## 380 Youngstown-Warren-Boardman, OH-PA MSA
## 60 Cedar Rapids, IA MSA
## 165 Iowa City, IA MSA
## 11 Ames, IA MSA
## 97 Des Moines-West Des Moines, IA MSA
## 91 Davenport-Moline-Rock Island, IA-IL MSA
## 9 Altoona, PA MSA
## 161 Huntington-Ashland, WV-KY-OH MSA
## 238 Morgantown, WV MSA
## 175 Johnstown, PA MSA
## 57 Carbondale-Marion, IL MSA
## 267 Parkersburg-Vienna, WV MSA
## 368 Weirton-Steubenville, WV-OH MSA
## 31 Beckley, WV MSA
## 194 Lancaster, PA MSA
## 61 Chambersburg-Waynesboro, PA MSA
## 228 Midland, MI MSA
## 71 Cincinnati-Middleton, OH-KY-IN MSA
## 63 Charleston, WV MSA
## 112 Erie, PA MSA
## 105 Eau Claire, WI MSA
## 94 Decatur, IL MSA
## 39 Bloomington, IL MSA
## 25 Bangor, ME MSA
## 269 Peoria, IL MSA
## 221 Mansfield, OH MSA
## 284 Racine, WI MSA
## 62 Champaign--Urbana, IL MSA
## 201 Lebanon, PA MSA
## 379 York-Hanover, PA MSA
## 92 Dayton, OH MSA
## 236 Monroe, MI MSA
## 142 Green Bay, WI MSA
## 179 Kalamazoo-Portage, MI MSA
## 295 Rockford, IL MSA
## 89 Danville, IL MSA
## 15 Appleton, WI MSA
## 13 Ann Arbor, MI MSA
## 336 State College, PA MSA
## 273 Pittsburgh, PA MSA
## 124 Fond du Lac, WI MSA
## 107 Elizabethtown-Fort Knox, KY MSA
## 172 Janesville-Beloit, WI MSA
## 69 Chicago-Naperville-Elgin, IL-IN-WI MSA
## 276 Portland-South Portland, ME MSA
## 251 Niles-Benton Harbor, MI MSA
## 332 Springfield, IL MSA
## 203 Lewiston-Auburn, ME MSA
## 180 Kankakee, IL MSA
## 299 Saginaw, MI MSA
## 59 Casper, WY MSA
## 138 Grand Rapids-Wyoming, MI MSA
## 2 Akron, OH MSA
## 367 Wausau, WI MSA
## 242 Muskegon, MI MSA
## 319 Scranton--Wilkes-Barre--Hazleton, PA MSA
## 98 Detroit-Warren-Dearborn, MI MSA
## 132 Gettysburg, PA MSA
## 121 Flint, MI MSA
## 41 Bloomsburg-Berwick, PA MSA
## 217 Madison, WI MSA
## 346 Toledo, OH MSA
## 167 Jackson, MI MSA
## 230 Milwaukee-Waukesha-West Allis, WI MSA
## 74 Cleveland-Elyria, OH MSA
## 323 Sheboygan, WI MSA
## 373 Williamsport, PA MSA
## 104 East Stroudsburg, PA MSA
## 52 Burlington-South Burlington, VT MSA
## 287 Reading, PA MSA
## 263 Owensboro, KY MSA
## 188 La Crosse-Onalaska, WI-MN MSA
## 29 Bay City, MI MSA
## 195 Lansing-East Lansing, MI MSA
## 8 Allentown-Bethlehem-Easton, PA-NJ MSA
## 68 Cheyenne, WY MSA
## 82 Columbus, OH MSA
## 150 Harrisburg-Carlisle, PA MSA
## 212 Louisville-Jefferson County, KY-IN MSA
## 140 Great Falls, MT MSA
## 232 Missoula, MT MSA
## 34 Billings, MT MSA
## 262 Oshkosh-Neenah, WI MSA
## 353 Urban Honolulu, HI MSA
## 28 Battle Creek, MI MSA
## 147 Hagerstown-Martinsburg, MD-WV MSA
## 86 Cumberland, MD-WV MSA
## 280 Providence-Warwick-Pawtucket, RI MSA
## 45 Bowling Green, KY MSA
## 270 Philadelphia-Camden-Wilmington, PA-NJ-DE-MD MSA
## 100 Dover, DE MSA
## 364 Washington-Arlington-Alexandria, DC-VA-MD-WV MSA
## 306 Salisbury, MD-DE MSA
## 178 Kahului-Wailuku-Lahaina, HI MSA
## 348 Trenton, NJ MSA
## 358 Vineland-Bridgeton, NJ MSA
## 204 Lexington-Fayette, KY MSA
## 53 California-Lexington Park, MD MSA
## 43 Boston-Cambridge-Quincy, MA-NH MSA
## 24 Baltimore-Columbia-Towson, MD MSA
## 102 Duluth, MN-WI MSA
## 47 Bridgeport-Stamford-Norwalk, CT MSA
## 220 Mankato-North Mankato, MN MSA
## 231 Minneapolis-St. Paul-Bloomington, MN-WI MSA
## 377 Worcester, MA-CT MSA
## 293 Rochester, MN MSA
## 300 St. Cloud, MN MSA
## 253 Norwich-New London, CT MSA
## 333 Springfield, MA MSA
## 26 Barnstable Town, MA MSA
## 152 Hartford-West Hartford-East Hartford, CT MSA
## 19 Atlantic City-Hammonton, NJ MSA
## 248 New Haven-Milford, CT MSA
## 274 Pittsfield, MA MSA
## 366 Watertown-Fort Drum, NY MSA
## 50 Buffalo-Cheektowaga-Niagara Falls, NY MSA
## 255 Ocean City, NJ MSA
## 314 Santa Cruz-Watsonville, CA MSA
## 109 Elmira, NY MSA
## 305 Salinas, CA MSA
## 356 Vallejo-Fairfield, CA MSA
## 354 Utica-Rome, NY MSA
## 216 Madera, CA MSA
## 70 Chico, CA MSA
## 5 Albany-Schenectady-Troy, NY MSA
## 294 Rochester, NY MSA
## 340 Syracuse, NY MSA
## 310 San Diego-Carlsbad, CA MSA
## 244 Napa, CA MSA
## 312 San Jose-Sunnyvale-Santa Clara, CA MSA
## 298 Sacramento--Roseville--Arden-Arcadee, CA MSA
## 149 Hanford-Corcoran, CA MSA
## 211 Los Angeles-Long Beach-Anaheim, CA MSA
## 288 Redding, CA MSA
## 313 San Luis Obispo-Paso Robles-Arroyo Grande, CA MSA
## 264 Oxnard-Thousand Oaks-Ventura, CA MSA
## 277 Portland-Vancouver-Hillsboro, OR-WA MSA
## 381 Yuba City, CA MSA
## 128 Fresno, CA MSA
## 317 Santa Rosa, CA MSA
## 338 Stockton-Lodi, CA MSA
## 291 Riverside-San Bernardino-Ontario, CA MSA
## 234 Modesto, CA MSA
## 311 San Francisco-Oakland-Fremont, CA MSA
## 35 Binghamton, NY MSA
## 360 Visalia-Porterville, CA MSA
## 316 Santa Maria-Santa Barbara, CA MSA
## 133 Glens Falls, NY MSA
## 166 Ithaca, NY MSA
## 225 Merced, CA MSA
## 185 Kingston, NY MSA
## 106 El Centro, CA MSA
## 23 Bakersfield, CA MSA
## 139 Grants Pass, OR MSA
## 223 Medford-Ashland, OR MSA
## 84 Corvallis, OR MSA
## 33 Bend-Redmond, OR MSA
## 304 Salem, OR MSA
## 113 Eugene, OR MSA
## 4 Albany, OR MSA
## 250 New York-Newark-Jersey City, NY-NJ-PA MSA
## MEFI_Score New_MEFI_Score ScoreM3 ScoreM4 ScoreM5 ScoreM6
## 352 8.305701 8.249855 8.8169675 9.605915 8.0295216 10.0000000
## 1 7.966254 7.882337 8.4378697 9.529896 7.9729599 9.9106501
## 308 8.094744 8.001471 8.5919262 9.524295 7.9402623 9.8958049
## 229 8.593456 8.277960 9.0462172 9.637316 7.9633258 9.8597869
## 183 7.970140 7.872188 8.4803114 9.603775 8.2020497 9.8255029
## 372 7.992037 7.933851 8.4783624 9.517987 8.0418599 9.8011050
## 10 7.939000 7.801282 8.3660239 9.465303 7.8898248 9.7916322
## 209 7.811227 7.722632 8.3821291 9.532117 8.0703701 9.7831672
## 361 7.794591 7.670908 8.4345047 9.579712 8.2086325 9.6706482
## 345 8.562153 8.462706 9.8408763 10.000000 9.1539059 9.6620731
## 30 7.839124 7.727138 8.4430064 9.458057 7.9946525 9.6250584
## 324 7.879510 7.774540 8.5280410 9.541875 8.1779413 9.6193289
## 268 8.285605 8.153410 9.2775364 9.801192 8.8446957 9.6098726
## 87 8.092567 7.891692 8.4661499 9.293119 7.5731458 9.6081200
## 76 7.715802 7.643581 8.4128431 9.568855 8.2544570 9.6043629
## 83 7.725414 7.629802 8.3421223 9.471140 8.0617954 9.5957705
## 213 7.565643 7.491084 7.8433627 9.407342 8.0163168 9.5935859
## 328 8.053074 7.727225 8.2856424 9.179330 7.4300388 9.5683316
## 48 7.098762 7.122448 7.6641970 9.528908 8.4494991 9.5644675
## 309 7.801722 7.649969 8.1699916 9.321718 7.7651678 9.5488043
## 357 7.552875 7.445567 8.1832518 9.595903 8.3898413 9.5440952
## 321 8.742002 8.595771 9.7995043 9.806301 8.7902206 9.5299947
## 160 8.089738 7.891201 8.4803587 9.230276 7.5043745 9.5255691
## 110 7.244049 7.239665 7.1327054 8.801946 6.9966193 9.5237929
## 256 7.517312 7.204155 7.9093748 9.467928 8.0224183 9.5127544
## 222 6.980315 6.996983 7.6109631 9.517907 8.4855844 9.4961099
## 322 7.818680 7.709877 8.9531221 9.829109 9.0967824 9.4745711
## 85 8.301959 8.153389 9.4771277 9.859433 9.0062463 9.4658703
## 245 8.806686 8.618350 10.0000000 9.889110 8.9727438 9.4272348
## 254 8.074176 7.956015 9.1047807 9.752038 8.9336395 9.4174787
## 193 7.723405 7.572496 8.4422712 9.497465 8.4837310 9.4163595
## 342 8.192180 8.029575 9.1738228 9.689031 8.7252372 9.4137653
## 157 8.212575 8.099092 9.5786658 9.955464 9.2929209 9.4132799
## 261 7.938190 7.782602 8.8478082 9.607937 8.6096418 9.3966189
## 265 8.161420 7.996476 9.1451228 9.684214 8.7511754 9.3958518
## 22 8.016775 7.729349 8.3377149 9.139422 7.3735593 9.3624559
## 196 6.978413 6.983385 7.5954426 9.334419 8.1538284 9.3582145
## 283 8.078356 7.934464 9.4155082 9.879077 9.1508928 9.3528419
## 170 8.155603 7.964928 8.5178575 9.106970 7.6501914 9.3226264
## 266 7.779503 7.649647 8.6344418 9.446215 8.4215793 9.3017655
## 95 7.915611 7.799790 8.8499748 9.568722 8.6480039 9.2966998
## 198 6.932798 7.250402 6.5924561 7.004919 4.6762689 9.2614451
## 252 8.228521 8.058899 9.2260800 9.606275 8.6459639 9.2527789
## 278 8.324451 8.166524 9.4499947 9.695085 8.8237023 9.2388088
## 289 7.718917 8.003773 7.1804757 6.977018 4.5624374 9.2289906
## 130 7.867612 7.754130 8.3518076 9.190605 7.9883037 9.2160281
## 341 7.587567 7.443676 7.3714542 8.604388 6.9077336 9.1988332
## 226 8.086403 7.933155 8.8697989 9.354494 8.2408754 9.1489837
## 58 7.596674 7.743518 7.1011238 6.982716 4.5785168 9.1143784
## 55 7.943821 7.776538 8.9697120 9.654496 8.9398723 9.0568215
## 73 7.905506 7.891662 8.7630618 9.760474 9.5691502 9.0080856
## 174 7.849410 7.868315 8.7211893 9.738767 9.5901045 8.9515518
## 246 8.066807 7.933665 8.6009103 9.429750 8.8300831 8.9324620
## 186 7.802294 7.762720 8.3079041 9.396805 8.8888512 8.9309932
## 239 7.810368 7.789507 8.6269128 9.731205 9.6086430 8.8927608
## 344 7.233473 7.128243 7.3414382 8.686876 8.0051134 8.3883335
## 169 6.607818 6.596313 6.5852696 9.168001 9.1269097 8.3106927
## 148 6.202778 6.184857 4.6284888 7.412008 6.2408859 8.1905450
## 235 6.646153 6.602501 5.2302040 7.374610 6.0545651 8.1694663
## 191 6.592190 6.508772 5.2335216 7.381058 6.0416081 8.1294076
## 224 7.253817 7.173124 6.5624798 7.870140 6.7034864 8.0824890
## 27 6.892378 6.794925 5.1397498 6.970307 5.3015936 7.9825214
## 189 6.951486 6.877424 5.5112711 7.292596 5.9896571 7.9296025
## 7 6.872442 6.810076 5.1546791 7.050042 5.5864223 7.8971416
## 67 7.523486 7.439364 7.5742006 8.717233 8.5516156 7.8483367
## 184 7.682901 7.639456 8.4796069 9.247439 9.7075533 7.8415829
## 249 7.048577 6.967499 5.3355304 6.931469 5.3040540 7.8268096
## 325 6.712994 6.667003 5.0871084 6.923638 5.3949493 7.7892870
## 120 7.365965 6.905044 7.3939824 8.719880 8.4489461 7.7673040
## 286 4.436836 4.183502 5.9437270 9.133106 9.0388655 7.7279668
## 72 7.408867 7.481892 7.3163546 8.154799 8.0198634 7.6641551
## 378 6.175702 6.270471 6.0979250 5.766091 4.0005145 7.6182303
## 37 7.562230 6.973945 7.1797683 8.134681 7.0742852 7.6139010
## 279 7.170851 6.709930 7.2372366 8.630278 8.4309287 7.6075521
## 32 6.369341 6.433701 6.4427167 5.747701 3.8722507 7.5978760
## 331 6.190919 6.276331 5.9948373 5.559240 3.5910603 7.5716426
## 320 6.600002 6.535707 6.4112918 5.617284 3.5203671 7.5432922
## 46 6.553676 6.606339 6.5366785 5.743180 3.8812094 7.5362462
## 159 6.632943 6.526133 5.1705159 7.193895 6.0952777 7.5217971
## 259 6.370216 6.420541 6.4281487 5.737548 3.9024469 7.5128622
## 271 7.303761 6.842840 7.0219247 8.340880 7.9019731 7.5054591
## 362 6.136793 6.189456 6.1810355 5.734746 3.9617059 7.4928324
## 182 5.802344 5.880739 5.5852480 5.747863 4.1102416 7.4582339
## 369 5.854285 5.949054 5.7269918 5.724270 4.0886102 7.3964208
## 192 6.965140 6.504219 7.1436970 8.611692 8.5888978 7.3934781
## 349 7.277831 6.816910 7.1276013 8.319624 7.9426663 7.3902996
## 326 7.067884 6.606963 7.1879376 8.612949 8.6198351 7.3415221
## 382 6.732408 6.271487 7.0367785 8.594513 8.6534238 7.2576126
## 210 5.821972 5.905045 5.6507910 5.663217 4.0849636 7.2339022
## 240 5.688774 5.741437 5.5801086 5.685091 4.1721874 7.1657518
## 111 7.546786 7.506764 7.1815118 7.816980 7.7336026 7.1528174
## 197 5.776511 5.828062 3.2210508 5.509174 3.7690958 7.0068152
## 350 7.723071 7.636266 7.3057655 7.708789 7.5543779 6.9899409
## 258 7.670083 7.578599 7.3263666 7.776253 7.6960748 6.9761112
## 200 7.156816 7.144865 6.5608304 7.689242 7.8170144 6.8452681
## 219 7.308970 7.453041 7.7732238 7.684109 8.0180153 6.8320093
## 6 6.154302 6.100590 3.6268447 5.442972 3.6317847 6.7764822
## 168 6.729378 6.672535 4.9281665 6.235111 5.1171696 6.6663133
## 315 6.263269 6.087920 3.7564132 5.431582 3.5995715 6.6398555
## 339 7.367744 7.222715 6.5759201 7.857361 8.0076245 6.6343326
## 126 6.782059 6.653091 6.3229574 7.695918 8.1309449 6.6306239
## 122 7.405011 7.276356 6.5988930 7.845038 7.9967128 6.6169820
## 158 6.744399 6.612255 6.2560518 7.689805 8.2326505 6.5903450
## 199 6.978300 7.089622 7.3035178 7.634358 8.1260055 6.5756102
## 371 7.364078 7.374815 7.3416386 7.270526 7.3573619 6.5138769
## 233 6.447193 6.506158 4.9682004 6.814923 6.7735799 6.5058715
## 116 7.150823 6.901942 6.5703450 7.026671 6.6731752 6.4887157
## 64 7.505090 7.289885 6.8160063 7.820303 7.9143512 6.4743693
## 202 6.990795 7.041929 6.7922527 6.712772 6.4759589 6.4410444
## 347 7.081999 7.071684 7.4895940 7.630584 8.1245894 6.4391652
## 90 6.763181 6.880626 5.3177168 6.899996 6.9973993 6.4275398
## 218 8.087754 8.099079 9.2285924 7.551349 7.4900963 6.4197659
## 119 7.321780 7.135468 6.7585882 7.544553 7.9597287 6.3834523
## 14 6.149303 6.280783 4.5515319 6.837688 7.0453079 6.3806441
## 237 6.609036 6.707768 4.6628547 6.518859 6.3109570 6.3720678
## 93 6.507599 6.625044 5.0790707 6.883238 7.0470711 6.3619843
## 153 5.907741 5.932769 3.9514419 6.168383 5.4249319 6.3417187
## 207 6.853000 6.674072 6.2764146 7.508870 7.9910841 6.3325023
## 21 7.620578 7.485244 7.3357569 7.901038 8.4647382 6.3050560
## 99 6.245529 6.386366 4.6039622 6.890635 7.1989868 6.3037954
## 272 6.396622 6.231729 5.8086555 7.439469 7.9955480 6.2896591
## 123 6.422316 6.549117 4.6898706 6.648297 6.6775398 6.2887631
## 190 7.298110 7.420080 7.6836085 7.628549 8.5286839 6.2877655
## 363 7.589523 7.477140 7.6067833 8.047294 8.9154907 6.2760875
## 155 7.611129 7.438030 7.0165162 7.740170 7.8958852 6.2751267
## 40 7.311130 7.468188 7.8526434 7.848324 8.9822729 6.2746570
## 146 5.948761 5.919988 4.0957921 6.147233 5.3579653 6.2685553
## 176 6.558954 6.410435 6.0238806 7.552482 8.2157434 6.2554072
## 187 7.260012 7.421748 7.8715052 7.851516 9.0157854 6.2478443
## 243 7.187469 7.068171 6.7734289 7.865137 8.3804418 6.2373448
## 117 5.433575 5.450039 2.5864824 5.301303 3.9311783 6.2210427
## 145 7.326075 7.110870 6.3533979 7.741203 8.0225432 6.2140559
## 131 7.680129 7.516284 7.5690924 7.863066 8.5515523 6.1844354
## 162 6.540676 6.601981 4.8722839 6.856287 7.0801753 6.1837076
## 227 7.097847 7.250226 7.6668320 7.803213 9.0117980 6.1737328
## 241 6.837356 7.024823 7.1105278 7.802525 9.1227184 6.1390673
## 171 7.622307 7.529634 7.5743742 8.093371 9.1995898 6.1389076
## 79 7.253703 7.040838 6.4990917 7.725889 8.0261437 6.1231009
## 36 6.755798 6.784354 4.9119487 6.314314 6.0791974 6.1118523
## 80 7.378606 7.347126 6.8249803 7.399779 7.8773329 6.1024174
## 351 6.365196 6.459249 4.5494831 6.510758 6.5389764 6.0907900
## 297 7.299187 7.186804 7.3711785 7.992613 8.9698615 6.0736940
## 135 6.823402 6.689542 6.1491889 6.670340 6.5205213 6.0721817
## 88 7.218244 7.080130 6.9167878 7.776445 8.5828342 6.0556832
## 163 7.576593 7.512783 7.2919384 7.481487 8.1068057 6.0475114
## 215 7.623472 7.529802 6.9504608 7.296237 7.6367220 6.0322597
## 156 7.392166 7.291479 7.3811751 7.969358 8.9834295 6.0309752
## 114 7.269004 7.355961 7.1549767 7.170887 7.9020268 6.0101486
## 318 7.326483 7.211761 6.9404427 7.570526 8.1669031 5.9997753
## 18 7.790038 7.595783 7.3881075 7.520347 7.9715828 5.9802230
## 355 7.061460 6.998200 6.9753632 7.963204 9.0877388 5.9650420
## 108 7.489145 7.384214 7.9611881 7.788262 8.9501290 5.9567501
## 20 6.279730 6.425245 4.8456716 6.730852 7.1723844 5.9533507
## 330 6.955649 6.703018 5.9297810 7.693565 8.1512370 5.9524764
## 81 7.070936 7.043198 7.3045958 7.815205 9.0845229 5.9433397
## 49 7.083883 7.015945 6.7605199 7.708234 8.5665420 5.9429261
## 42 7.604201 7.460859 7.4032567 7.462121 8.0607678 5.9344076
## 17 7.361497 7.244435 7.0250133 7.682792 8.4698337 5.9116917
## 3 7.033448 6.956153 6.4824464 7.390737 7.9977068 5.9088359
## 134 7.268940 7.092057 7.0748958 8.058615 9.3394948 5.8799805
## 164 7.412696 7.471508 7.7317115 7.684106 9.0199120 5.8764163
## 275 7.299106 7.254010 7.1052396 7.387061 8.0771879 5.8698030
## 177 7.127078 7.240190 6.5412970 6.420371 6.5951084 5.8690828
## 129 6.568715 6.730604 5.1116629 6.627358 7.1549715 5.8610638
## 51 7.373712 7.236595 7.2664047 8.026170 9.2635386 5.8584959
## 376 7.499331 7.313091 7.3367198 7.978903 9.1263619 5.8481215
## 118 7.187639 7.045844 7.2358803 8.046794 9.3661660 5.8427328
## 56 7.211811 7.364237 6.7282146 6.363701 6.5406310 5.8340497
## 143 7.432923 7.251362 7.2873982 8.020783 9.2234569 5.8198950
## 343 7.013298 7.196087 7.5083777 7.627970 9.1019498 5.8155224
## 285 7.678090 7.428693 7.5727862 8.031737 9.1700172 5.7884839
## 173 7.417551 7.465166 6.9380106 6.474234 6.6372358 5.7866799
## 16 7.279983 7.037603 7.0226809 8.028909 9.2751311 5.7518589
## 65 7.281840 7.041293 6.8922328 7.944825 9.0456541 5.7463026
## 154 7.164166 6.954535 6.8947315 8.019578 9.3429651 5.7305127
## 127 7.402672 7.501250 7.8481194 7.577777 9.0216872 5.7249897
## 334 7.099889 7.203644 6.3667049 6.268031 6.3942452 5.7124761
## 208 7.103819 6.943920 6.6524872 7.811368 9.0940865 5.7049456
## 302 6.939632 6.988984 6.5052197 6.485760 6.7774245 5.7011389
## 103 7.734209 7.356157 7.6452778 8.030337 9.1361785 5.6859720
## 247 7.043723 6.901928 6.6299664 8.006268 9.4424431 5.6842461
## 281 7.128484 6.902427 6.6742379 7.851727 9.1528010 5.6514063
## 296 6.943090 6.805973 6.5962407 7.774894 9.0328425 5.6381851
## 301 6.958899 6.760912 6.5552383 7.843058 9.1733913 5.6350948
## 257 7.222522 6.982430 6.7904879 7.840845 9.1443543 5.6010470
## 181 7.089413 7.056098 6.3997235 6.308019 6.4303556 5.5901863
## 78 6.939395 7.029115 6.0227356 6.249604 6.4411874 5.5812969
## 75 7.008933 6.863252 6.6832695 7.400160 8.3072065 5.5793057
## 307 7.229756 6.858669 6.7189436 7.806189 9.0180083 5.5083162
## 374 6.714652 6.556483 6.4247127 7.969653 9.4742189 5.5002658
## 115 6.633907 6.675872 4.1100068 2.960524 0.0000000 5.4201213
## 12 6.553053 6.683907 4.0481406 2.942521 0.0311488 5.4156198
## 96 7.647274 7.515615 6.8807179 5.998371 5.9676367 5.3718361
## 144 6.551182 6.388334 5.9808691 7.938537 9.5766848 5.3370609
## 337 7.806334 7.664884 8.3143315 8.088411 9.8620346 5.3238177
## 214 7.663058 7.584765 8.2172133 8.070524 9.8881387 5.3197747
## 327 6.832335 6.701831 6.5370740 6.951669 7.7561176 5.3196990
## 329 7.133708 7.276348 7.3647611 6.964655 8.2953864 5.3138410
## 77 7.695630 7.659878 7.0403064 6.036438 6.1752632 5.2948309
## 206 7.328316 7.219256 8.3871476 7.868629 9.6248002 5.2891032
## 282 7.354399 7.349056 6.7838956 6.039713 6.2921534 5.2872941
## 137 7.590941 7.562206 6.9653803 6.031327 6.2137224 5.2710119
## 303 7.469524 7.568053 6.4246223 5.476601 5.3282074 5.2601653
## 125 7.740603 7.674441 7.0735867 6.035463 6.1751383 5.2498542
## 141 7.584194 7.515693 6.9544434 6.031790 6.2284518 5.2173906
## 151 7.603876 7.443712 8.0872629 8.059861 9.8848080 5.2116438
## 290 7.970823 7.780250 8.3584158 7.945264 9.5957062 5.1701606
## 38 7.511367 7.407343 7.9158041 8.022605 9.9059102 5.1654722
## 101 6.996048 6.953533 6.3852160 6.487641 7.1214716 5.1553691
## 205 6.153852 6.440600 3.5601990 3.461709 1.8863935 5.1285007
## 359 7.485843 7.354490 7.5632718 7.584211 9.0618401 5.0848347
## 375 7.406812 7.246985 7.5403349 7.514078 9.0401519 5.0532865
## 66 8.071121 7.761250 8.3693120 7.858686 9.4099244 5.0527513
## 54 6.279347 6.573113 3.7277832 3.411463 1.8846487 5.0417703
## 370 5.888629 6.095377 3.4702107 3.964973 2.8487492 5.0413470
## 44 7.267555 7.032972 6.6661086 5.937655 5.9856667 5.0398711
## 292 7.496248 7.350120 7.5807317 7.586647 9.0950778 5.0327585
## 260 7.137609 6.972976 6.9409512 6.640754 7.4315786 5.0005367
## 136 6.775247 6.668526 7.8743419 7.910797 10.0000000 4.9592842
## 335 6.032381 6.288720 3.4438220 3.403177 1.9797969 4.9521033
## 365 6.638892 6.572986 5.9690110 6.433178 7.2329672 4.9254877
## 380 6.125149 6.431378 3.6579093 3.515931 2.2051582 4.9232910
## 60 6.849836 6.734807 6.2217612 6.438713 7.2043524 4.9110006
## 165 7.167849 7.050481 6.5678658 6.394216 7.0876167 4.8842461
## 11 6.623880 6.562652 5.8906743 6.398113 7.2397634 4.8416761
## 97 6.987147 6.792586 6.3662965 6.425930 7.0991926 4.8376531
## 91 6.588592 6.779391 5.5519841 4.899369 4.8921813 4.6525374
## 9 6.753290 7.126639 5.2157706 4.609996 4.6422474 4.6288642
## 161 5.864151 6.028671 3.5415614 4.341300 4.0541810 4.6130217
## 238 6.062568 6.188066 3.8891173 4.616148 4.5607769 4.6016232
## 175 6.433054 6.813420 4.8000112 4.462383 4.3812541 4.5917756
## 57 6.370872 6.779001 5.0753784 3.806258 3.3084878 4.5328257
## 267 5.804791 5.960698 3.5256607 4.538188 4.5501431 4.5295198
## 368 5.436184 5.668967 2.7690621 3.759779 3.0530644 4.5258224
## 31 5.673704 5.831950 3.5525712 4.637828 4.7640411 4.5207050
## 194 6.870730 7.096710 5.7605079 4.893648 5.0780736 4.4922644
## 61 6.861595 7.099271 5.6931397 4.836213 5.0540848 4.4823218
## 228 6.764919 7.162695 5.8402333 4.234621 4.1519380 4.4807809
## 71 6.348105 6.549410 3.6303482 3.288315 2.1043235 4.4523024
## 63 5.784584 5.919438 3.6799828 4.638668 4.7442197 4.4507526
## 112 6.493901 6.855554 4.9517294 4.484904 4.4950472 4.4480001
## 105 6.909050 6.950145 6.0892990 5.678907 6.4283596 4.4106078
## 94 6.390476 6.782231 4.8106513 3.520721 2.8102860 4.4040322
## 39 6.373566 6.741930 5.1847955 3.739309 3.1783103 4.4013934
## 25 6.418001 6.532698 6.2100081 5.606810 6.5332312 4.3910126
## 269 6.436965 6.812346 5.0940205 3.644803 3.0377855 4.3814898
## 221 5.834736 6.114466 3.2011703 3.145327 2.1032724 4.3715567
## 284 6.961440 7.037623 6.1575318 5.655445 6.4301891 4.3663876
## 62 6.411556 6.782259 5.2111830 3.766975 3.2766497 4.3639398
## 201 6.634026 6.909129 5.0054294 4.281248 4.0964957 4.3559906
## 379 6.700844 6.945538 5.5943175 4.773165 4.9845570 4.3378494
## 92 6.014905 6.264226 3.4352963 3.132095 2.0189006 4.3143720
## 236 6.313106 6.724917 5.2463908 4.015816 3.9469518 4.3128962
## 142 6.975720 6.979388 6.1246366 5.658026 6.4280083 4.3039610
## 179 6.378409 6.776185 5.5287169 4.216862 4.2997830 4.2971578
## 295 6.301404 6.669768 4.9913485 3.653655 3.1194087 4.2929270
## 89 6.162570 6.554326 4.7029267 3.616943 3.1526350 4.2908373
## 15 6.962810 6.910338 6.0658728 5.673085 6.4523067 4.2824457
## 13 6.661356 6.995974 5.6943398 4.083631 3.9349730 4.2778641
## 336 6.856906 7.108617 5.6501095 4.690089 4.9919818 4.2737694
## 273 6.660654 6.912365 5.0908659 4.420074 4.4470306 4.2704640
## 124 6.790153 6.770430 5.9921424 5.660443 6.4859676 4.2487648
## 107 6.005776 6.180997 3.5923427 4.431232 4.5754459 4.2475426
## 172 6.595526 6.676388 5.8104973 5.637073 6.5425943 4.2445721
## 69 6.033901 6.306980 3.7886397 2.800542 1.4028252 4.2425351
## 276 6.642859 6.647614 6.4676867 5.580557 6.4475056 4.2215070
## 251 6.273036 6.680169 5.3934490 4.164334 4.2779311 4.2063089
## 332 6.114006 6.477691 4.4872491 3.526539 2.9776560 4.1920517
## 203 6.232408 6.314356 6.1156386 5.558025 6.5720267 4.1742119
## 180 6.245016 6.622736 4.9953208 3.659177 3.2493517 4.1708595
## 299 6.169910 6.584061 5.2558901 4.278542 4.6977212 4.1562480
## 59 6.780837 6.611406 5.4793355 5.692803 6.5630409 4.1493806
## 138 6.563286 6.918957 5.6162788 4.164462 4.3687337 4.1294922
## 2 6.131943 6.388282 3.4977074 3.026825 2.0282229 4.1291036
## 367 6.691408 6.613206 5.7349201 5.644817 6.5152682 4.1255912
## 242 6.129050 6.526826 5.5925715 4.546919 5.2105379 4.1235425
## 319 6.553293 6.823717 5.2016255 4.481763 4.7340399 4.1109566
## 98 6.416473 6.795535 4.8928172 3.555662 3.2061933 4.1074584
## 132 6.521408 6.817563 5.5529575 4.889159 5.5989130 4.1026340
## 121 5.981201 6.397691 4.8542080 4.087924 4.3604429 4.0971788
## 41 6.731552 6.941158 5.5591113 4.700236 5.1792512 4.0913880
## 217 6.953194 6.793120 6.1597462 5.639513 6.3952176 4.0884173
## 346 5.970134 6.240507 3.3122673 3.006678 2.0798870 4.0777149
## 167 6.063030 6.470163 4.8926169 3.891340 4.0059775 4.0455654
## 230 6.647278 6.615859 4.5909892 4.432448 4.2469775 4.0414070
## 74 5.920469 6.153415 3.3035865 3.025413 2.0824201 4.0364827
## 323 6.493014 6.520074 5.3825771 5.597347 6.6006490 4.0239890
## 373 6.435623 6.736457 4.9087073 4.309715 4.6007551 3.9799721
## 104 6.189913 6.532853 5.5636215 4.794580 5.3264267 3.9780580
## 52 6.323417 6.011487 6.2264729 6.145862 7.5135654 3.9185630
## 287 6.376678 6.602659 5.0731416 4.444214 4.7460552 3.9174554
## 263 5.951527 6.180549 3.5042232 4.208220 4.5335154 3.9124766
## 188 6.524156 6.608377 5.5620963 5.343989 6.3548842 3.9115640
## 29 5.620588 6.053451 3.7991323 3.182879 2.7434421 3.9031089
## 195 6.047758 6.443195 5.0691436 4.002109 4.3085200 3.8920976
## 8 6.394809 6.626505 4.9968687 4.276104 4.4601088 3.8834456
## 68 6.043216 5.925247 4.5591120 5.635332 6.8139760 3.8709157
## 82 6.013636 6.216173 3.3336313 2.928741 2.0723223 3.8523811
## 150 6.615699 6.799574 5.4043686 4.585619 5.0933583 3.8515019
## 212 6.552958 6.673021 4.7758563 4.744686 5.5417924 3.8028758
## 140 6.909168 7.120665 5.8078097 4.423592 4.6749947 3.7835884
## 232 6.982083 7.219312 5.9211725 4.401991 4.6192347 3.7698370
## 34 7.001037 7.128324 5.8727983 4.414581 4.6292544 3.6972254
## 262 6.166611 6.079051 4.8091998 5.530570 6.6672323 3.6785285
## 353 5.525477 6.119629 4.6245096 2.839308 3.2326860 3.6771544
## 28 5.990731 6.372133 5.0975836 4.007823 4.5701759 3.6617035
## 147 6.305562 6.404367 5.3264324 5.231515 6.8010173 3.5516532
## 86 6.318744 6.445084 5.7103646 5.531781 7.4543171 3.5007612
## 280 5.914227 6.028910 3.7142454 2.471361 1.5330905 3.3541289
## 45 6.046448 6.252078 3.6126087 3.937622 4.6257706 3.3320351
## 270 6.483800 6.656987 4.8901355 3.870022 4.2569004 3.3204851
## 100 6.434728 6.548083 5.7200978 5.392932 6.9733289 3.2274238
## 364 6.952040 6.570023 6.7608863 6.143913 8.1817593 3.1242445
## 306 6.531438 6.646768 6.2719130 5.550881 7.5413022 3.0760717
## 178 4.898941 5.394848 3.6050417 2.650884 3.3002051 3.0618062
## 348 6.413820 6.612714 5.1402711 3.030706 3.0456560 2.8636523
## 358 5.501018 5.915117 3.9891298 2.992980 3.3703000 2.8184642
## 204 6.065324 6.238206 3.1997544 3.273890 3.8186374 2.8042571
## 53 6.965968 6.967988 6.7865624 5.535530 8.1736413 2.6634737
## 43 7.025329 6.639788 6.4497437 4.576952 5.8078196 2.4886419
## 24 6.674288 6.723092 5.7440133 4.663361 6.7170698 2.3971520
## 102 5.555191 5.765296 4.5159299 3.909988 5.7311198 2.3179027
## 47 6.732983 6.853189 5.7412555 2.971331 3.7285144 2.2686723
## 220 5.783158 6.058402 4.8455430 3.643901 5.4650936 2.2015978
## 231 6.227190 6.321124 5.2113101 3.636817 5.2200840 2.1781667
## 377 6.551832 6.300919 6.1228585 4.418325 6.1022064 2.1732654
## 293 5.915336 6.080638 4.8708685 3.653822 5.4345342 2.1300423
## 300 5.732325 5.921019 4.3765380 3.599304 5.4150460 2.1030482
## 253 6.130708 6.402960 5.3185761 2.928461 3.9451288 2.0936459
## 333 6.324013 6.066677 5.5156219 4.202382 5.7418076 2.0909381
## 26 6.551317 6.279947 5.8465786 4.056513 5.4161552 1.9649097
## 152 6.228898 6.342086 5.1287271 2.585068 3.2486775 1.9499337
## 19 5.178263 5.620433 4.7638335 2.670168 3.2122434 1.8301616
## 248 5.902484 6.053099 4.5808071 2.293074 2.8602528 1.8004857
## 274 5.993028 5.766102 3.5218273 2.291376 2.3003808 1.7658176
## 366 5.256052 5.904005 4.5258761 2.444300 4.2702602 1.7563884
## 50 5.354570 5.983810 4.5095790 2.401527 4.1883094 1.6782098
## 255 5.054998 5.436349 4.4760573 2.649806 3.2815095 1.6563184
## 314 6.075319 6.118485 4.3938427 2.817650 4.1448534 1.6121760
## 109 5.209421 5.862053 4.5750702 2.391547 4.2897348 1.5860154
## 305 5.535437 5.632404 3.6602796 2.800438 4.2881581 1.5503930
## 356 5.763455 5.818317 4.1256131 2.792160 4.2162666 1.5311621
## 354 5.142093 5.783028 4.4485889 2.367013 4.2895406 1.5123095
## 216 5.330176 5.438840 3.2517146 2.757708 4.3284794 1.5107253
## 70 5.383730 5.522803 3.2001213 2.742834 4.3156069 1.4974701
## 5 5.708030 6.215632 5.0911949 2.367015 4.0989304 1.4681757
## 294 5.359463 5.934902 4.6547762 2.358059 4.2099975 1.4551039
## 340 5.335126 5.940974 4.6296657 2.349033 4.2253136 1.4480810
## 310 5.759855 5.796004 3.1645807 1.816586 2.3569623 1.4445262
## 244 6.089994 6.074681 4.6608339 2.778606 4.0711360 1.4416432
## 312 6.369464 6.197426 4.4925447 2.464412 3.3664594 1.4158327
## 298 5.670024 5.720208 3.1471325 2.097176 2.9691259 1.4058025
## 149 5.206877 5.315541 2.9720446 2.724192 4.3780575 1.4008426
## 211 5.512085 5.583322 2.4884141 1.452078 1.7511868 1.3907372
## 288 5.354283 5.472303 3.1902065 2.724008 4.3429057 1.3839613
## 313 5.760169 5.836084 3.4458080 1.899691 2.5640092 1.3627219
## 264 5.830031 5.905946 3.3457166 1.863484 2.5390511 1.3456130
## 277 5.904558 5.889136 3.7767637 2.012999 2.5507809 1.3228021
## 381 5.201145 5.298113 3.1000838 2.720491 4.4161568 1.3072745
## 128 5.060974 5.202385 2.1220924 1.434632 1.9408548 1.2996139
## 317 5.718838 5.703525 3.1924030 1.838259 2.4605175 1.2977098
## 338 5.027410 5.131395 1.9726887 1.654859 2.3551283 1.2964070
## 291 4.925027 5.031352 2.3621631 2.224550 3.4989965 1.2555543
## 234 5.109908 5.223250 2.3500509 1.644438 2.3801617 1.2493026
## 311 6.007986 5.826591 3.2672639 1.734234 2.1485884 1.2152527
## 35 4.676305 5.328936 3.4742813 2.248841 4.3474377 1.2148004
## 360 4.550569 4.696659 1.3062610 1.858782 3.0109760 1.1578584
## 316 5.362190 5.424070 2.7211042 1.725531 2.4695509 1.1533124
## 133 5.019526 5.594965 4.6668602 2.286413 4.2882560 1.1504010
## 166 5.181647 5.679893 4.7412284 2.267447 4.1925934 1.1005246
## 225 4.597295 4.724671 1.2511273 1.532265 2.4488534 1.0244122
## 185 5.056695 5.627455 4.7060735 2.185087 4.2196415 0.8832531
## 106 3.819789 4.000967 0.0000000 1.615822 2.9718686 0.7773095
## 23 4.201715 4.345466 0.4509558 1.229730 2.1180902 0.6796507
## 139 5.763283 5.893996 3.3057397 1.593385 2.7790038 0.5290082
## 223 5.875402 5.975706 3.4300045 1.541953 2.6360606 0.4695739
## 84 5.981392 6.053625 3.6773525 1.568544 2.6552389 0.4523523
## 33 5.966449 6.048040 3.6095699 1.540800 2.6562329 0.3475306
## 304 5.622567 5.704158 3.1463211 1.512940 2.7315471 0.3218229
## 113 5.624758 5.713366 3.1293772 1.492802 2.7169344 0.2631538
## 4 5.490305 5.578913 3.1069057 1.535527 2.8536597 0.2396920
## 250 5.176069 5.751756 2.7523837 0.000000 0.8922214 0.0000000
## ScoreM7 ScoreM8 RankOriginal RankNewScore RankM3 RankM4 RankM5 RankM6
## 352 9.1441598 7.0131427 6 5 20 20 110 1
## 1 8.8086323 6.8303646 28 25 36 28 125 2
## 308 8.9512090 6.9233040 14 14 26 30 130 3
## 229 9.3556066 7.0864254 3 4 14 17 127 4
## 183 8.9842125 7.1669431 27 26 30 21 94 5
## 372 9.0262323 7.1396728 25 19 32 31 109 6
## 10 8.7441320 6.8455688 30 28 42 36 135 7
## 209 8.7486679 6.8179200 39 42 40 27 106 8
## 361 8.8701502 6.9161581 44 45 37 23 93 9
## 345 9.7785055 6.8147767 4 3 2 1 26 10
## 30 8.8650614 6.9742023 37 41 34 37 122 11
## 324 9.0406773 7.1637326 34 34 27 26 96 12
## 268 9.3092118 6.6703209 8 7 8 8 60 13
## 87 8.8802863 6.9634512 15 22 33 45 144 14
## 76 8.9161890 6.9361614 53 50 38 24 89 15
## 83 8.7526736 6.8850339 49 53 45 34 107 16
## 213 8.5729230 6.9842903 73 68 61 40 116 17
## 328 8.5132090 6.5697398 23 40 49 49 152 18
## 48 8.4690362 7.1052608 139 124 66 29 81 19
## 309 8.7200304 6.9945142 43 48 52 44 138 20
## 357 8.6267114 6.9066829 75 77 51 22 85 21
## 321 9.7621049 6.9011298 2 2 3 7 63 22
## 160 8.9349836 7.0859304 16 24 29 47 148 23
## 110 7.9408403 6.9559731 120 107 107 54 169 24
## 256 8.2678645 6.5433957 78 113 56 35 113 25
## 222 8.4336823 7.1216772 158 149 68 32 77 26
## 322 9.0632988 6.6727335 38 43 16 6 35 27
## 85 9.4046922 6.6558365 7 8 5 5 50 28
## 245 10.0000000 7.0562004 1 1 1 3 53 29
## 254 9.3726013 6.9716059 20 17 13 10 57 30
## 193 8.6397743 6.5602027 50 57 35 33 78 31
## 342 9.2436396 6.6875029 11 12 11 14 65 32
## 157 9.7059724 7.0337659 10 9 4 2 19 33
## 261 8.8786588 6.3993400 31 31 19 18 70 34
## 265 9.2496282 6.7987464 12 15 12 15 64 35
## 22 8.8290644 7.0497630 24 39 46 51 153 36
## 196 8.2469592 6.8898861 159 152 70 43 99 37
## 283 9.3804264 6.6842946 19 18 7 4 28 38
## 170 8.8084457 6.7409647 13 16 28 53 142 39
## 266 8.6980857 6.5257174 46 49 23 38 84 40
## 95 9.0170559 6.7152898 32 29 18 25 67 41
## 198 6.2753036 5.3537773 174 102 150 158 251 42
## 252 9.2978834 6.8203525 9 11 10 19 68 43
## 278 9.5727565 7.0215598 5 6 6 13 62 44
## 289 7.3704729 6.5272527 52 13 103 160 261 45
## 130 8.7937961 6.8633837 35 37 44 48 124 46
## 341 7.9608432 6.6206097 70 79 84 61 171 47
## 226 9.0534226 6.7704879 18 21 17 42 90 48
## 58 7.2608264 6.4408198 67 38 111 159 258 49
## 55 9.2472918 7.0258416 29 33 15 16 56 50
## 73 8.7855400 6.0366010 33 23 21 9 12 51
## 174 8.7707794 6.0791320 36 27 22 11 10 52
## 246 8.4362479 5.7152423 22 20 25 39 61 53
## 186 8.2920473 5.7705586 42 35 48 41 59 54
## 239 8.8182161 6.2101194 40 30 24 12 8 55
## 344 7.0681650 5.1289176 121 122 88 57 118 56
## 169 7.3246795 5.9641268 217 241 151 50 31 57
## 148 3.4985830 2.2153860 277 300 296 144 206 58
## 235 4.0747576 2.4861705 208 239 244 150 214 59
## 191 3.8486815 2.1497637 220 258 243 149 215 60
## 224 6.6394232 5.1421305 118 117 155 92 177 61
## 27 4.2106346 2.8306027 177 190 253 161 235 62
## 189 4.7210819 3.2170347 170 175 232 152 216 63
## 7 4.5163977 3.3384713 178 186 251 156 223 64
## 67 7.6104022 5.4207483 77 80 73 56 74 65
## 184 7.9638451 5.0415181 55 51 31 46 6 66
## 249 4.6034986 3.2356276 147 158 237 164 234 67
## 325 4.2617098 3.0458375 201 221 260 165 230 68
## 120 6.4526487 3.7755058 97 170 81 55 82 69
## 286 5.6712245 5.1901857 380 381 199 52 43 70
## 72 6.8466159 4.5448344 89 70 92 65 114 71
## 378 6.1601032 6.3545407 280 289 190 206 304 72
## 37 7.2299114 5.6510938 74 155 104 66 165 73
## 279 6.4694952 4.0466140 128 207 99 58 83 74
## 32 6.2629974 6.1751292 259 267 166 208 312 75
## 331 6.0034130 6.2277196 278 287 195 229 318 76
## 320 6.1270775 6.0337989 218 252 170 225 319 77
## 46 6.4995814 6.4684341 225 237 160 209 311 78
## 159 4.6770505 3.5270493 213 256 250 154 212 79
## 259 6.3018019 6.2819480 257 270 167 210 310 80
## 271 6.4754538 4.2230129 107 180 118 63 133 81
## 362 6.0886931 6.1970506 287 298 183 211 305 82
## 182 5.7225071 6.1054144 329 339 223 207 296 83
## 369 5.7746002 6.1216149 324 325 213 212 299 84
## 192 6.6712668 4.4721116 163 260 106 60 71 85
## 349 6.6224942 4.4398326 113 183 108 64 129 86
## 326 6.6836655 4.5679586 145 236 101 59 69 87
## 382 6.3904047 4.2290271 197 288 115 62 66 88
## 210 6.0144591 6.5094943 327 335 218 217 300 89
## 240 5.7063387 6.2787953 340 351 224 214 293 90
## 111 6.3187421 3.9568577 76 66 102 105 140 91
## 197 2.8220583 2.9689502 332 343 357 235 314 92
## 350 6.5451872 4.1994852 51 52 93 118 145 93
## 258 6.5888357 4.1904654 58 56 91 113 141 94
## 200 6.1781121 4.4031814 131 119 156 123 137 95
## 219 6.2314936 3.5353983 106 76 62 124 115 96
## 6 3.3864995 3.4022630 284 307 332 237 316 97
## 168 4.2556147 3.1630486 199 218 271 194 239 98
## 315 3.5321862 3.5317899 271 309 324 238 317 99
## 339 6.3936790 4.2775612 96 109 152 97 117 100
## 126 4.9207663 2.4715593 188 223 176 120 101 101
## 122 6.4635818 4.3477780 91 98 148 101 120 102
## 158 4.4007995 1.5980759 195 233 179 122 91 103
## 199 6.0565199 3.6609281 160 131 95 127 102 104
## 371 6.2403763 4.0811052 98 87 87 153 154 105
## 233 4.5673898 3.0554280 240 259 269 171 175 106
## 116 5.8273435 4.0674952 132 172 153 157 179 107
## 64 6.4691303 4.1541288 80 97 132 104 131 108
## 202 6.0397334 4.4530440 155 139 133 173 193 109
## 347 6.1984050 3.8490506 143 133 79 128 103 110
## 90 5.1712328 3.6665732 192 174 240 166 168 111
## 218 9.8660913 8.1890414 17 10 9 136 149 112
## 119 5.0627386 2.2494662 104 120 140 137 128 113
## 14 4.4673922 3.3425657 286 285 303 170 167 114
## 237 4.7710831 3.6071067 216 208 292 178 204 115
## 93 5.0059458 3.5931817 236 228 261 168 166 116
## 153 3.6248481 3.2040730 318 328 319 195 227 117
## 207 4.6050286 1.9592582 182 216 177 140 123 118
## 21 6.4646439 3.7429901 63 69 90 91 80 119
## 99 4.7053448 3.6214229 272 277 298 167 158 120
## 272 4.2131831 1.8699066 250 294 207 143 121 121
## 123 4.8569071 3.7534662 245 250 290 175 178 122
## 190 6.4664830 3.6121448 110 84 64 129 76 123
## 363 6.2389351 3.1815988 69 71 69 72 58 124
## 155 6.8701326 4.6588215 64 81 119 116 134 125
## 40 6.6279953 3.5806236 105 73 59 100 52 126
## 146 3.7102177 3.1499036 314 331 316 196 231 127
## 176 4.5462083 2.0795160 224 271 193 135 92 128
## 187 6.6362881 3.6799800 117 83 58 99 48 129
## 243 6.1519453 3.6855755 127 134 137 94 86 130
## 117 3.1413213 4.0013564 353 366 373 241 309 131
## 145 6.6429857 4.8321939 103 126 175 115 112 132
## 131 6.2432922 3.2304394 56 62 75 95 75 133
## 162 5.0479120 3.7978925 232 240 277 169 164 134
## 227 6.4245265 3.6725467 140 103 65 109 49 135
## 241 6.2529746 3.6922190 184 146 109 110 33 136
## 171 5.9457446 2.5149243 62 61 72 67 23 137
## 79 6.5886723 4.6759360 119 141 162 117 111 138
## 36 4.5711102 3.1832290 193 193 272 190 213 139
## 80 5.9786069 3.6010196 94 94 131 146 136 140
## 351 4.8719382 4.0143872 260 263 304 179 188 141
## 297 6.0926676 3.1652752 108 116 85 82 54 142
## 135 5.4809096 4.0453148 186 211 186 174 190 143
## 88 5.8666531 3.2229738 124 132 127 112 72 144
## 163 5.9261839 3.1464443 72 65 96 141 104 145
## 215 6.1316377 3.7448169 61 60 123 151 143 146
## 156 6.4182455 3.6493797 93 96 83 85 51 147
## 114 5.9757566 3.4086974 114 90 105 155 132 148
## 318 5.7562115 3.0591503 102 112 125 134 98 149
## 18 6.2789816 3.5181867 45 54 82 138 126 150
## 355 5.9984413 3.3868422 146 148 120 86 38 151
## 108 6.5912533 3.4611957 83 86 54 111 55 152
## 20 4.7416958 3.4484643 268 269 280 172 159 153
## 330 6.3896027 4.9087265 167 209 200 121 100 154
## 81 6.3992605 3.7910520 144 138 94 106 39 155
## 49 5.7434240 3.1770168 142 147 139 119 73 156
## 42 5.9744466 3.1038856 65 75 80 142 108 157
## 17 6.1440356 3.5315441 99 105 116 126 79 158
## 3 5.5445676 3.3098051 149 159 163 147 119 159
## 134 5.6903174 2.6497350 115 130 112 71 18 160
## 164 6.2829902 3.1482073 88 72 63 125 46 161
## 275 5.8081945 3.1586208 109 100 110 148 105 162
## 177 5.4838441 3.6442331 136 106 158 186 183 163
## 129 4.7152593 3.0977701 222 204 255 177 160 164
## 51 5.8598017 2.6683600 95 108 98 77 21 165
## 376 5.8729408 2.6772558 81 95 89 83 32 166
## 118 5.6438419 2.6118138 126 137 100 73 16 167
## 56 5.5694389 3.6192740 125 88 141 189 187 168
## 143 5.8788388 2.6123191 86 101 97 79 22 169
## 343 6.0977705 3.1604371 151 115 78 130 34 170
## 285 6.0424117 2.6551720 57 82 74 74 25 171
## 173 5.9179133 3.8429820 87 74 126 182 181 172
## 16 5.6421602 2.4735524 112 143 117 76 20 173
## 65 5.7607033 2.8851015 111 140 129 88 41 174
## 154 5.6466591 2.6676475 130 160 128 80 17 175
## 127 6.1429265 2.8020109 92 67 60 133 45 176
## 334 5.4829965 3.7777614 138 114 173 192 202 177
## 208 5.3978805 2.4879797 137 164 146 107 37 178
## 302 5.4429719 3.6137526 172 151 161 181 174 179
## 103 6.0957416 2.6665094 48 89 67 75 30 180
## 247 5.5811547 2.8543766 148 173 147 81 14 181
## 281 5.3499318 2.3074470 135 171 144 98 27 182
## 296 5.3458413 2.7716630 171 187 149 114 44 183
## 301 5.1568073 2.1055470 166 200 157 102 24 184
## 257 5.4770200 2.4551783 123 153 134 103 29 185
## 181 5.1173294 3.2134180 141 135 171 191 197 186
## 78 5.3109720 3.6976828 173 145 194 193 196 187
## 75 5.5344015 3.2571969 152 176 143 145 87 188
## 307 5.2967686 2.1693036 122 177 142 108 47 189
## 374 5.2105280 2.4393681 200 247 168 84 13 190
## 115 6.9815002 10.0000000 212 215 315 324 382 191
## 12 6.8975827 9.9663317 227 212 317 325 381 192
## 96 5.9904281 4.2318909 60 64 130 204 218 193
## 144 5.2482514 2.7789731 231 275 197 89 11 194
## 337 6.8037746 3.2854717 41 46 47 68 5 195
## 214 6.7275325 3.2511662 59 55 50 69 3 196
## 327 5.3340194 3.1153246 185 210 159 163 139 197
## 329 5.7471140 2.8866572 134 99 86 162 88 198
## 77 6.5325645 4.8848948 54 47 114 200 209 199
## 206 6.7332813 3.4046516 101 111 39 93 7 200
## 282 6.3070352 4.8771629 100 93 136 199 205 201
## 137 6.4961374 4.9345383 68 59 121 203 208 202
## 303 5.4897013 4.0029007 85 58 169 236 232 203
## 125 6.5441847 4.9048441 47 44 113 201 210 204
## 141 6.4619411 4.9384790 71 63 122 202 207 205
## 151 6.6753183 3.2078332 66 78 53 70 4 206
## 290 6.9226715 3.3932343 26 32 43 87 9 207
## 38 6.7110647 3.3816614 79 85 55 78 2 208
## 101 4.9721656 2.6341811 154 161 172 180 161 209
## 205 3.1953414 3.7189344 285 266 337 310 375 210
## 359 6.1826639 3.1183625 84 91 76 132 40 211
## 375 6.1030125 3.0601326 90 104 77 139 42 212
## 66 6.9540616 3.4704131 21 36 41 96 15 213
## 54 3.2103952 3.6333095 269 243 325 311 376 214
## 370 3.0852809 3.3938975 321 308 345 289 344 215
## 44 5.5356730 3.7441102 116 144 145 205 217 216
## 292 6.3021139 3.2363509 82 92 71 131 36 217
## 260 5.6825973 3.4036776 133 156 124 176 151 218
## 136 6.4326751 3.3522087 190 220 57 90 1 219
## 335 2.9019573 3.4510320 302 284 347 312 373 220
## 365 4.6950831 2.6604121 210 244 198 184 156 221
## 380 3.1982994 3.6225880 291 268 330 309 363 222
## 60 4.7665423 2.5503809 183 203 181 183 157 223
## 165 5.1351983 2.6948101 129 136 154 188 163 224
## 11 4.7203883 2.6219138 214 246 202 187 155 225
## 97 5.0877857 2.8954492 156 192 174 185 162 226
## 91 4.4948073 3.4558855 221 196 229 243 245 227
## 9 4.4182702 3.3681730 194 123 245 256 253 228
## 161 3.0859165 2.9399674 323 320 339 272 302 229
## 238 3.3888753 2.7775720 297 299 320 255 262 230
## 175 4.2041837 3.6078177 244 184 284 263 269 231
## 57 4.1962845 3.9193727 256 197 262 294 323 232
## 267 3.1444400 2.8548438 328 324 340 260 263 233
## 368 2.6874142 3.3949247 352 357 370 296 335 234
## 31 3.0776251 2.7221812 341 342 338 254 246 235
## 194 4.8487711 3.5040016 179 129 209 244 241 236
## 61 4.7101598 3.3578417 180 128 217 246 242 237
## 228 4.9199980 4.1298435 191 118 205 277 294 238
## 71 2.8240352 2.8917931 261 249 331 313 366 239
## 63 3.1452874 2.6523288 330 332 327 253 248 240
## 112 4.3089398 3.4988329 237 178 270 261 265 241
## 105 4.5752478 2.5393653 176 162 191 215 199 242
## 94 4.0508496 3.9754176 252 195 282 308 345 243
## 39 4.0853199 3.6568041 255 201 249 297 332 244
## 25 4.7196057 2.8785259 246 254 182 226 189 245
## 269 4.1785800 3.9629969 241 185 257 301 337 246
## 221 2.3932682 2.7908435 325 306 358 316 367 247
## 284 4.7146171 2.7310097 165 142 185 220 198 248
## 62 4.2845466 3.9431606 249 194 247 295 326 249
## 201 4.2328801 3.4671854 211 169 265 274 298 250
## 379 4.7098520 3.5059870 202 163 221 248 244 251
## 92 2.5578042 2.8150686 303 290 348 317 372 252
## 236 4.4908634 4.1788618 265 205 242 286 306 253
## 142 4.7091947 2.7360035 161 154 187 219 200 254
## 179 4.7443013 4.1460446 253 198 230 278 278 255
## 295 4.1678567 3.9668618 267 219 268 300 334 256
## 89 4.1066715 4.2635443 283 248 289 304 333 257
## 15 4.6621739 2.7458627 164 168 192 216 194 258
## 13 4.8333614 4.1051195 205 150 216 284 308 259
## 336 4.5130632 3.0432756 181 127 219 251 243 260
## 273 4.1387620 3.1220706 206 167 259 268 267 261
## 124 4.5010134 2.6109698 187 199 196 218 192 262
## 107 2.8539949 2.4078379 306 301 336 266 259 263
## 172 4.3916828 2.6181058 219 214 206 223 186 264
## 69 3.0204304 3.5021735 301 282 322 330 379 265
## 276 4.8648585 2.8801431 209 224 164 228 195 266
## 251 4.6701690 4.1802060 270 213 235 282 283 267
## 332 3.8752137 3.9464654 292 261 309 307 339 268
## 203 4.6435720 2.9067908 274 281 189 230 184 269
## 180 4.3266577 4.2953347 273 229 267 298 327 270
## 299 4.4642901 3.8035280 281 242 241 275 250 271
## 59 4.3430631 2.6867738 189 234 233 213 185 272
## 138 4.6325781 3.7511865 223 166 220 281 271 273
## 2 2.4613204 2.5388071 288 276 343 319 371 274
## 367 4.4194769 2.6280425 203 231 212 221 191 275
## 242 4.6909946 3.9012110 290 255 222 259 237 276
## 319 4.2437394 3.1548413 226 181 248 262 249 277
## 98 4.1135392 3.8056662 247 189 274 306 331 278
## 132 4.3764698 3.0388293 235 182 228 245 222 279
## 121 4.2726959 4.0417797 310 274 279 283 272 280
## 41 4.3693845 2.9425949 198 165 227 250 238 281
## 217 4.6286356 2.5782671 168 191 184 222 201 282
## 346 2.2644518 2.3874200 311 292 352 321 369 283
## 167 4.2030541 3.9264176 296 262 275 292 303 284
## 230 3.5612525 2.6456936 207 230 299 265 286 285
## 74 2.2017242 2.3669106 315 303 354 320 368 286
## 323 4.3515090 2.8019550 238 257 236 227 182 287
## 373 3.9618932 3.0026085 242 202 273 273 257 288
## 104 4.6892276 3.5959903 279 253 225 247 233 289
## 52 5.1677592 3.2594421 263 321 180 197 147 290
## 287 4.1872240 3.2868282 254 238 263 264 247 291
## 263 2.4859085 1.8006320 313 302 342 279 264 292
## 188 4.1270766 2.3253715 234 235 226 240 203 293
## 29 3.5213655 4.2841892 345 316 321 315 347 294
## 195 4.3012782 3.8476999 298 265 264 288 277 295
## 8 4.2217248 3.4585030 251 227 266 276 266 296
## 68 3.7796930 2.6718024 300 329 302 224 172 297
## 82 2.1192885 2.0868713 304 295 351 326 370 298
## 150 4.2507664 2.8767292 215 188 234 257 240 299
## 212 3.1804607 1.4736932 228 217 285 249 224 300
## 140 5.5267685 4.9597348 175 125 208 267 252 301
## 232 5.6190833 4.9379952 157 110 201 271 256 302
## 34 5.5925378 4.9577038 153 121 203 270 254 303
## 262 4.2104564 2.9008756 282 311 283 234 180 304
## 353 1.2544612 0.0000000 348 304 297 328 329 305
## 28 4.1475340 3.5223392 308 278 256 287 260 306
## 147 3.7474741 1.8375220 266 272 238 242 173 307
## 86 4.1025473 1.9701653 264 264 215 233 150 308
## 280 3.1982852 3.9011208 317 319 326 343 378 309
## 45 2.1620649 1.0702569 299 291 333 290 255 310
## 270 3.6389615 2.6438580 239 222 276 293 285 311
## 100 5.7767209 4.5901413 243 251 214 239 170 312
## 364 4.7122291 1.7808620 169 245 138 198 95 313
## 306 5.2084562 3.1833763 233 225 178 231 146 314
## 178 1.0801014 0.3538869 376 370 335 340 324 315
## 348 4.1141428 3.7616927 248 232 252 318 336 316
## 358 3.4861540 3.8633899 350 333 318 322 321 317
## 204 1.3593971 0.2697127 295 293 360 314 313 318
## 53 4.0830846 0.7483421 162 157 135 232 97 319
## 43 4.8961787 3.0838949 150 226 165 258 219 320
## 24 3.3652667 0.8191050 204 206 210 252 176 321
## 102 2.3309322 0.8434290 346 349 306 291 221 322
## 47 3.5348913 2.2542213 196 179 211 323 315 323
## 220 2.3191032 0.5177418 331 314 281 302 225 324
## 231 2.6254754 0.7077917 276 280 246 303 236 325
## 377 4.3356986 2.5779499 229 283 188 269 211 326
## 293 2.3550896 0.5077637 316 310 278 299 226 327
## 300 2.3010657 0.6522414 337 330 313 305 229 328
## 253 3.2948991 2.2732085 289 273 239 327 307 329
## 333 4.0097221 2.6459428 262 313 231 280 220 330
## 26 4.0862714 2.4851347 230 286 204 285 228 331
## 152 3.1054211 2.2865518 275 279 254 342 328 332
## 19 3.7378986 3.7897386 366 360 286 339 330 333
## 248 2.7474676 2.3176524 320 317 300 352 342 334
## 274 2.4831206 2.6311526 307 348 341 353 362 335
## 366 1.7300820 0.6269293 361 336 305 345 284 336
## 50 1.8925378 0.7229523 357 322 307 346 292 337
## 255 3.5672689 3.8331942 372 368 310 341 325 338
## 314 2.2689501 1.1433916 294 305 312 329 295 339
## 109 1.9107184 0.9216004 362 340 301 347 279 340
## 305 1.7385190 0.9040717 347 358 329 331 282 341
## 356 2.1186188 1.1585349 333 345 314 332 289 342
## 354 1.8873801 0.9481435 368 347 311 349 280 343
## 216 1.8788788 1.3453188 360 367 356 334 275 344
## 70 1.8952267 1.3783374 354 364 359 335 276 345
## 5 2.3696763 1.0676185 339 296 258 348 297 346
## 294 2.0562621 0.9807200 356 327 294 350 290 347
## 340 2.0929172 1.0605808 359 326 295 351 287 348
## 310 1.3842136 1.1000477 336 346 363 365 360 349
## 244 2.2876636 0.9405361 293 312 293 333 301 350
## 312 2.2318455 1.1441337 258 297 308 344 322 351
## 298 1.5751601 1.2357547 342 352 364 359 341 352
## 149 1.8858964 1.5643555 363 372 369 336 270 353
## 211 0.8476329 0.9591138 349 362 374 379 377 354
## 288 1.9117893 1.4660889 358 365 362 337 274 355
## 313 1.5671390 1.1214200 335 341 346 361 353 356
## 264 1.6508810 1.4327544 326 334 350 362 355 357
## 277 2.6559397 2.7216657 319 338 323 360 354 358
## 381 1.8257104 1.5320133 364 373 368 338 268 359
## 128 0.6369855 1.1380410 370 375 377 380 374 360
## 317 1.3934404 1.1364277 338 355 361 364 357 361
## 338 0.7374817 1.1033774 373 376 378 368 361 362
## 291 1.1600152 1.1460059 375 377 375 357 320 363
## 234 0.8891280 1.3601493 369 374 376 369 359 364
## 311 1.4571784 1.1528305 305 344 355 366 364 365
## 35 1.5876505 1.0734066 377 371 344 356 273 366
## 360 0.5662315 1.3208289 379 379 379 363 338 367
## 316 1.0794471 1.2318000 355 369 372 367 356 368
## 133 1.9069644 0.9499003 374 361 291 354 281 369
## 166 2.1566817 1.0776063 365 356 287 355 291 370
## 225 0.5874150 1.6034749 378 378 380 376 358 371
## 185 2.1794463 1.2349963 371 359 288 358 288 372
## 106 0.0000000 1.5409426 382 382 382 370 340 373
## 23 0.2457946 1.5870825 381 380 381 381 365 374
## 139 2.1509588 2.2689950 334 337 353 371 346 375
## 223 2.1673149 2.1259442 322 323 349 373 352 376
## 84 2.3403715 2.1958816 309 315 328 372 351 377
## 33 2.2820859 2.1483794 312 318 334 374 350 378
## 304 2.0418298 2.2706841 344 354 365 377 348 379
## 113 2.0446693 2.2490424 343 353 366 378 349 380
## 4 1.9633440 2.2559544 351 363 367 375 343 381
## 250 0.3733262 0.5867334 367 350 371 382 380 382
## RankM7 RankM8
## 352 16 16
## 1 32 32
## 308 23 24
## 229 10 9
## 183 22 4
## 372 20 6
## 10 39 31
## 209 38 34
## 361 28 25
## 345 3 35
## 30 29 19
## 324 19 5
## 268 11 43
## 87 26 21
## 76 25 23
## 83 37 29
## 213 44 18
## 328 45 46
## 48 46 8
## 309 40 17
## 357 43 26
## 321 4 27
## 160 24 10
## 110 54 22
## 256 50 48
## 222 48 7
## 322 17 42
## 85 7 44
## 245 1 11
## 254 9 20
## 193 42 47
## 342 15 40
## 157 5 13
## 261 27 54
## 265 13 36
## 22 30 12
## 196 51 28
## 283 8 41
## 170 33 38
## 266 41 50
## 95 21 39
## 198 108 72
## 252 12 33
## 278 6 15
## 289 56 49
## 130 34 30
## 341 53 45
## 226 18 37
## 58 58 53
## 55 14 14
## 73 35 65
## 174 36 64
## 246 47 69
## 186 49 68
## 239 31 59
## 344 60 75
## 169 57 67
## 148 295 317
## 235 278 297
## 191 283 320
## 224 75 74
## 27 260 255
## 189 207 212
## 7 232 201
## 67 55 71
## 184 52 76
## 249 227 209
## 325 253 237
## 120 94 141
## 286 158 73
## 72 66 91
## 378 118 55
## 37 59 70
## 279 88 114
## 32 109 61
## 331 133 58
## 320 123 66
## 46 85 52
## 159 220 174
## 259 105 56
## 271 87 103
## 362 128 60
## 182 154 63
## 369 149 62
## 192 73 92
## 349 78 94
## 326 71 90
## 382 100 102
## 210 132 51
## 240 155 57
## 111 102 123
## 197 317 241
## 350 82 104
## 258 80 105
## 200 117 95
## 219 114 171
## 6 298 191
## 168 254 221
## 315 293 172
## 339 99 99
## 126 195 300
## 122 92 96
## 158 241 333
## 199 129 155
## 371 112 112
## 233 230 236
## 116 146 113
## 64 89 108
## 202 131 93
## 347 115 131
## 90 185 154
## 218 2 3
## 119 191 315
## 14 237 200
## 237 203 166
## 93 193 169
## 153 289 215
## 207 226 327
## 21 91 147
## 99 216 161
## 272 259 328
## 123 200 143
## 190 90 164
## 363 113 218
## 155 65 88
## 40 77 170
## 146 287 225
## 176 231 325
## 187 76 152
## 243 119 151
## 117 308 119
## 145 74 86
## 131 111 210
## 162 192 137
## 227 96 153
## 241 110 150
## 171 139 295
## 79 81 87
## 36 229 217
## 80 136 167
## 351 198 117
## 297 127 220
## 135 173 115
## 88 144 211
## 163 140 227
## 215 122 145
## 156 97 157
## 114 137 188
## 318 151 235
## 18 107 176
## 355 134 195
## 108 79 183
## 20 206 187
## 330 101 82
## 81 98 138
## 49 153 219
## 42 138 231
## 17 120 173
## 3 166 202
## 134 156 278
## 164 106 226
## 275 147 223
## 177 171 158
## 129 210 232
## 51 145 272
## 376 143 270
## 118 160 288
## 56 165 162
## 143 142 287
## 343 125 222
## 285 130 276
## 173 141 133
## 16 161 299
## 65 150 249
## 154 159 273
## 127 121 257
## 334 172 140
## 208 176 296
## 302 175 163
## 103 126 274
## 247 164 254
## 281 177 308
## 296 178 262
## 301 187 323
## 257 174 301
## 181 189 213
## 78 180 149
## 75 168 206
## 307 181 319
## 374 183 302
## 115 61 1
## 12 64 2
## 96 135 101
## 144 182 260
## 337 67 204
## 214 69 207
## 327 179 230
## 329 152 248
## 77 84 84
## 206 68 189
## 282 103 85
## 137 86 81
## 303 170 118
## 125 83 83
## 141 93 79
## 151 72 214
## 290 63 194
## 38 70 196
## 101 194 282
## 205 304 148
## 359 116 229
## 375 124 234
## 66 62 181
## 54 301 159
## 370 311 193
## 44 167 146
## 292 104 208
## 260 157 190
## 136 95 199
## 335 314 186
## 365 217 275
## 380 302 160
## 60 204 292
## 165 188 268
## 11 208 285
## 97 190 246
## 91 235 185
## 9 240 197
## 161 310 243
## 238 297 261
## 175 262 165
## 57 264 127
## 267 307 253
## 368 319 192
## 31 312 266
## 194 201 178
## 61 213 198
## 228 196 110
## 71 316 247
## 63 306 277
## 112 249 180
## 105 228 293
## 94 279 120
## 39 276 156
## 25 209 251
## 269 266 122
## 221 326 259
## 284 211 265
## 62 251 125
## 201 257 182
## 379 214 177
## 92 322 256
## 236 236 107
## 142 215 264
## 179 205 109
## 295 267 121
## 89 273 100
## 15 222 263
## 13 202 111
## 336 233 238
## 273 269 228
## 124 234 289
## 107 315 303
## 172 242 286
## 69 313 179
## 276 199 250
## 251 221 106
## 332 282 124
## 203 223 244
## 180 248 97
## 299 238 136
## 59 246 269
## 138 224 144
## 2 325 294
## 367 239 284
## 242 218 128
## 319 256 224
## 98 272 135
## 132 243 239
## 121 252 116
## 41 244 242
## 217 225 290
## 346 336 304
## 167 263 126
## 230 291 280
## 74 338 305
## 323 245 258
## 373 281 240
## 104 219 168
## 52 186 205
## 287 265 203
## 263 323 330
## 188 270 306
## 29 294 98
## 195 250 132
## 8 258 184
## 68 284 271
## 82 344 324
## 150 255 252
## 212 305 338
## 140 169 77
## 232 162 80
## 34 163 78
## 262 261 245
## 353 370 382
## 28 268 175
## 147 285 329
## 86 274 326
## 280 303 129
## 45 341 360
## 270 288 281
## 100 148 89
## 364 212 331
## 306 184 216
## 178 372 380
## 348 271 142
## 358 296 130
## 204 369 381
## 53 277 372
## 43 197 233
## 24 299 371
## 102 330 370
## 47 292 314
## 220 331 378
## 231 321 374
## 377 247 291
## 293 328 379
## 300 332 375
## 253 300 310
## 333 280 279
## 26 275 298
## 152 309 309
## 19 286 139
## 248 318 307
## 274 324 283
## 366 361 376
## 50 355 373
## 255 290 134
## 314 335 352
## 109 352 368
## 305 360 369
## 356 345 348
## 354 356 366
## 216 358 343
## 70 354 341
## 5 327 361
## 294 347 363
## 340 346 362
## 310 368 357
## 244 333 367
## 312 337 351
## 298 364 345
## 149 357 335
## 211 375 364
## 288 351 339
## 313 365 355
## 264 362 340
## 277 320 267
## 381 359 337
## 128 377 353
## 317 367 354
## 338 376 356
## 291 371 350
## 234 374 342
## 311 366 349
## 35 363 359
## 360 379 344
## 316 373 347
## 133 353 365
## 166 342 358
## 225 378 332
## 185 339 346
## 106 382 336
## 23 381 334
## 139 343 312
## 223 340 322
## 84 329 318
## 33 334 321
## 304 349 311
## 113 348 316
## 4 350 313
## 250 380 377
#Boxplots of ranking differences by model (CSV in long format: Difference, Model)
hist=read.csv('c:/users/lfult/documents/minimum wage/for sir/for histograms.csv')
boxplot(hist$Difference~hist$Model, horizontal=FALSE, notch=TRUE, col=rainbow(6),
        ylab='Difference in Ranking', xlab='Model Number')
#Basic Operating System Stuff
import os
import gc #garbage collector
import random #random seed generator
#Basic dataframe, array, and math stuff
import pandas as pd #data frame
import math #math functions
import numpy as np #numerical package
#TensorFlow
import tensorflow as tf
from tensorflow.python.client import device_lib #GPU Check
import tensorflow.keras #keras
from tensorflow.keras import layers
from tensorflow.keras import Sequential, Input, Model
from tensorflow.keras.layers import Dense, Dropout, Flatten, Add, Activation, ZeroPadding2D, GlobalAveragePooling2D
from tensorflow.keras.layers import BatchNormalization,Conv2D, AveragePooling2D, MaxPooling2D, GlobalMaxPooling2D, LeakyReLU
from tensorflow.keras.callbacks import EarlyStopping, ReduceLROnPlateau, ModelCheckpoint #early stopping, learning-rate reduction on plateau, checkpointing
from tensorflow.keras.initializers import glorot_uniform, he_uniform #to initialize random weights for filters
from tensorflow.keras.preprocessing.image import ImageDataGenerator
from tensorflow.keras.preprocessing import image as image_utils
from tensorflow.keras.applications.imagenet_utils import preprocess_input, decode_predictions
from tensorflow.keras.models import Model, load_model #Can't do much without a model
from tensorflow.keras import utils
from tensorflow.keras.utils import get_file, plot_model, to_categorical, model_to_dot
from tensorflow.keras.metrics import AUC
from tensorflow.keras.preprocessing import image
from tensorflow.keras.applications import ResNet50V2
from tensorflow.keras.optimizers import Adam
import tensorflow.keras.backend as K #let's write our own metrics and loss functions
#Graphing
import seaborn as sns #statistical graphics
from IPython.display import SVG #display SVG model diagrams
import matplotlib.pyplot as plt #plotting
import matplotlib #image save
from matplotlib.pyplot import imshow #show images
from PIL import Image #another image utility
import pydot #needed by plot_model
full=r.mydata #the R data frame 'mydata', accessed from Python via reticulate
train=full.iloc[:, [3,4,5,6,7,8,9,10,11]] #nine feature columns (X1A-X3C)
train
## X1A X1B X1C ... X3A X3B X3C
## 0 0.876454 0.679524 0.821950 ... 0.296081 0.409132 0.978251
## 1 0.469000 0.301275 -2.092300 ... 0.427759 0.556290 -0.567498
## 2 -0.306983 0.587005 0.216335 ... -0.457682 -0.620902 1.116972
## 3 -0.908877 0.688564 -1.661572 ... -1.557264 -1.178275 -1.043113
## 4 0.401644 0.230071 0.146937 ... 0.352563 0.304395 -2.311420
## .. ... ... ... ... ... ... ...
## 377 0.016846 0.356482 0.291466 ... -2.454380 -0.190875 -1.796170
## 378 0.269602 -0.274356 -0.357070 ... 0.988170 1.117096 -0.468412
## 379 0.300280 0.191226 -1.955005 ... -0.298687 -0.309723 -0.547100
## 380 -0.960661 -2.092283 -0.660316 ... -1.793695 -1.872533 -1.162017
## 381 0.210762 0.225109 0.727711 ... -3.195241 -0.956891 1.116972
##
## [382 rows x 9 columns]
train2=full.iloc[:, [3,4,5,6,7,8,9,12,11]] #same selection except column 12 replaces column 10
autoencoder = Sequential()
autoencoder.add(Input(shape=(9,)))
autoencoder.add(Dense(3, activation='elu', name='weights'))
autoencoder.add(Dense(1, activation='linear', name="bottleneck"))
autoencoder.compile(loss='mean_squared_error', optimizer = Adam())
autoencoder.summary()
## Model: "sequential_1"
## ┌─────────────────────────────────┬───────────────────────────┬────────────┐
## │ Layer (type) │ Output Shape │ Param # │
## ├─────────────────────────────────┼───────────────────────────┼────────────┤
## │ weights (Dense) │ (None, 3) │ 30 │
## ├─────────────────────────────────┼───────────────────────────┼────────────┤
## │ bottleneck (Dense) │ (None, 1) │ 4 │
## └─────────────────────────────────┴───────────────────────────┴────────────┘
## Total params: 34 (136.00 B)
## Trainable params: 34 (136.00 B)
## Non-trainable params: 0 (0.00 B)
plot_model(autoencoder)
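The 34 parameters in the summary follow from the Dense rule, inputs*units + units (weights plus biases); a quick arithmetic check, added here as a sketch:
print(9*3 + 3) #'weights' layer: 30 parameters
print(3*1 + 1) #'bottleneck' layer: 4 parameters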
autoencoder2 = Sequential()
autoencoder2.add(Input(shape=(9,)))
autoencoder2.add(Dense(3, activation='elu', name='weights'))
autoencoder2.add(Dense(1, activation='linear', name="bottleneck"))
autoencoder2.compile(loss='mean_squared_error', optimizer = Adam())
#inputs double as targets; validation reuses the training data (no holdout)
mod10 = autoencoder.fit(train, train, batch_size=128, epochs=1000, verbose=0, validation_data=(train, train))
autoencoder.save("c:/users/lfult/documents/minimum wage/encoded.h5")
## WARNING:absl:You are saving your model as an HDF5 file via `model.save()` or `keras.saving.save_model(model)`. This file format is considered legacy. We recommend using instead the native Keras format, e.g. `model.save('my_model.keras')` or `keras.saving.save_model(model, 'my_model.keras')`.
mod11 = autoencoder2.fit(train2,train2, batch_size=128, epochs=1000, verbose=0, validation_data=(train2,train2))
autoencoder.save("c:/users/lfult/documents/minimum wage/encoded2.h5")
## WARNING:absl:You are saving your model as an HDF5 file via `model.save()` or `keras.saving.save_model(model)`. This file format is considered legacy. We recommend using instead the native Keras format, e.g. `model.save('my_model.keras')` or `keras.saving.save_model(model, 'my_model.keras')`.
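Note that the model above has no decoder: Keras broadcasts the nine-column target against the single linear output inside the mean-squared-error loss, so the bottleneck learns a one-dimensional composite score rather than a full reconstruction. For comparison, a conventional symmetric autoencoder would mirror the encoder and read scores through an intermediate Model. A minimal sketch using the imports above (not the fitted model):
inp = Input(shape=(9,))
encoded = Dense(3, activation='elu')(inp)
bottleneck = Dense(1, activation='linear', name='bottleneck')(encoded)
decoded = Dense(3, activation='elu')(bottleneck)
out = Dense(9, activation='linear')(decoded)
sym_ae = Model(inp, out) #9 -> 3 -> 1 -> 3 -> 9
sym_ae.compile(loss='mean_squared_error', optimizer=Adam())
#after sym_ae.fit(train, train, ...), the 1-D scores come from:
encoder = Model(inp, sym_ae.get_layer('bottleneck').output)
#scores = encoder.predict(train)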
rating=pd.DataFrame(np.round(autoencoder.predict(train),2), columns=['Rating'])
## 12/12 ━━━━━━━━━━━━━━━━━━━━ 0s 1ms/step
rating2=pd.DataFrame(np.round(autoencoder2.predict(train2),2), columns=['Rating'])
## 12/12 ━━━━━━━━━━━━━━━━━━━━ 0s 2ms/step
rating['Geo']=full.GeoName
rating=rating.sort_values(by='Rating', ascending=False)
rating['Ranking']=np.arange(1,383)
rating2['Geo']=full.GeoName
rating2=rating2.sort_values(by='Rating', ascending=False)
rating2['Ranking']=np.arange(1,383)
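As a quick agreement check between the two encodings (a sketch, assuming the Geo labels are unique merge keys):
agree = rating.merge(rating2, on='Geo', suffixes=('_1','_2'))
print(agree['Ranking_1'].corr(agree['Ranking_2'], method='spearman'))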
library(reticulate)
## Warning: package 'reticulate' was built under R version 4.3.2
py$rating[373:382,]
## Rating Geo Ranking
## 177 -1.02 Kahului-Wailuku-Lahaina, HI MSA 373
## 290 -1.09 Riverside-San Bernardino-Ontario, CA MSA 374
## 18 -1.09 Atlantic City-Hammonton, NJ MSA 375
## 254 -1.24 Ocean City, NJ MSA 376
## 34 -1.34 Binghamton, NY MSA 377
## 224 -1.43 Merced, CA MSA 378
## 359 -1.44 Visalia-Porterville, CA MSA 379
## 22 -1.60 Bakersfield, CA MSA 380
## 105 -2.09 El Centro, CA MSA 381
## 285 -2.36 Rapid City, SD MSA 382
py$rating2[373:382,]
## Rating Geo Ranking
## 177 -1.02 Kahului-Wailuku-Lahaina, HI MSA 373
## 290 -1.09 Riverside-San Bernardino-Ontario, CA MSA 374
## 18 -1.09 Atlantic City-Hammonton, NJ MSA 375
## 254 -1.24 Ocean City, NJ MSA 376
## 34 -1.34 Binghamton, NY MSA 377
## 224 -1.43 Merced, CA MSA 378
## 359 -1.44 Visalia-Porterville, CA MSA 379
## 22 -1.60 Bakersfield, CA MSA 380
## 105 -2.09 El Centro, CA MSA 381
## 285 -2.36 Rapid City, SD MSA 382