Library & Data import

library(Lahman) #data for this project
library(fastDummies)
library(corrplot)
library(purrr)
library(tidyverse)
library(ggplot2)
library(plyr)
library(dplyr)
library(data.table)
library(arules)
library(igraph)
library(lattice)
library(lmtest)
library(caret)
library(nnet)
library(neuralnet)
library(MASS)
library(class)
library(factoextra)
library(FactoMineR)
library(arm)
library(plm)
library(plotly)
library(gganimate)
library(gapminder)
library(lubridate)
library(ggthemes)
library(ggrepel)
theme_set(theme_classic())

options(scipen=999)

#Batting <- read.csv("Batting.csv")
#not needed: the data come from the Lahman package loaded above

EDA

Batting Dataset

#Histogram of Batting

set.seed(1234)
df <- Batting[,c(5,7)]
df <- subset(df, !(lgID == "NA"))
df <- subset(df, (lgID == "AL")|(lgID == "NL"))
mu <- ddply(df, "lgID", summarise, grp.mean=mean(AB))
df1 <- ggplot(df, aes(x=AB, fill=lgID, color=lgID)) +  geom_histogram(position="identity", alpha=0.5) 
df1 +geom_vline(data=mu, aes(xintercept=grp.mean, color=lgID),linetype="dashed")

df2 <- Batting[,c(5,8)]
df2 <- subset(df2, !(lgID == "NA"))
df2 <- subset(df2, (lgID == "AL")|(lgID == "NL"))
mu2 <- ddply(df2, "lgID", summarise, grp.mean2=mean(R))
ggplot(df2, aes(x=R, fill=lgID, color=lgID)) +  geom_histogram(position="identity", alpha=0.5)+geom_vline(data=mu2, aes(xintercept=grp.mean2, color=lgID),linetype="dashed")

df3 <- Batting[,c(5,9)]
df3 <- subset(df3, !(lgID == "NA"))
df3 <- subset(df3, (lgID == "AL")|(lgID == "NL"))
mu3 <- ddply(df3, "lgID", summarise, grp.mean3=mean(H))
ggplot(df3, aes(x=H, fill=lgID, color=lgID)) +  geom_histogram(position="identity", alpha=0.5)+geom_vline(data=mu3, aes(xintercept=grp.mean3, color=lgID),linetype="dashed")

df4 <- Batting[,c(5,10)]
df4 <- subset(df4, !(lgID == "NA"))
df4 <- subset(df4, (lgID == "AL")|(lgID == "NL"))
mu4 <- ddply(df4, "lgID", summarise, grp.mean4=mean(X2B))
ggplot(df4, aes(x=X2B, fill=lgID, color=lgID)) +  geom_histogram(position="identity", alpha=0.5)+geom_vline(data=mu4, aes(xintercept=grp.mean4, color=lgID),linetype="dashed")

df5 <- Batting[,c(5,11)]
df5 <- subset(df5, !(lgID == "NA"))
df5 <- subset(df5, (lgID == "AL")|(lgID == "NL"))
mu5 <- ddply(df5, "lgID", summarise, grp.mean5=mean(X3B))
ggplot(df5, aes(x=X3B, fill=lgID, color=lgID)) +  geom_histogram(position="identity", alpha=0.5)+geom_vline(data=mu5, aes(xintercept=grp.mean5, color=lgID),linetype="dashed")

df6 <- Batting[,c(5,12)]
df6 <- subset(df6, !(lgID == "NA"))
df6 <- subset(df6, (lgID == "AL")|(lgID == "NL"))
mu6 <- ddply(df6, "lgID", summarise, grp.mean6=mean(HR))
ggplot(df6, aes(x=HR, fill=lgID, color=lgID)) +  geom_histogram(position="identity", alpha=0.5)+geom_vline(data=mu6, aes(xintercept=grp.mean6, color=lgID),linetype="dashed")

df7 <- Batting[,c(5,13)]
df7 <- subset(df7, !(lgID == "NA"))
df7 <- subset(df7, (lgID == "AL")|(lgID == "NL"))
mu7 <- ddply(df7, "lgID", summarise, grp.mean7=mean(RBI))
ggplot(df7, aes(x=RBI, fill=lgID, color=lgID)) +  geom_histogram(position="identity", alpha=0.5)+geom_vline(data=mu7, aes(xintercept=grp.mean7, color=lgID),linetype="dashed")

df8 <- Batting[,c(5,14)]
df8 <- subset(df8, !(lgID == "NA"))
df8 <- subset(df8, (lgID == "AL")|(lgID == "NL"))
mu8 <- ddply(df8, "lgID", summarise, grp.mean8=mean(SB))
ggplot(df8, aes(x=SB, fill=lgID, color=lgID)) +  geom_histogram(position="identity", alpha=0.5)+geom_vline(data=mu8, aes(xintercept=grp.mean8, color=lgID),linetype="dashed")

df9 <- Batting[,c(5,15)]
df9 <- subset(df9, !(lgID == "NA"))
df9 <- subset(df9, (lgID == "AL")|(lgID == "NL"))
mu9 <- ddply(df9, "lgID", summarise, grp.mean9=mean(CS))
ggplot(df9, aes(x=CS, fill=lgID, color=lgID)) +  geom_histogram(position="identity", alpha=0.5)+geom_vline(data=mu9, aes(xintercept=grp.mean9, color=lgID),linetype="dashed")

df10 <- Batting[,c(5,16)]
df10 <- subset(df10, !(lgID == "NA"))
df10 <- subset(df10, (lgID == "AL")|(lgID == "NL"))
mu10 <- ddply(df10, "lgID", summarise, grp.mean10=mean(BB))
ggplot(df10, aes(x=BB, fill=lgID, color=lgID)) +  geom_histogram(position="identity", alpha=0.5)+geom_vline(data=mu10, aes(xintercept=grp.mean10, color=lgID),linetype="dashed")

df11 <- Batting[,c(5,17)]
df11 <- subset(df11, !(lgID == "NA"))
df11 <- subset(df11, (lgID == "AL")|(lgID == "NL"))
mu11 <- ddply(df11, "lgID", summarise, grp.mean11=mean(SO))
ggplot(df11, aes(x=SO, fill=lgID, color=lgID)) +  geom_histogram(position="identity", alpha=0.5)+geom_vline(data=mu11, aes(xintercept=grp.mean11, color=lgID),linetype="dashed")

df12 <- Batting[,c(5,18)]
df12 <- subset(df12, !(lgID == "NA"))
df12 <- subset(df12, (lgID == "AL")|(lgID == "NL"))
mu12 <- ddply(df12, "lgID", summarise, grp.mean12=mean(IBB))
ggplot(df12, aes(x=IBB, fill=lgID, color=lgID)) +  geom_histogram(position="identity", alpha=0.5)+geom_vline(data=mu12, aes(xintercept=grp.mean12, color=lgID),linetype="dashed")

df13 <- Batting[,c(5,19)]
df13 <- subset(df13, !(lgID == "NA"))
df13 <- subset(df13, (lgID == "AL")|(lgID == "NL"))
mu13 <- ddply(df13, "lgID", summarise, grp.mean13=mean(HBP))
ggplot(df13, aes(x=HBP, fill=lgID, color=lgID)) +  geom_histogram(position="identity", alpha=0.5)+geom_vline(data=mu13, aes(xintercept=grp.mean13, color=lgID),linetype="dashed")

df14 <- Batting[,c(5,20)]
df14 <- subset(df14, !(lgID == "NA"))
df14 <- subset(df14, (lgID == "AL")|(lgID == "NL"))
mu14 <- ddply(df14, "lgID", summarise, grp.mean14=mean(SH))
ggplot(df14, aes(x=SH, fill=lgID, color=lgID)) +  geom_histogram(position="identity", alpha=0.5)+geom_vline(data=mu14, aes(xintercept=grp.mean14, color=lgID),linetype="dashed")

df15 <- Batting[,c(5,21)]
df15 <- subset(df15, !(lgID == "NA"))
df15 <- subset(df15, (lgID == "AL")|(lgID == "NL"))
mu15 <- ddply(df15, "lgID", summarise, grp.mean15=mean(SF))
ggplot(df15, aes(x=SF, fill=lgID, color=lgID)) +  geom_histogram(position="identity", alpha=0.5)+geom_vline(data=mu15, aes(xintercept=grp.mean15, color=lgID),linetype="dashed")

df16 <- Batting[,c(5,22)]
df16 <- subset(df16, !(lgID == "NA"))
df16 <- subset(df16, (lgID == "AL")|(lgID == "NL"))
mu16 <- ddply(df16, "lgID", summarise, grp.mean16=mean(GIDP))
ggplot(df16, aes(x=GIDP, fill=lgID, color=lgID)) +  geom_histogram(position="identity", alpha=0.5)+geom_vline(data=mu16, aes(xintercept=grp.mean16, color=lgID),linetype="dashed")

The distributions are highly right-skewed, and several of the variables contain outliers.
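The sixteen nearly identical blocks above differ only in which Batting column is plotted. As a sketch (assuming the same packages loaded above), a small helper plus a loop reproduces them with far less repetition; the helper name plot_batting_hist is ours, not part of any package.

plot_batting_hist <- function(stat) {
  #keep only AL/NL rows and the column of interest, dropping missing values
  df <- subset(Batting, lgID %in% c("AL", "NL"), select = c("lgID", stat))
  df <- na.omit(df)
  #per-league mean for the dashed reference line
  mu <- aggregate(df[[stat]], by = list(lgID = df$lgID), FUN = mean)
  ggplot(df, aes(x = .data[[stat]], fill = lgID, color = lgID)) +
    geom_histogram(position = "identity", alpha = 0.5) +
    geom_vline(data = mu, aes(xintercept = x, color = lgID), linetype = "dashed") +
    labs(title = stat)
}

for (stat in c("AB", "R", "H", "X2B", "X3B", "HR", "RBI", "SB", "CS",
               "BB", "SO", "IBB", "HBP", "SH", "SF", "GIDP")) {
  print(plot_batting_hist(stat))
}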

Pitching

df_pitching1 <- Pitching[,c(5,17)]
df_pitching1 <- subset(df_pitching1, !(lgID == "NA"))
df_pitching1 <- subset(df_pitching1, (lgID == "AL")|(lgID == "NL"))
mu_pitching1 <- ddply(df_pitching1, "lgID", summarise, grp.mean1=mean(BB))
ggplot(df_pitching1, aes(x=BB, fill=lgID, color=lgID)) +  geom_histogram(aes(y=..density..), position="identity", alpha=0.5)+geom_density(alpha = 0.6)+geom_vline(data=mu_pitching1, aes(xintercept=grp.mean1, color=lgID),linetype="dashed") +scale_color_manual(values=c("#999999", "#E69F00", "#56B4E9"))+
  scale_fill_manual(values=c("#999999", "#E69F00", "#56B4E9")) +labs(title="Base on Balls (Walks) by League", x="BB", y = "Density")+
theme_classic()

df_pitching2 <- Pitching[,c(5,18)]
df_pitching2 <- subset(df_pitching2, !(lgID == "NA"))
df_pitching2 <- subset(df_pitching2, (lgID == "AL")|(lgID == "NL"))
mu_pitching2 <- ddply(df_pitching2, "lgID", summarise, grp.mean2=mean(SO))
ggplot(df_pitching2, aes(x=SO, fill=lgID, color=lgID)) +  geom_histogram(aes(y=..density..), position="identity", alpha=0.5)+geom_density(alpha = 0.6)+geom_vline(data=mu_pitching2, aes(xintercept=grp.mean2, color=lgID),linetype="dashed") +scale_color_manual(values=c("#999999", "#E69F00", "#56B4E9"))+
  scale_fill_manual(values=c("#999999", "#E69F00", "#56B4E9")) +labs(title="Strikeouts by League", x="SO", y = "Density")+
theme_classic()

df_pitching3 <- Pitching[,c(5,20)]
df_pitching3 <- subset(df_pitching3, !(lgID == "NA"))
df_pitching3 <- subset(df_pitching3, (lgID == "AL")|(lgID == "NL"))
mu_pitching3 <- ddply(df_pitching3, "lgID", summarise, grp.mean3=mean(ERA))
ggplot(df_pitching3, aes(x=ERA, fill=lgID, color=lgID)) +  geom_histogram(aes(y=..density..), position="identity", alpha=0.5)+geom_density(alpha = 0.6)+geom_vline(data=mu_pitching3, aes(xintercept=grp.mean3, color=lgID),linetype="dashed") +scale_color_manual(values=c("#999999", "#E69F00", "#56B4E9"))+
  scale_fill_manual(values=c("#999999", "#E69F00", "#56B4E9")) +labs(title="Earned Run Average by League", x="ERA", y = "Density")+
theme_classic()
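As a sketch, the three pitching histograms can also be drawn in one faceted figure by reshaping to long format (tidyr's pivot_longer, attached via tidyverse); after_stat() assumes a reasonably recent ggplot2.

pitch_long <- subset(Pitching, lgID %in% c("AL", "NL"),
                     select = c("lgID", "BB", "SO", "ERA"))
pitch_long <- na.omit(pitch_long)
pitch_long <- pivot_longer(pitch_long, c(BB, SO, ERA),
                           names_to = "stat", values_to = "value")

ggplot(pitch_long, aes(x = value, fill = lgID, color = lgID)) +
  geom_histogram(aes(y = after_stat(density)), position = "identity", alpha = 0.5) +
  facet_wrap(~stat, scales = "free") +
  labs(title = "Pitching: BB, SO, and ERA by League", x = NULL, y = "Density") +
  theme_classic()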

Teams

#keep only yearID, franchID, and Rank

teams0 <- Teams[,c(1,4,6)]
teams0 <- teams0[order(teams0[,"yearID"],teams0[,"Rank"]),]
teams0_1 <- subset(teams0, Rank == 1 )

ggplot(data = teams0_1, mapping = aes(x = yearID, y = franchID, color = franchID)) + 
    geom_line()

The plot shows which franchise finished in first place (Rank 1) each year.
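A quick complementary summary (a sketch using base R) is to tally how many first-place seasons each franchise has had:

#count of first-place (Rank 1) seasons per franchise, top 10
head(sort(table(teams0_1$franchID), decreasing = TRUE), 10)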

Top 10 Player Salaries

top_sal <- merge(x=Salaries, y=Master, by="playerID", all.x=TRUE)
top_sal <- top_sal[, c("yearID", "salary", "nameFirst", "nameLast")]
top_sal$name <- paste(top_sal$nameFirst,top_sal$nameLast)
top_sal <- top_sal[, c("yearID", "salary", "name")]
top_sal$yearID <- as.factor(top_sal$yearID)

a <- top_sal %>%
  group_by(yearID) %>%
  top_n(n = 10, wt = salary) %>% arrange(-desc(yearID))

results <- list()
for (i in 1985:2016) {
p <- top_sal  %>%  filter (yearID == i) %>% arrange(desc(salary)) %>% top_n(10, wt = salary) %>% mutate(Rank = min_rank(-salary)*1, Value_rel = salary/salary[Rank==1], Value_lbl = paste0(" ",salary))
results[[i]] <- p
}

b <- as.data.frame(do.call("rbind", results))


p <- b  # Rank, Value_rel and Value_lbl were already built inside the loop above
  
# plot

p2<-  ggplot(p, aes(-Rank,Value_rel, fill = name, label = name)) +
  geom_col(width = 0.8, position="identity") +
  coord_flip() + 
  geom_text(aes(-Rank,y=0,label = name,hjust=0)) +       #player name label
  geom_text(aes(-Rank,y=Value_rel,label = Value_lbl, hjust=0)) + # salary value label
  theme_minimal() +
  theme(plot.title = element_text(hjust = 1, size = 30),
        axis.ticks.y = element_blank(),
        axis.text.y  = element_blank()) +
  # animate along Year
  transition_states(yearID,1,1) +
  #add
  scale_color_viridis_d(name="")+
  scale_fill_viridis_d(name="")+
  theme_tufte(14,"Avenir")+
  guides(color=F,fill=F)+
  labs(title = 'Year: {closest_state}',  x = "",y="Salary compared to Rank 1's")

animate(p2, 100, fps = 25, duration = 40, width = 800, height = 600)
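If the rendered animation should be kept, gganimate's anim_save() can write the most recent render to disk; the file name below is only an example.

#save the last rendered animation (file name is illustrative)
anim_save("top10_salaries.gif", animation = last_animation())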

Attendance

#Number of spectators per game
spectator <- Teams %>% filter(yearID > 1990)
spectator$attend <- spectator$attendance/spectator$G
spectator$affiliation <- paste(spectator$lgID, spectator$divID)

histogram(~spectator$attend|spectator$affiliation, type = "density", panel = function(x,...){
          panel.histogram(x,...,col = "gray")
          apg <- seq(min(x), max(x))
          density <- dnorm(apg, mean(x), sd(x))
          panel.lines(apg, density, col = "black")})

Batting Performance

#how rare is a .300 season batting average?
player3 <- subset(Batting, yearID>1975 & yearID <2017, select = c("yearID", "AB", "H", "G"))
player3$avg <- player3$H / player3$AB
player3 <- na.omit(player3)

func <- function(player3){return(data.frame(sd=sd(player3$avg[player3$AB > 400]),
                                              mean = mean(player3$avg[player3$AB > 400])))}
env<- ddply(player3, .(yearID), func)

#normalize: where does a .300 average sit in each season's distribution (hitters with AB > 400)?
env$z <- (0.3-env$mean)/env$sd

env$per <- pnorm(0.3, env$mean, env$sd, lower.tail=TRUE)
par(mfrow=c(1,2))
plot(env$yearID, env$z, xlab="year", ylab="z score of 0.3 average")
lines(smooth.spline(env$yearID, env$z))
plot(env$yearID, env$per, xlab = "year", ylab = "Percentile of 0.3 AVG")
lines(smooth.spline(env$yearID, env$per)) 

After 2010, the number of .300 hitters increased.
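To make the normalization concrete, here is a small worked example with made-up numbers (not taken from the data): suppose the qualified hitters in some season average .270 with a standard deviation of .025.

#illustrative values only
(0.300 - 0.270) / 0.025                 # z = 1.2 standard deviations above the mean
pnorm(0.300, mean = 0.270, sd = 0.025)  # ~0.885: .300 beats about 88.5% of hitters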

Correlation

a <- Batting %>% filter (yearID == 2014)
b <- Batting %>% filter (yearID == 2015)
c <- merge(a,b, by = "playerID")
d<- c[c$AB.x > 10 & c$AB.y > 10,]

#the relationship between 2014 HR(HR.x) and 2015 HR(HR.y)
cor(d$HR.x,d$HR.y)
## [1] 0.6804912
#the relationship between 2014 batting average and 2015 batting average
cor(d$H.x/d$AB.x, d$H.y/d$AB.y)
## [1] 0.4868565
#year-over-year correlation is higher for home runs than for batting average

#How can we predict RBI (Runs Batted In) for the next season?
a <- subset(Batting, yearID > 2014)
a$teamID <- as.numeric(as.factor(a$teamID))
b <- function(a) {return(data.frame(
                  team = ifelse(mean(a$teamID) == a$teamID,0,1),
                  a$playerID, a$lgID, a$SF, a$SH, a$H, a$yearID, a$teamID, a$RBI, a$AB))}
d <- ddply(a,.(playerID),b)

#build lagged (previous-season) values by shifting each column one row
d$lag_team <- as.numeric(sapply(1:nrow(d), function(x){d$a.teamID[x-1]}))
d$lag_RBI <- as.numeric(sapply(1:nrow(d), function(x){d$a.RBI[x-1]}))
d$lag_AB <- as.numeric(sapply(1:nrow(d), function(x){d$a.AB[x-1]}))
d$lag_SF <- as.numeric(sapply(1:nrow(d), function(x){d$a.SF[x-1]}))
d$lag_SH <- as.numeric(sapply(1:nrow(d), function(x){d$a.SH[x-1]}))
d$lag_H <- as.numeric(sapply(1:nrow(d), function(x){d$a.H[x-1]}))
d$lag_playerID <- as.character(sapply(1:nrow(d), function(x){d$playerID[x-1]}))

d$lag_team <- ifelse(d$playerID == d$lag_playerID, d$lag_team, "NA")
d$lag_avg <- d$lag_H/d$lag_AB
d$sac <- d$lag_SF + d$lag_SH
d <- subset(d, a.AB > 400 & lag_AB > 400)
d$change_rbi <- d$a.RBI/d$lag_RBI
d <- subset(d, !((lag_team == "NA")|(a.teamID == lag_team)))
d$lg_col <- ifelse(d$a.lgID == "NL", "grey", "black")
d$lg_shape <- ifelse(d$a.lgID == "NL", 2,15)
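The lag columns above are built by shifting vectors one row with sapply. A sketch of an equivalent approach uses dplyr's grouped lag (written with explicit dplyr:: prefixes because plyr, loaded above, masks some dplyr verbs); it assumes the rows are ordered by player and season.

d_alt <- a %>%
  dplyr::arrange(playerID, yearID) %>%
  dplyr::group_by(playerID) %>%
  dplyr::mutate(lag_RBI = dplyr::lag(RBI),
                lag_AB  = dplyr::lag(AB),
                lag_SF  = dplyr::lag(SF),
                lag_SH  = dplyr::lag(SH),
                lag_H   = dplyr::lag(H)) %>%
  dplyr::ungroup()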

colnames(d)
##  [1] "playerID"     "team"         "a.playerID"   "a.lgID"      
##  [5] "a.SF"         "a.SH"         "a.H"          "a.yearID"    
##  [9] "a.teamID"     "a.RBI"        "a.AB"         "lag_team"    
## [13] "lag_RBI"      "lag_AB"       "lag_SF"       "lag_SH"      
## [17] "lag_H"        "lag_playerID" "lag_avg"      "sac"         
## [21] "change_rbi"   "lg_col"       "lg_shape"
cor(d$lag_avg, d$change_rbi)
## [1] -0.4890067

We used 2015 and 2016 data and assigned 0 to players who stayed with their team (1 to players who changed teams). The correlation between the previous season's AVG and the change in RBI is negative, which seems counterintuitive at first, but it indicates that a higher AVG in the previous season does not translate into a higher RBI total the following season.

#relationship between the prior season's AVG and the change in RBI

cor(d$lag_avg, d$change_rbi)
## [1] -0.4890067
plot(d$lag_avg, d$change_rbi, main = "Predictor of RBI", xlab = "Batting Average of Prior Year",
     ylab = "Change in RBI", las = 1, cex.axis=0.8, pch=19, col=d$lg_col)
legend(x=0.29, y=1.4, c("National", "American"), col = c("gray", "black"), pch=c(19,19))
abline(lm(change_rbi~lag_avg,d))

cor(d$sac, d$change_rbi)
## [1] 0.5042151
plot(d$sac, d$change_rbi, main = "Predictor of RBI", font.main = 3, xlab = "Sacrifice Flies & Hits", ylab = "Change in RBI", las = 1, cex.axis = 0.8, pch=d$lg_shape)
text(x=4, y = 1.6, label = "r=0.5")
abline(lm(change_rbi~sac, d), lty = 2, lwd =3)

People sometimes ignore minor indicators such as sacrifice flies and sacrifice hits, but there is a clear positive relationship between these predictors and the change in RBI.

Association

Do people really prefer a specific team in the transfer market?

a1 <- subset(Batting, yearID > 2010, select = c(playerID, teamID))
a1$teamID <- as.factor(a1$teamID)
a1$teamID <- as.character(a1$teamID)

move <- dcast(setDT(a1)[,idx := 1:.N, by = playerID],
              playerID~idx, value.var=c("teamID"))
move[is.na(move)] <- ""
move[,1] <- NULL
write.csv(move, file="move.csv")

move <- read.transactions("move.csv", sep = ",")
summary(move)
## transactions as itemMatrix in sparse format with
##  2586 rows (elements/itemsets/transactions) and
##  2616 columns (items) and a density of 0.001098008 
## 
## most frequent items:
##     NYA     SDN     TOR     CHN     BOS (Other) 
##     191     183     179     178     177    6520 
## 
## element (itemset/transaction) length distribution:
## sizes
##    2    3    4    5    6    7    8    9   11 
## 1346  632  343  167   68   20    7    2    1 
## 
##    Min. 1st Qu.  Median    Mean 3rd Qu.    Max. 
##   2.000   2.000   2.000   2.872   3.000  11.000 
## 
## includes extended item information - examples:
##   labels
## 1      1
## 2     10
## 3    100
itemFrequencyPlot(move, support=0.01, cex.names=0.6)

pattern <- apriori(move, list(support = 0.0015, confidence = 0.50, minlen = 2))
## Apriori
## 
## Parameter specification:
##  confidence minval smax arem  aval originalSupport maxtime support minlen
##         0.5    0.1    1 none FALSE            TRUE       5  0.0015      2
##  maxlen target   ext
##      10  rules FALSE
## 
## Algorithmic control:
##  filter tree heap memopt load sort verbose
##     0.1 TRUE TRUE  FALSE TRUE    2    TRUE
## 
## Absolute minimum support count: 3 
## 
## set item appearances ...[0 item(s)] done [0.00s].
## set transactions ...[2616 item(s), 2586 transaction(s)] done [0.00s].
## sorting and recoding items ... [31 item(s)] done [0.00s].
## creating transaction tree ... done [0.00s].
## checking subsets of size 1 2 3 done [0.00s].
## writing ... [6 rule(s)] done [0.00s].
## creating S4 object  ... done [0.00s].
summary(inspect(pattern))
##     lhs          rhs   support    confidence lift      count
## [1] {FLO}     => {MIA} 0.00966744 0.5681818   9.995362 25   
## [2] {FLO,LAN} => {MIA} 0.00154679 1.0000000  17.591837  4   
## [3] {CLE,SLN} => {OAK} 0.00154679 0.6666667   9.795455  4   
## [4] {CHN,WAS} => {MIA} 0.00154679 0.5000000   8.795918  4   
## [5] {HOU,MIA} => {SDN} 0.00154679 0.5000000   7.065574  4   
## [6] {CLE,PIT} => {PHI} 0.00154679 0.8000000  12.930000  4
##         lhs              rhs       support           confidence    
##  {CHN,WAS}:1   =>:6   {MIA}:3   Min.   :0.001547   Min.   :0.5000  
##  {CLE,PIT}:1          {OAK}:1   1st Qu.:0.001547   1st Qu.:0.5170  
##  {CLE,SLN}:1          {PHI}:1   Median :0.001547   Median :0.6174  
##  {FLO,LAN}:1          {SDN}:1   Mean   :0.002900   Mean   :0.6725  
##  {FLO}    :1                    3rd Qu.:0.001547   3rd Qu.:0.7667  
##  {HOU,MIA}:1                    Max.   :0.009667   Max.   :1.0000  
##       lift            count     
##  Min.   : 7.066   Min.   : 4.0  
##  1st Qu.: 9.046   1st Qu.: 4.0  
##  Median : 9.895   Median : 4.0  
##  Mean   :11.029   Mean   : 7.5  
##  3rd Qu.:12.196   3rd Qu.: 4.0  
##  Max.   :17.592   Max.   :25.0

We can confirm that players who played for NYA move frequently. FLO appears much less often than the other teams because the franchise changed its abbreviation from FLO to MIA in 2012.
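To surface the strongest movement patterns first, the rules can be re-ordered by lift (a sketch using arules helpers):

#rules sorted from highest to lowest lift
inspect(sort(pattern, by = "lift"))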

Network Analysis

#relationship between managers and ace pitchers

str(Pitching)
## 'data.frame':    44963 obs. of  30 variables:
##  $ playerID: chr  "bechtge01" "brainas01" "fergubo01" "fishech01" ...
##  $ yearID  : int  1871 1871 1871 1871 1871 1871 1871 1871 1871 1871 ...
##  $ stint   : int  1 1 1 1 1 1 1 1 1 1 ...
##  $ teamID  : Factor w/ 149 levels "ALT","ANA","ARI",..: 97 142 90 111 90 136 111 56 97 136 ...
##  $ lgID    : Factor w/ 7 levels "AA","AL","FL",..: 4 4 4 4 4 4 4 4 4 4 ...
##  $ W       : int  1 12 0 4 0 0 0 6 18 12 ...
##  $ L       : int  2 15 0 16 1 0 1 11 5 15 ...
##  $ G       : int  3 30 1 24 1 1 3 19 25 29 ...
##  $ GS      : int  3 30 0 24 1 0 1 19 25 29 ...
##  $ CG      : int  2 30 0 22 1 0 1 19 25 28 ...
##  $ SHO     : int  0 0 0 1 0 0 0 1 0 0 ...
##  $ SV      : int  0 0 0 0 0 0 0 0 0 0 ...
##  $ IPouts  : int  78 792 3 639 27 3 39 507 666 747 ...
##  $ H       : int  43 361 8 295 20 1 20 261 285 430 ...
##  $ ER      : int  23 132 3 103 10 0 5 97 113 153 ...
##  $ HR      : int  0 4 0 3 0 0 0 5 3 4 ...
##  $ BB      : int  11 37 0 31 3 0 3 21 40 75 ...
##  $ SO      : int  1 13 0 15 0 0 1 17 15 12 ...
##  $ BAOpp   : num  NA NA NA NA NA NA NA NA NA NA ...
##  $ ERA     : num  7.96 4.5 27 4.35 10 0 3.46 5.17 4.58 5.53 ...
##  $ IBB     : int  NA NA NA NA NA NA NA NA NA NA ...
##  $ WP      : int  NA NA NA NA NA NA NA NA NA NA ...
##  $ HBP     : int  NA NA NA NA NA NA NA NA NA NA ...
##  $ BK      : int  0 0 0 0 0 0 0 2 0 0 ...
##  $ BFP     : int  NA NA NA NA NA NA NA NA NA NA ...
##  $ GF      : int  NA NA NA NA NA NA NA NA NA NA ...
##  $ R       : int  42 292 9 257 21 0 30 243 223 362 ...
##  $ SH      : int  NA NA NA NA NA NA NA NA NA NA ...
##  $ SF      : int  NA NA NA NA NA NA NA NA NA NA ...
##  $ GIDP    : int  NA NA NA NA NA NA NA NA NA NA ...
a2 <- subset(Pitching, yearID>2014&G>35)
a2 <- a2[,c("playerID", "yearID", "teamID")]
a2$teamyear <- paste(a2$teamID, a2$yearID, sep = "")
manager <- subset(Managers, yearID > 2014, select = c("playerID", "yearID", "teamID"))
manager$teamyear <- paste(manager$teamID, manager$yearID, sep = "")
node <- merge(a2, manager, by = "teamyear")
node <- subset(node, select = c("playerID.x", "playerID.y"))

mlb_network <- graph.data.frame(node, directed=FALSE)
V(mlb_network)$label <- ifelse(V(mlb_network)$name %in% c(manager$playerID)>0,
                               as.character(manager$teamyear),NA)
mlb_network
## IGRAPH c999503 UN-- 304 409 -- 
## + attr: name (v/c), label (v/c)
## + edges from c999503 (vertex names):
##  [1] zieglbr01--halech01  collmjo01--halech01  chafian01--halech01 
##  [4] perezol01--halech01  hudsoda01--halech01  hernada01--halech01 
##  [7] delgara01--halech01  reedad01 --halech01  zieglbr01--halech01 
## [10] burgoen02--halech01  clippty01--halech01  barreja01--halech01 
## [13] delgara01--halech01  hudsoda01--halech01  corbipa01--halech01 
## [16] cunnibr02--gonzafr99 johnsji04--gonzafr99 avilalu01--gonzafr99
## [19] grillja01--gonzafr99 vizcaar01--gonzafr99 withrch01--snitkbr99
## [22] withrch01--gonzafr99 vizcaar01--snitkbr99 vizcaar01--gonzafr99
## + ... omitted several edges
manager2 <- V(mlb_network)$name %in% c(manager$playerID) +1
plot(mlb_network, vertex.label.cex=0.6, vertex.label.color = "black",
     vertex.size = c(8,22)[manager2], vertex.color = c("khaki2", "tomato")[manager2])

Baseball is a sport in which other teams' strategies are easily imitated and learned through the movement of players, coaches, and club staff. Using data from the 2015 and 2016 seasons, we analyzed the network between managers and ace-class pitchers who appeared in at least 35 games in a season. When the manager was not replaced during the two observed seasons and none of those pitchers changed teams, the cluster stands apart from the rest of the network, as with TOR2015.
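A simple follow-up (a sketch using igraph functions already loaded) is degree centrality, i.e., which nodes in the 2015-2016 network have the most connections:

#ten most-connected nodes (managers typically rank high because every
#qualifying pitcher on their team links to them)
head(sort(degree(mlb_network), decreasing = TRUE), 10)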

Model

Simple Linear Regression

#check homoscedasticity to use regression
rec <- subset(Teams)
rec$wp <- rec$W / rec$G #wins / games
lm1 <- lm(wp~R, rec)
bptest(lm1)
## 
##  studentized Breusch-Pagan test
## 
## data:  lm1
## BP = 168.73, df = 1, p-value < 0.00000000000000022
b <- BoxCoxTrans(rec$wp)
c <- cbind(rec, wp_adj=predict(b, rec$wp))
lm2 <- lm(wp_adj~R, c)
bptest(lm2)
## 
##  studentized Breusch-Pagan test
## 
## data:  lm2
## BP = 168.73, df = 1, p-value < 0.00000000000000022
rec$avg <- rec$H/rec$AB

avg_model <- lm(wp~avg, rec)
ERA_model <- lm(wp~ERA, rec)

summary(avg_model)
## 
## Call:
## lm(formula = wp ~ avg, data = rec)
## 
## Residuals:
##      Min       1Q   Median       3Q      Max 
## -0.41552 -0.05289  0.00511  0.05781  0.29032 
## 
## Coefficients:
##             Estimate Std. Error t value             Pr(>|t|)    
## (Intercept) -0.13384    0.02316  -5.778         0.0000000084 ***
## avg          2.40671    0.08842  27.220 < 0.0000000000000002 ***
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
## 
## Residual standard error: 0.08399 on 2833 degrees of freedom
## Multiple R-squared:  0.2073, Adjusted R-squared:  0.207 
## F-statistic: 740.9 on 1 and 2833 DF,  p-value: < 0.00000000000000022
summary(ERA_model)
## 
## Call:
## lm(formula = wp ~ ERA, data = rec)
## 
## Residuals:
##      Min       1Q   Median       3Q      Max 
## -0.47655 -0.04634  0.00664  0.05626  0.32005 
## 
## Coefficients:
##              Estimate Std. Error t value            Pr(>|t|)    
## (Intercept)  0.715668   0.008025   89.17 <0.0000000000000002 ***
## ERA         -0.057722   0.002061  -28.01 <0.0000000000000002 ***
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
## 
## Residual standard error: 0.08348 on 2833 degrees of freedom
## Multiple R-squared:  0.2168, Adjusted R-squared:  0.2166 
## F-statistic: 784.4 on 1 and 2833 DF,  p-value: < 0.00000000000000022

ERA_model has a slightly higher R-squared than avg_model, so using ERA to predict team winning percentage should yield somewhat more accurate results.

ERA_model shows a negative relationship between team winning percentage and ERA, which supports the saying that baseball is a pitchers' game.
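As an illustration of the fitted relationship, the model can be asked for the expected winning percentage at a few hypothetical team ERA values (chosen for the example, not taken from the data):

#expected winning percentage at illustrative team ERAs
predict(ERA_model, newdata = data.frame(ERA = c(3.0, 4.0, 5.0)))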

Does a smaller standard deviation in team batting averages (i.e., more parity among teams) draw more spectators to the ballpark?

team <- subset(Teams, yearID>1975 & yearID < 2017,
        select = c("teamID", "G", "yearID", "attendance"))
team$att <- (team$attendance/team$G)
c2 <- subset(team, teamID == "ATL" | teamID == "BAL" | teamID == "BOS" | teamID == "CHA" |
                 teamID == "CHN" | teamID == "CIN" | teamID == "CLE" | teamID == "DET" |
                 teamID == "HOU" | teamID == "KCA" | teamID == "LAN" | teamID == "MIN" |
                 teamID == "NYA" | teamID == "NYN" | teamID == "OAK" | teamID == "PHI" |
                 teamID == "PIT" | teamID == "SDN" | teamID == "SFN" | teamID == "SLN" |
                 teamID == "TEX" )

func_1 <- function(c2){return(data.frame(sum=sum(c2$att)))}
attend <- ddply(c2, .(yearID), func_1)
attend$apg <- attend$sum/21 #the number of teams = 21; APG(attendance per game)
d2 <- merge(env, attend, by = "yearID")

d2$lag_apg <- as.numeric(sapply(1:nrow(d2), function(x){d2$apg[x-1]}))
d2$ratio <- d2$apg/d2$lag_apg

d2 <- subset(d2, !(yearID == 1976|yearID==1994|yearID==1995|yearID==1996)) #1976 starting point, 94~96: strike
summary(lm(ratio~sd+mean+yearID, data=d2))
## 
## Call:
## lm(formula = ratio ~ sd + mean + yearID, data = d2)
## 
## Residuals:
##       Min        1Q    Median        3Q       Max 
## -0.079044 -0.027927  0.003881  0.028459  0.119069 
## 
## Coefficients:
##               Estimate Std. Error t value Pr(>|t|)  
## (Intercept)  3.2687319  1.2446773   2.626   0.0130 *
## sd          -6.6899580  3.6632381  -1.826   0.0769 .
## mean         1.5431600  1.2957632   1.191   0.2422  
## yearID      -0.0012481  0.0005919  -2.109   0.0427 *
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
## 
## Residual standard error: 0.04309 on 33 degrees of freedom
## Multiple R-squared:  0.2197, Adjusted R-squared:  0.1488 
## F-statistic: 3.097 on 3 and 33 DF,  p-value: 0.04007
#lower sd (teams performing at similar levels) is associated with higher attendance

plot(d2$sd, d2$ratio, ylab = "Change in attendance", xlab = "Avg standard deviation")
abline(lm(ratio~sd, data=d2)) 

Consistent with the regression result, sd is a significant factor at the 90% confidence level. The plot shows the same pattern: the lower the sd (i.e., the more evenly the teams perform), the higher the attendance.

T-test

team90 <- subset(Teams, yearID > 1990)

#attendance per game (assuming a 162-game season)
att_t <- team90$attendance/162
affiliation <- paste(team90$lgID, team90$divID)
att_total <- cbind(team90, att_t, affiliation)

ggplot(att_total, aes(att_t)) + geom_density() +labs(x="attendance per game") + geom_vline(xintercept=c(11800,14500,18700), linetype = "dashed")

ggplot(att_total, aes(att_t)) + geom_density() + labs(x="attendance per game") + theme(axis.text.x = element_blank()) + facet_grid(.~affiliation)

ggplot(att_total, aes(att_total$W, att_total$att_t)) + geom_point(size=0.5) + stat_smooth(method="lm", col="black") + facet_wrap(~affiliation) +labs(x="Games won", y="Attendance per Game") 

#Beyond visually inspecting per-game attendance for each of the six groups, we can use a t-test to confirm the differences
#Visually, the biggest difference is between NL W and AL C

#Use Welch's t-test to compare their mean attendance

t.test(att_t[affiliation == "NL W"], att_t[affiliation == "AL C"])
## 
##  Welch Two Sample t-test
## 
## data:  att_t[affiliation == "NL W"] and att_t[affiliation == "AL C"]
## t = 8.1872, df = 242.91, p-value = 0.00000000000001518
## alternative hypothesis: true difference in means is not equal to 0
## 95 percent confidence interval:
##  3237.514 5288.917
## sample estimates:
## mean of x mean of y 
##  16685.90  12422.68
t.test(att_t[affiliation == "AL E"], att_t[affiliation == "NL C"])
## 
##  Welch Two Sample t-test
## 
## data:  att_t[affiliation == "AL E"] and att_t[affiliation == "NL C"]
## t = 0.10185, df = 250.45, p-value = 0.919
## alternative hypothesis: true difference in means is not equal to 0
## 95 percent confidence interval:
##  -1025.570  1137.428
## sample estimates:
## mean of x mean of y 
##  15133.86  15077.93
box <- subset(att_total, affiliation %in% c("NL W", "AL E", "NL C", "AL C"))
boxplot(box$att_t ~ box$affiliation)

First of all, the p-value is very low, so the t-test is significant: mean attendance differs between NL W and AL C. Specifically, the 95% confidence interval for the NL W advantage runs from about 3,238 to 5,289 spectators per game.

Using the box plot, we can see the attendance differences as well. NL W, which includes LAD and SFG, draws more attendance than the other divisions.
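Rather than choosing pairs by eye, all six divisions can be compared at once; a sketch with base R's pairwise.t.test (Holm-adjusted p-values by default):

#pairwise comparisons of per-game attendance across all six divisions
pairwise.t.test(att_total$att_t, att_total$affiliation)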

GLM

#team Earned Run Average (ERA)
#does 2015-2016 season ERA predict winning the division?
#probability of winning the division (a proxy for reaching the postseason)

a1516 <- subset(Teams, Teams$yearID > 2014)
a1516$d_win <-ifelse(a1516$DivWin == "Y",1,0)
a1516$avg <- a1516$H/a1516$AB

#do ERA and AVG affect winning the division?

div <- (glm(d_win~ERA+avg+HR+ X2B + X3B + SO + SB + SF, data = a1516, family = binomial(link = "logit")))
div2 <- (glm(d_win~ERA, data = a1516, family = binomial(link = "logit")))

summary(div)
## 
## Call:
## glm(formula = d_win ~ ERA + avg + HR + X2B + X3B + SO + SB + 
##     SF, family = binomial(link = "logit"), data = a1516)
## 
## Deviance Residuals: 
##      Min        1Q    Median        3Q       Max  
## -1.83141  -0.37396  -0.16834  -0.03113   2.57664  
## 
## Coefficients:
##              Estimate Std. Error z value Pr(>|z|)   
## (Intercept) 13.226031  26.700574   0.495  0.62036   
## ERA         -4.632629   1.641943  -2.821  0.00478 **
## avg         19.550157  95.365847   0.205  0.83757   
## HR           0.050941   0.025723   1.980  0.04766 * 
## X2B          0.034376   0.030212   1.138  0.25519   
## X3B         -0.024835   0.068167  -0.364  0.71562   
## SO          -0.015610   0.007939  -1.966  0.04928 * 
## SB           0.028869   0.023606   1.223  0.22135   
## SF          -0.057906   0.075757  -0.764  0.44465   
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
## 
## (Dispersion parameter for binomial family taken to be 1)
## 
##     Null deviance: 60.048  on 59  degrees of freedom
## Residual deviance: 30.659  on 51  degrees of freedom
## AIC: 48.659
## 
## Number of Fisher Scoring iterations: 7
summary(div2)
## 
## Call:
## glm(formula = d_win ~ ERA, family = binomial(link = "logit"), 
##     data = a1516)
## 
## Deviance Residuals: 
##     Min       1Q   Median       3Q      Max  
## -1.4936  -0.5697  -0.4449  -0.1551   2.3607  
## 
## Coefficients:
##             Estimate Std. Error z value Pr(>|z|)   
## (Intercept)   10.241      3.867   2.648  0.00809 **
## ERA           -2.966      1.008  -2.944  0.00324 **
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
## 
## (Dispersion parameter for binomial family taken to be 1)
## 
##     Null deviance: 60.048  on 59  degrees of freedom
## Residual deviance: 47.823  on 58  degrees of freedom
## AIC: 51.823
## 
## Number of Fisher Scoring iterations: 5

We can see that ERA is an important factor in winning the division: each one-run reduction in team ERA multiplies the odds of winning the division (by a factor of roughly exp(2.97), about 19, in the ERA-only model). If you are a baseball fan, you know that the lower the ERA, the more likely the team is to win.
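A sketch of reading the logistic coefficients on the odds scale: exponentiating them gives the odds multiplier per one-unit change in each predictor.

#odds ratio for ERA in the single-predictor model (about 1/19 per extra run of ERA)
exp(coef(div2))
exp(confint(div2))   # profile-likelihood confidence intervals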

Logistic model

ERA_logit <- data.frame(success = a1516$d_win, ERA = a1516$ERA, fit=predict(div2, a1516))
ERA_logit$prob <- exp(ERA_logit$fit) / (1+exp(ERA_logit$fit))
c3 <- data.frame(ERA = seq(min(a1516$ERA), max(a1516$ERA)))
c3$d_win <- predict(div2, newdata = c3, type = "response")
ggplot(ERA_logit, aes(x=ERA_logit$ERA, y=ERA_logit$success)) + geom_point() + geom_line(aes(x=ERA_logit$ERA, y=ERA_logit$prob))

Once team ERA passes 4.0, the probability of winning the division drops sharply, to below roughly 15%. If team ERA is under about 3.5, the probability of winning the division rises to around 50% or more. ERA is therefore an important factor in determining the division winner.
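The same claims can be checked numerically by asking the fitted model for predicted probabilities at a few ERA values (values chosen for the illustration):

#predicted probability of winning the division at illustrative team ERAs
predict(div2, newdata = data.frame(ERA = c(3.5, 4.0, 4.5)), type = "response")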

#Add AVG (batting average), X3R (triples per at-bat), RR (runs scored divided by runs allowed)
#use nnet to predict the probability of winning the division

Teams$AVG <- Teams$H/Teams$AB
Teams$X3R <- Teams$X3B/Teams$AB
Teams$RR <- Teams$R / Teams$RA

team9016 <- subset(Teams, yearID > 1900 & yearID < 2016)
team9016 <- team9016[ ,c("yearID", "DivWin", "ERA", "AVG", "X3R","RR")]
colnames(team9016) <- c("yearID", "DivWin", "ERA", "AVG", "X3R", "RR")
team9016 <- na.omit(team9016)


train <- subset(team9016, !(yearID<2016 & yearID>1995))
test <- subset(team9016, (yearID<2016 & yearID>1995))

LDA

#Create dataframe to store model results
model.accuracytest<- setNames(data.frame(matrix(ncol = 2, nrow = 0)), 
                              c("modelx", "accuracy_test"))

ldafit <- lda(DivWin ~ ERA+AVG+X3R+RR, data = test)
ldapred <- predict(ldafit, test)
test$DivWin <- as.factor(test$DivWin)
lda_cm <- confusionMatrix(ldapred$class, test$DivWin)
lda_cm
## Confusion Matrix and Statistics
## 
##           Reference
## Prediction   N   Y
##          N 444  63
##          Y  32  57
##                                           
##                Accuracy : 0.8406          
##                  95% CI : (0.8087, 0.8691)
##     No Information Rate : 0.7987          
##     P-Value [Acc > NIR] : 0.005196        
##                                           
##                   Kappa : 0.4514          
##  Mcnemar's Test P-Value : 0.002084        
##                                           
##             Sensitivity : 0.9328          
##             Specificity : 0.4750          
##          Pos Pred Value : 0.8757          
##          Neg Pred Value : 0.6404          
##              Prevalence : 0.7987          
##          Detection Rate : 0.7450          
##    Detection Prevalence : 0.8507          
##       Balanced Accuracy : 0.7039          
##                                           
##        'Positive' Class : N               
## 
model.accuracytest['lda',] <- c('lda', lda_cm$overall[1])
model.accuracytest
##     modelx     accuracy_test
## lda    lda 0.840604026845638

QDA

#qda
qdafit <- qda(DivWin ~ ERA+AVG+X3R+RR, data = test)
qdapred <- predict(qdafit)
qda_cm <- confusionMatrix(qdapred$class, test$DivWin)
qda_cm
## Confusion Matrix and Statistics
## 
##           Reference
## Prediction   N   Y
##          N 441  58
##          Y  35  62
##                                           
##                Accuracy : 0.844           
##                  95% CI : (0.8123, 0.8722)
##     No Information Rate : 0.7987          
##     P-Value [Acc > NIR] : 0.002722        
##                                           
##                   Kappa : 0.4774          
##  Mcnemar's Test P-Value : 0.022531        
##                                           
##             Sensitivity : 0.9265          
##             Specificity : 0.5167          
##          Pos Pred Value : 0.8838          
##          Neg Pred Value : 0.6392          
##              Prevalence : 0.7987          
##          Detection Rate : 0.7399          
##    Detection Prevalence : 0.8372          
##       Balanced Accuracy : 0.7216          
##                                           
##        'Positive' Class : N               
## 
model.accuracytest['qda',] <- c('qda', qda_cm$overall[1])
model.accuracytest
##     modelx     accuracy_test
## lda    lda 0.840604026845638
## qda    qda 0.843959731543624

Neural Nets

#convert the character outcome (N/Y) into 0/1 indicator columns
train$ID_a = class.ind(train$DivWin)
test$ID_b = class.ind(test$DivWin)

fitnn = nnet(ID_a~ERA+AVG+X3R+RR, train, size=3, softmax = TRUE)
## # weights:  23
## initial  value 388.864180 
## iter  10 value 215.783534
## iter  20 value 162.093209
## iter  30 value 158.240614
## iter  40 value 157.465767
## iter  50 value 157.024377
## iter  60 value 156.612662
## iter  70 value 156.006475
## iter  80 value 153.571631
## iter  90 value 152.466899
## iter 100 value 152.146641
## final  value 152.146641 
## stopped after 100 iterations
fitnn
## a 4-3-2 network with 23 weights
## inputs: ERA AVG X3R RR 
## output(s): ID_a 
## options were - softmax modelling
summary(fitnn)
## a 4-3-2 network with 23 weights
## options were - softmax modelling 
##  b->h1 i1->h1 i2->h1 i3->h1 i4->h1 
## -40.09  -1.33 -10.27   9.41  44.50 
##  b->h2 i1->h2 i2->h2 i3->h2 i4->h2 
## -56.16  -5.38 104.26 288.32  31.93 
##  b->h3 i1->h3 i2->h3 i3->h3 i4->h3 
##  11.81   4.62  -1.03  -0.32  -5.96 
##  b->o1 h1->o1 h2->o1 h3->o1 
##  -6.90  -1.90 -51.67   9.11 
##  b->o2 h1->o2 h2->o2 h3->o2 
##   7.15   2.47  52.13  -9.42
#AIC is 150.64


team9016$win <- team9016$DivWin == "Y"
team9016$not_win <- team9016$DivWin == "N"

set.seed(123)
nnet1 <- neuralnet(win+not_win ~ERA+AVG+X3R+RR, data = team9016, linear.output = F, hidden = 3, stepmax = 1e6 )

# display weights
nnet1$weights
## [[1]]
## [[1]][[1]]
##              [,1]        [,2]         [,3]
## [1,]   -90.408653   1.0268732   1.04870700
## [2,]    -4.542408  -0.1981622  -0.09018121
## [3,]   105.600002  -3.2932937   0.71984142
## [4,] -1859.359316 -42.3580707 -23.40048397
## [5,]    86.770491  -3.3375831  -2.89890309
## 
## [[1]][[2]]
##            [,1]      [,2]
## [1,]   2.890745 -2.912388
## [2,]   1.575138 -1.569493
## [3,] -49.748674 45.441594
## [4,] -57.511477 58.307532
# display predictions
prediction(nnet1)
## $rep1
##       ERA       AVG         X3R        RR            win    not_win
## 1    4.24 0.2245660 0.007840209 0.6273458 0.000002053158 0.99999786
## 2    5.30 0.2400293 0.007135016 0.6368534 0.000009213291 0.99999070
## 3    4.58 0.2288089 0.004986150 0.6518072 0.000003128049 0.99999675
## 4    4.92 0.2478727 0.006844247 0.6655093 0.000013943162 0.99998593
## 5    4.75 0.2385939 0.005983545 0.6662791 0.000009002358 0.99999081
## 6    5.00 0.2419235 0.005012997 0.6778291 0.000013537909 0.99998625
## 7    4.98 0.2527056 0.006854257 0.6840934 0.000027605250 0.99997231
## 8    4.54 0.2379619 0.003732736 0.6970849 0.000010013360 0.99998969
## ... (remaining rows of fitted probabilities omitted)
## 319  4.09 0.2504967 0.004153874 0.9057105 0.002679065632 0.99729889
## 320  4.76 0.2676081 0.004236540 0.9060976 0.004511554445 0.99550338
## 321  4.30 0.2647268 0.006564940 0.9061662 0.004965318180 0.99504376
## 322  4.38 0.2528990 0.003865268 0.9072848 0.003295161754 0.99668818
## 323  3.99 0.2614687 0.004533092 0.9073306 0.002724459709 0.99726326
## 324  4.25 0.2706324 0.009646302 0.9074316 0.008446071462 0.99160676
## 325  4.48 0.2614745 0.008312252 0.9074550 0.007996630482 0.99203408
## 326  3.80 0.2384615 0.005128205 0.9085631 0.002789041902 0.99717231
## 327  4.01 0.2550815 0.006958433 0.9087193 0.004609785456 0.99537662
## 328  4.77 0.2630916 0.005038690 0.9087591 0.005621641871 0.99439440
## 329  4.47 0.2521320 0.004078606 0.9090909 0.003839544421 0.99614444
## 330  2.92 0.2343000 0.003456885 0.9094412 0.000932625082 0.99903830
## 331  3.72 0.2463504 0.006934307 0.9101796 0.003837397761 0.99613014
## 332  4.00 0.2604467 0.006304035 0.9102750 0.004146610514 0.99584458
## 333  4.49 0.2600449 0.007138487 0.9102750 0.006951520239 0.99306448
## 334  4.70 0.2603498 0.002855103 0.9106700 0.003707799541 0.99628784
## 335  4.71 0.2639757 0.006251116 0.9109756 0.006999077581 0.99302854
## 336  4.16 0.2606721 0.005086285 0.9110807 0.003780777982 0.99621012
## 337  4.09 0.2384229 0.007067138 0.9110807 0.005380945065 0.99457805
## 338  4.56 0.2687804 0.004863988 0.9112500 0.004865648994 0.99514877
## 339  4.11 0.2622774 0.005756431 0.9131016 0.004310065894 0.99568415
## 340  3.47 0.2488220 0.005980428 0.9131122 0.002774580476 0.99719333
## 341  3.58 0.2494922 0.008125577 0.9132420 0.004604563837 0.99536471
## 342  4.64 0.2453071 0.004009477 0.9132653 0.004772907294 0.99520300
## 343  3.26 0.2247322 0.006037001 0.9134948 0.002439640575 0.99750311
## 344  4.12 0.2651391 0.007092199 0.9135802 0.005598508422 0.99440867
## 345  4.29 0.2604414 0.007112894 0.9139785 0.006463633815 0.99354312
## 346  3.88 0.2543548 0.005865624 0.9143258 0.003832276240 0.99614565
## 347  4.68 0.2773309 0.007312614 0.9144981 0.008641568879 0.99143034
## 348  3.81 0.2564988 0.007634975 0.9145427 0.005086658983 0.99489761
## 349  3.73 0.2467391 0.005072464 0.9146165 0.002961871030 0.99700481
## 350  4.10 0.2688035 0.008377345 0.9147497 0.007080991714 0.99294448
## 351  4.27 0.2533821 0.003839122 0.9160207 0.003649232688 0.99633001
## 352  4.36 0.2607351 0.005822416 0.9161206 0.005625190422 0.99437564
## 353  4.09 0.2489908 0.003853211 0.9167842 0.003249212404 0.99672194
## 354  4.01 0.2469568 0.007930653 0.9169014 0.006631696008 0.99334071
## 355  4.05 0.2640998 0.008676790 0.9171271 0.007623147608 0.99239355
## 356  4.59 0.2615720 0.005264113 0.9171895 0.006140080086 0.99386839
## 357  3.66 0.2348315 0.003183521 0.9173047 0.002048820892 0.99790926
## 358  3.42 0.2466912 0.008823529 0.9177419 0.005140699694 0.99481895
## 359  4.22 0.2599192 0.004959589 0.9177898 0.004477848509 0.99551207
## 360  4.94 0.2668093 0.004102015 0.9179367 0.006457091926 0.99356881
## 361  4.64 0.2510933 0.005466472 0.9181141 0.006843983010 0.99314757
## 362  4.51 0.2715102 0.008032845 0.9186785 0.009551487288 0.99050889
## 363  3.91 0.2607290 0.005207398 0.9187675 0.003775473769 0.99620873
## 364  5.71 0.2754617 0.004045734 0.9194215 0.011026726946 0.98908693
## 365  4.51 0.2640928 0.005075222 0.9197452 0.005878896574 0.99412922
## 366  5.07 0.2730204 0.005374418 0.9198646 0.009083837275 0.99098476
## 367  3.75 0.2572197 0.005381166 0.9198856 0.003550270263 0.99642655
## 368  4.57 0.2536375 0.004850009 0.9202059 0.006053554353 0.99393622
## 369  4.42 0.2530099 0.008086253 0.9207161 0.009738747639 0.99026819
## 370  3.97 0.2609506 0.008387698 0.9207493 0.007405036775 0.99259804
## 371  3.93 0.2469478 0.006289308 0.9213162 0.005072511863 0.99489181
## 372  5.15 0.2610961 0.002695418 0.9223847 0.006460867712 0.99355328
## 373  4.13 0.2466896 0.006892799 0.9225543 0.006745438628 0.99322357
## 374  4.52 0.2541367 0.006654676 0.9226994 0.008466364928 0.99153523
## 375  4.84 0.2763721 0.007305773 0.9229829 0.011366419964 0.98873087
## 376  3.65 0.2452865 0.004759290 0.9235474 0.003198671572 0.99676048
## 377  4.23 0.2712573 0.006540698 0.9240000 0.006744774924 0.99327684
## 378  5.24 0.2714414 0.006803939 0.9248120 0.014198635233 0.98592501
## 379  3.86 0.2557078 0.006940639 0.9248227 0.005777781990 0.99420063
## 380  3.87 0.2579830 0.003427747 0.9248555 0.002974637223 0.99700111
## 381  3.86 0.2412606 0.005977178 0.9250000 0.004945501712 0.99500514
## 382  3.93 0.2654611 0.008318264 0.9254144 0.007758366051 0.99225241
## 383  4.45 0.2580995 0.004524887 0.9255051 0.005796188405 0.99419485
## 384  3.78 0.2543573 0.009077705 0.9258721 0.008191084695 0.99179188
## 385  4.32 0.2611967 0.005553565 0.9260700 0.006397663300 0.99360048
## 386  4.23 0.2504992 0.004719550 0.9263158 0.005249698639 0.99472159
## 387  4.10 0.2520901 0.002908033 0.9263456 0.003358094733 0.99661203
## 388  4.31 0.2601064 0.003900709 0.9268293 0.004770931273 0.99521582
## 389  4.48 0.2593000 0.005314275 0.9271357 0.007051767006 0.99294807
## 390  3.27 0.2264828 0.005666793 0.9272097 0.003120688249 0.99680965
## 391  4.83 0.2573770 0.002367942 0.9284065 0.005497506594 0.99449373
## 392  3.95 0.2618531 0.008143322 0.9286733 0.008196433959 0.99180573
## 393  4.44 0.2698696 0.006429720 0.9289474 0.008498260544 0.99153504
## 394  5.35 0.2816501 0.009246088 0.9294245 0.023522233258 0.97676391
## 395  4.29 0.2458151 0.005276565 0.9301713 0.006632159076 0.99333037
## 396  4.49 0.2556746 0.006173960 0.9307590 0.008935234325 0.99106392
## 397  4.18 0.2469791 0.004577078 0.9308437 0.005447484896 0.99451348
## 398  4.03 0.2462017 0.008786381 0.9311334 0.010409244489 0.98955884
## 399  4.64 0.2663725 0.005825243 0.9315726 0.009318498793 0.99071430
## 400  4.30 0.2579652 0.002353367 0.9315789 0.003936802533 0.99604079
## 401  3.84 0.2522835 0.007855316 0.9315866 0.007758074106 0.99221318
## 402  3.93 0.2550091 0.006921676 0.9317212 0.006999181541 0.99297732
## 403  5.37 0.2826466 0.006110335 0.9317477 0.015360026089 0.98481234
## 404  4.58 0.2623974 0.005886550 0.9328358 0.009327081067 0.99069243
## 405  4.14 0.2575100 0.006152733 0.9337838 0.007372421536 0.99261306
## 406  4.04 0.2540345 0.009791478 0.9338959 0.012916671264 0.98708648
## 407  4.34 0.2704830 0.005724508 0.9341935 0.007778572098 0.99224284
## 408  4.21 0.2618182 0.006060606 0.9346505 0.007706999790 0.99229081
## 409  4.51 0.2572983 0.002175884 0.9350811 0.004822277333 0.99515693
## 410  4.74 0.2728101 0.005099253 0.9351741 0.009357703092 0.99069059
## 411  4.09 0.2487346 0.003615329 0.9357045 0.004706651152 0.99525241
## 412  4.01 0.2417905 0.004219409 0.9362319 0.005061559854 0.99488350
## 413  4.01 0.2657031 0.006602427 0.9364005 0.007577577908 0.99242328
## 414  3.64 0.2408377 0.009568514 0.9367647 0.010235815139 0.98969804
## 415  4.18 0.2538813 0.009315068 0.9372497 0.013954428698 0.98605320
## 416  3.66 0.2465038 0.003915719 0.9374046 0.003721615593 0.99622973
## 417  3.60 0.2596084 0.010333575 0.9374069 0.011172522548 0.98882315
## 418  3.76 0.2509596 0.002558947 0.9396171 0.003295594729 0.99666260
## 419  4.64 0.2673356 0.008063071 0.9400749 0.015807495805 0.98427698
## 420  3.52 0.2540270 0.005739678 0.9406393 0.005026751176 0.99493035
## 421  3.72 0.2517673 0.004168932 0.9407407 0.004378912391 0.99557732
## 422  4.16 0.2543700 0.005098325 0.9410151 0.007217991527 0.99275295
## 423  3.69 0.2592254 0.004749726 0.9411765 0.004775005546 0.99519274
## 424  4.25 0.2524833 0.004334477 0.9417989 0.006844797806 0.99312160
## 425  4.13 0.2573610 0.007088332 0.9421488 0.010180737000 0.98980895
## 426  3.92 0.2435593 0.007491321 0.9423631 0.009670737250 0.99027033
## 427  4.57 0.2581845 0.005394345 0.9440994 0.010693672142 0.98930921
## 428  4.63 0.2591374 0.009502924 0.9443038 0.021475109639 0.97861383
## 429  3.45 0.2502250 0.004680468 0.9447853 0.004292528691 0.99565547
## 430  5.48 0.2879664 0.004729375 0.9451613 0.016745684464 0.98344839
## 431  3.75 0.2532290 0.007458614 0.9465541 0.009157657497 0.99080195
## 432  4.20 0.2606738 0.004683841 0.9465753 0.007657019738 0.99232576
## 433  3.42 0.2596695 0.007885843 0.9466667 0.007766641932 0.99219951
## 434  4.58 0.2591534 0.006251116 0.9468480 0.013062529813 0.98695546
## 435  4.03 0.2526259 0.004708439 0.9468665 0.006927575168 0.99303075
## 436  3.90 0.2503666 0.006048387 0.9471429 0.008080318141 0.99187161
## 437  5.14 0.2751453 0.003699137 0.9482379 0.012407988207 0.98767192
## 438  4.19 0.2540628 0.006681112 0.9486111 0.011278492901 0.98869894
## 439  3.94 0.2675273 0.004124081 0.9486824 0.005970223540 0.99401367
## 440  4.49 0.2721496 0.008714597 0.9488140 0.018507988334 0.98160299
## 441  4.55 0.2665103 0.006856730 0.9491094 0.014564451680 0.98548810
## 442  5.24 0.2690237 0.003768844 0.9491713 0.013780399427 0.98629383
## 443  3.87 0.2519351 0.005897530 0.9494799 0.008056841877 0.99189601
## 444  3.45 0.2432729 0.006040637 0.9495192 0.006165844011 0.99375978
## 445  4.43 0.2577561 0.007215007 0.9496124 0.014622533054 0.98539178
## 446  3.71 0.2439472 0.005319149 0.9499264 0.006598404575 0.99333166
## 447  3.39 0.2540250 0.007334526 0.9503205 0.007482015418 0.99246532
## 448  4.13 0.2640971 0.008029979 0.9507324 0.013878945570 0.98614435
## 449  4.38 0.2623457 0.005265069 0.9507576 0.010399043837 0.98960189
## 450  3.76 0.2338915 0.004914452 0.9508449 0.006555934384 0.99335102
## 451  3.84 0.2609692 0.008105727 0.9510086 0.011722567511 0.98826792
## 452  4.16 0.2659711 0.009701629 0.9512535 0.018557787826 0.98150902
## 453  3.56 0.2564056 0.009150805 0.9512938 0.011710238565 0.98825689
## 454  3.71 0.2505122 0.004283852 0.9515670 0.005617094916 0.99432706
## 455  4.14 0.2508929 0.008214286 0.9519890 0.015039609239 0.98493586
## 456  3.90 0.2541757 0.005809731 0.9525223 0.008588230050 0.99136887
## 457  4.00 0.2570755 0.005261248 0.9532967 0.008469243594 0.99149653
## 458  3.75 0.2454066 0.004729853 0.9533133 0.006540144133 0.99339083
## 459  4.33 0.2695636 0.004327443 0.9533679 0.008919309258 0.99108845
## 460  3.92 0.2596276 0.008316760 0.9534556 0.013436513584 0.98655682
## 461  3.80 0.2548411 0.003470954 0.9538905 0.005479591507 0.99447218
## 462  4.28 0.2616754 0.009449391 0.9539474 0.020308867319 0.97974964
## 463  3.63 0.2579700 0.008244778 0.9542587 0.011138848706 0.98882908
## 464  3.77 0.2502777 0.004072566 0.9545455 0.006024360594 0.99391716
## 465  4.60 0.2716049 0.007407407 0.9545455 0.017952005797 0.98214421
## 466  3.84 0.2526892 0.005104831 0.9557522 0.007777991410 0.99216914
## 467  5.15 0.2687789 0.004805981 0.9557823 0.017333577061 0.98276045
## 468  5.20 0.2667633 0.005617978 0.9562780 0.020508824494 0.97960689
## 469  3.79 0.2695495 0.011351351 0.9563380 0.020716173692 0.97936154
## 470  5.20 0.2650089 0.003730018 0.9566667 0.015445132294 0.98461607
## 471  4.11 0.2530909 0.006545455 0.9567643 0.012235387552 0.98772871
## 472  3.78 0.2526183 0.006500542 0.9569733 0.009746424171 0.99020061
## 473  3.53 0.2534060 0.005267938 0.9573820 0.006622743005 0.99331856
## 474  3.23 0.2349284 0.008289375 0.9574106 0.009329165824 0.99055016
## 475  4.22 0.2696689 0.008365967 0.9576380 0.017445371921 0.98261918
## 476  3.56 0.2572707 0.008389262 0.9576471 0.011631783042 0.98832894
## 477  4.90 0.2710512 0.007268215 0.9579930 0.022447738141 0.97769328
## 478  4.44 0.2750353 0.007228491 0.9580052 0.016715290236 0.98337260
## 479  4.36 0.2627041 0.004035957 0.9582773 0.009649315346 0.99033874
## 480  4.04 0.2454713 0.004066543 0.9584527 0.007962273850 0.99196716
## 481  4.38 0.2524237 0.004667864 0.9589041 0.011178768538 0.98878337
## 482  4.93 0.2636290 0.003661513 0.9592641 0.013501850367 0.98652645
## 483  4.83 0.2626134 0.006715064 0.9596577 0.020678813638 0.97939649
## 484  4.66 0.2654183 0.004884999 0.9597222 0.013889641224 0.98613982
## 485  3.38 0.2406730 0.004023409 0.9597990 0.005086646733 0.99482850
## 486  3.57 0.2728247 0.008218689 0.9608128 0.011810455331 0.98819984
## 487  3.91 0.2485814 0.004210141 0.9614815 0.007894707428 0.99203699
## 488  4.31 0.2520699 0.004599816 0.9618421 0.011162153735 0.98879287
## 489  4.41 0.2642896 0.006092098 0.9618806 0.014969530617 0.98505221
## 490  4.08 0.2558604 0.006541886 0.9631579 0.013476558515 0.98649181
## 491  4.01 0.2521194 0.005344637 0.9638728 0.010736109838 0.98920766
## 492  5.05 0.2740661 0.007004310 0.9642445 0.026111505183 0.97407482
## 493  3.91 0.2610120 0.009100837 0.9643367 0.018440424246 0.98156710
## 494  3.60 0.2574476 0.004597278 0.9644970 0.007224958085 0.99271955
## 495  3.57 0.2467724 0.004979712 0.9648855 0.007620957917 0.99229595
## 496  4.74 0.2622980 0.004847397 0.9653938 0.016224183389 0.98380211
## 497  3.86 0.2426090 0.006836659 0.9658754 0.013110151126 0.98679455
## 498  5.00 0.2911932 0.006392045 0.9660819 0.023173908876 0.97706567
## 499  3.17 0.2437488 0.004260048 0.9666667 0.005501839934 0.99440612
## 500  4.75 0.2628263 0.006480648 0.9668246 0.021549182384 0.97851533
## 501  3.64 0.2385787 0.005076142 0.9671151 0.008589158260 0.99130125
## 502  4.19 0.2637500 0.006964286 0.9676550 0.016631184703 0.98338174
## 503  4.46 0.2701642 0.005888651 0.9678663 0.016542808087 0.98350607
## 504  3.98 0.2531668 0.007238509 0.9685363 0.015686565835 0.98426639
## 505  3.94 0.2424298 0.004404478 0.9687034 0.009704181368 0.99019985
## 506  4.63 0.2656362 0.006290439 0.9693757 0.020258345099 0.97980084
## 507  4.04 0.2483601 0.003462099 0.9700855 0.009115318446 0.99080758
## 508  3.56 0.2549306 0.005478451 0.9701258 0.009106952394 0.99082479
## 509  3.58 0.2424187 0.004043374 0.9704510 0.007422073745 0.99247776
## 510  3.42 0.2370316 0.007753369 0.9705401 0.012505799168 0.98735655
## 511  3.72 0.2550832 0.007393715 0.9712230 0.014240367973 0.98570318
## 512  3.83 0.2589139 0.004936917 0.9712644 0.010198605316 0.98975138
## 513  4.52 0.2515505 0.004742795 0.9716749 0.015814701073 0.98414392
## 514  4.58 0.2753159 0.005516996 0.9717097 0.017889778891 0.98218988
## 515  3.82 0.2522928 0.008092070 0.9717682 0.017231077764 0.98271104
## 516  4.20 0.2644509 0.008128613 0.9717742 0.021546382095 0.97849349
## 517  3.59 0.2376274 0.003707136 0.9722222 0.007368997118 0.99251667
## 518  4.04 0.2634915 0.006338283 0.9726225 0.014989408116 0.98499942
## 519  4.13 0.2500926 0.007410152 0.9726776 0.019195155097 0.98075216
## 520  3.90 0.2550688 0.007060101 0.9731638 0.015723663173 0.98422664
## 521  4.11 0.2585117 0.004892966 0.9733542 0.012687619962 0.98727260
## 522  3.91 0.2625504 0.008427996 0.9733894 0.019484811401 0.98051757
## 523  3.86 0.2555814 0.003036256 0.9740791 0.009386354314 0.99054281
## 524  5.30 0.2878548 0.008284858 0.9744444 0.041218220209 0.95921213
## 525  4.08 0.2605225 0.007619739 0.9749652 0.019713635007 0.98028287
## 526  4.23 0.2594187 0.008432006 0.9750983 0.024515026196 0.97550812
## 527  3.88 0.2629028 0.005988024 0.9751131 0.013393293230 0.98657682
## 528  4.63 0.2613490 0.006432641 0.9751861 0.023072417430 0.97697062
## 529  4.10 0.2754897 0.006040637 0.9759358 0.015522503666 0.98451535
## 530  3.81 0.2596971 0.005725896 0.9761194 0.012554498827 0.98739697
## 531  4.32 0.2489656 0.006116208 0.9763469 0.018903661028 0.98103647
## 532  4.73 0.2865939 0.003870514 0.9766298 0.016693464180 0.98341916
## 533  3.79 0.2599641 0.004847397 0.9773371 0.011117569896 0.98882821
## 534  4.28 0.2564103 0.002868926 0.9775726 0.011635323369 0.98831046
## 535  3.41 0.2435384 0.005642519 0.9778130 0.009953237280 0.98992846
## 536  3.84 0.2607328 0.004626203 0.9786629 0.011455783741 0.98849251
## 537  3.95 0.2570025 0.005456530 0.9786629 0.013871381356 0.98607341
## 538  3.56 0.2467368 0.004350979 0.9792000 0.009367248360 0.99052654
## 539  3.94 0.2457080 0.003876685 0.9792593 0.011008449032 0.98888946
## 540  4.57 0.2480591 0.003512015 0.9793814 0.015559363071 0.98437040
## 541  4.16 0.2724464 0.005816067 0.9795082 0.016702096423 0.98332685
## 542  3.64 0.2533188 0.005455537 0.9796215 0.011595478888 0.98832095
## 543  4.93 0.2722746 0.009603415 0.9798578 0.045123812918 0.95517732
## 544  3.56 0.2674157 0.008146067 0.9802469 0.016815668641 0.98316971
## 545  3.76 0.2439693 0.008954678 0.9803030 0.022347601175 0.97753984
## 546  4.68 0.2737569 0.004777915 0.9805589 0.019970689522 0.98010547
## 547  4.17 0.2585551 0.007423502 0.9806630 0.022414358958 0.97757613
## 548  4.48 0.2667848 0.006731621 0.9809645 0.024156255725 0.97590506
## 549  3.92 0.2682663 0.009381202 0.9810496 0.025627955472 0.97442659
## 550  3.89 0.2563683 0.004002911 0.9815078 0.011664262809 0.98826502
## 551  3.70 0.2511377 0.003128555 0.9816934 0.011620639712 0.98827010
## 552  4.37 0.2642675 0.007633588 0.9818653 0.026362045935 0.97369163
## 553  4.80 0.2610643 0.006808816 0.9818841 0.030174226652 0.96990527
## 554  3.74 0.2592253 0.008261428 0.9821162 0.020061004818 0.97990129
## 555  3.69 0.2665701 0.008511409 0.9823009 0.020058626313 0.97993989
## 556  4.21 0.2613982 0.003218309 0.9825737 0.013469955012 0.98648841
## 557  3.77 0.2640102 0.005276565 0.9828080 0.013067761692 0.98688914
## 558  3.61 0.2576315 0.006252299 0.9830247 0.013719187361 0.98620999
## 559  3.37 0.2361473 0.008925251 0.9836334 0.018688394791 0.98112933
## 560  4.05 0.2714908 0.005077983 0.9836957 0.015267904237 0.98473620
## 561  3.57 0.2442678 0.006656805 0.9845201 0.014887704572 0.98498129
## 562  4.19 0.2696991 0.004118911 0.9845938 0.015184945190 0.98481280
## 563  3.92 0.2586328 0.005461593 0.9856115 0.015514155145 0.98442881
## 564  3.70 0.2466009 0.005587633 0.9856528 0.013909555333 0.98597248
## 565  3.20 0.2418884 0.004374772 0.9861592 0.009790433403 0.99005932
## 566  3.81 0.2516944 0.004945961 0.9867257 0.013786498869 0.98611793
## 567  4.44 0.2621907 0.005094614 0.9868421 0.020589906424 0.97940837
## 568  4.38 0.2834196 0.006425129 0.9869622 0.023492742303 0.97663883
## 569  4.58 0.2841108 0.007891968 0.9881094 0.033079337748 0.96715457
## 570  3.28 0.2516791 0.007835821 0.9882943 0.015847961717 0.98403726
## 571  3.82 0.2719765 0.007157277 0.9885387 0.019586402994 0.98042888
## 572  4.41 0.2579763 0.006745670 0.9886507 0.026841661296 0.97315832
## 573  4.14 0.2580999 0.003660992 0.9890710 0.015332231160 0.98460560
## 574  3.67 0.2500915 0.005126327 0.9893455 0.013705015922 0.98618349
## 575  3.99 0.2564242 0.006695621 0.9902235 0.021331919166 0.97861165
## 576  4.15 0.2643970 0.006519377 0.9903448 0.022680509652 0.97732084
## 577  4.28 0.2684527 0.006014215 0.9905787 0.022707381209 0.97732223
## 578  4.82 0.2658000 0.008400000 0.9906292 0.043333455363 0.95684960
## 579  3.56 0.2435156 0.004478447 0.9907121 0.012289242324 0.98756689
## 580  4.33 0.2591441 0.007315289 0.9910486 0.028921876893 0.97108582
## 581  3.68 0.2452072 0.008581340 0.9910847 0.024217598178 0.97564844
## 582  4.56 0.2581646 0.005838351 0.9924051 0.027343500692 0.97265749
## 583  4.87 0.2912991 0.005535375 0.9929988 0.030013089393 0.97023821
## 584  4.01 0.2582306 0.004598124 0.9930168 0.016911882312 0.98302069
## 585  4.26 0.2670921 0.007474932 0.9932157 0.029110532927 0.97094291
## 586  4.31 0.2650709 0.005851064 0.9933599 0.023801403794 0.97620968
## 587  4.26 0.2654616 0.006911217 0.9934726 0.027016982091 0.97301223
## 588  5.42 0.2777379 0.006463196 0.9936441 0.047218828349 0.95313184
## 589  4.83 0.2687311 0.002847482 0.9939394 0.022311721450 0.97773045
## 590  4.13 0.2717172 0.004040404 0.9940476 0.020310738324 0.97969944
## 591  4.93 0.2749196 0.007859950 0.9940898 0.044419419175 0.95584705
## 592  5.00 0.2796731 0.006041222 0.9942463 0.035927656504 0.96430595
## 593  4.07 0.2467532 0.008048290 0.9942857 0.029712853780 0.97018556
## 594  5.17 0.2787001 0.007064641 0.9944383 0.045322027179 0.95499998
## 595  4.23 0.2641374 0.006979241 0.9945726 0.027369163471 0.97264892
## 596  4.39 0.2588064 0.005840482 0.9946950 0.025714457190 0.97426818
## 597  4.71 0.2751595 0.007264352 0.9951632 0.037057762681 0.96313168
## 598  3.55 0.2471910 0.009025603 0.9952830 0.025656061795 0.97420680
## 599  3.45 0.2566340 0.010905125 0.9953560 0.031448372851 0.96849071
## 600  4.06 0.2631394 0.008964669 0.9959839 0.033796286034 0.96623666
## 601  4.85 0.2800395 0.007312253 0.9960422 0.040453489946 0.95980951
## 602  4.60 0.2558477 0.002901179 0.9962217 0.020449262910 0.97949706
## 603  5.60 0.2874776 0.006618962 0.9968880 0.054420936883 0.94608974
## 604  3.85 0.2648328 0.005933118 0.9970370 0.019728064200 0.98023339
## 605  3.83 0.2579531 0.004908199 0.9971056 0.017522558099 0.98239375
## 606  3.93 0.2669461 0.008017493 0.9971791 0.027799968187 0.97221680
## 607  4.32 0.2636232 0.005413208 0.9973118 0.024136440592 0.97585827
## 608  4.29 0.2604693 0.006317690 0.9973154 0.027116643852 0.97287135
## 609  4.26 0.2621777 0.003760745 0.9973719 0.020222177832 0.97973842
## 610  3.96 0.2491773 0.005484461 0.9985653 0.020442765963 0.97943680
## 611  3.88 0.2691892 0.007747748 0.9985856 0.026504571773 0.97351341
## 612  4.18 0.2703039 0.005686867 0.9986468 0.023602563509 0.97641908
## 613  4.27 0.2615637 0.005935325 1.0000000 0.026516077350 0.97346789
## 614  3.62 0.2504071 0.006151619 1.0000000 0.018848690990 0.98101885
## 615  3.68 0.2572920 0.006301764 1.0000000 0.019891568741 0.98001674
## 616  3.47 0.2629342 0.007021434 1.0000000 0.019410080660 0.98051871
## 617  4.81 0.2757223 0.005109232 1.0011669 0.032115329913 0.96802547
## 618  4.66 0.2704063 0.009708738 1.0012315 0.054985997973 0.94527224
## 619  4.36 0.2509492 0.003435184 1.0012970 0.021104771769 0.97879333
## 620  3.48 0.2514461 0.007230658 1.0015873 0.020847430796 0.97901757
## 621  3.20 0.2562106 0.009640341 1.0017182 0.025153968907 0.97473261
## 622  4.97 0.2815378 0.008610332 1.0025543 0.055991841013 0.94440634
## 623  4.30 0.2576743 0.006500542 1.0026810 0.030750515247 0.96921913
## 624  4.28 0.2762946 0.006271278 1.0027100 0.028790553503 0.97129695
## 625  4.08 0.2536612 0.006689568 1.0027894 0.028033041293 0.97188559
## 626  3.74 0.2521422 0.005834093 1.0031153 0.020527943939 0.97934765
## 627  3.48 0.2468077 0.005472455 1.0031746 0.017241584419 0.98259229
## 628  2.84 0.2439817 0.005922812 1.0037383 0.015549694734 0.98422953
## 629  4.55 0.2677526 0.005307467 1.0037736 0.030160222263 0.96988904
## 630  4.43 0.2540462 0.005091835 1.0039113 0.027835989844 0.97209984
## 631  4.19 0.2614379 0.007262164 1.0039894 0.032694283222 0.96730200
## 632  4.20 0.2601019 0.004368402 1.0041040 0.023185181735 0.97675817
## 633  3.69 0.2547557 0.009884372 1.0046296 0.036353640135 0.96357075
## 634  4.95 0.2759417 0.004975124 1.0046784 0.036041259793 0.96412957
## 635  4.49 0.2717584 0.004795737 1.0051086 0.028258292029 0.97179874
## 636  4.02 0.2840070 0.004745167 1.0053191 0.039108724658 0.96108251
## 637  3.83 0.2550505 0.005050505 1.0058824 0.021337463565 0.97854936
## 638  3.64 0.2716619 0.006569602 1.0059791 0.023144595399 0.97683847
## 639  4.50 0.2617426 0.006274650 1.0060533 0.035031081841 0.96499090
## 640  3.70 0.2490393 0.007319305 1.0061350 0.026168825835 0.97368421
## 641  3.69 0.2552102 0.005301645 1.0061633 0.020760687862 0.97911826
## 642  4.33 0.2562786 0.006599450 1.0064683 0.033815008205 0.96614628
## 643  4.51 0.2723022 0.006834532 1.0065274 0.037720145967 0.96240205
## 644  3.92 0.2641476 0.007342407 1.0069252 0.029673636720 0.97030450
## 645  3.51 0.2594052 0.007524185 1.0075529 0.024450531312 0.97545441
## 646  5.00 0.2833565 0.005385685 1.0076004 0.040503753776 0.95975703
## 647  3.60 0.2536028 0.005427662 1.0076104 0.020819914048 0.97904130
## 648  4.45 0.2658523 0.004131489 1.0076239 0.027856287385 0.97214725
## 649  4.29 0.2679469 0.004486719 1.0078329 0.027499065214 0.97250669
## 650  4.96 0.2673198 0.005073478 1.0081585 0.039259476080 0.96085462
## 651  3.35 0.2412264 0.005171777 1.0086059 0.018390306690 0.98138546
## 652  3.92 0.2513651 0.006734620 1.0095109 0.028887560996 0.97098788
## 653  4.04 0.2644716 0.008497079 1.0098177 0.039013096543 0.96100996
## 654  3.90 0.2473022 0.002877698 1.0099291 0.038132983302 0.96162185
## 655  4.30 0.2684742 0.005670743 1.0107672 0.031247881185 0.96877806
## 656  3.58 0.2519786 0.008098656 1.0108527 0.029474957804 0.97038291
## 657  3.43 0.2543876 0.007943839 1.0109204 0.026382480560 0.97347965
## 658  4.20 0.2750267 0.006234414 1.0110041 0.031852860280 0.96821820
## 659  3.60 0.2400898 0.007479432 1.0119760 0.028179010125 0.97158700
## 660  4.08 0.2617980 0.006280280 1.0126761 0.031052879155 0.96890495
## 661  3.75 0.2514461 0.006326826 1.0133929 0.026575525222 0.97327540
## 662  3.31 0.2513330 0.006618864 1.0136752 0.022312860259 0.97750942
## 663  4.70 0.2813808 0.003138075 1.0144578 0.062775512645 0.93762635
## 664  3.61 0.2598093 0.004583792 1.0152905 0.043765112987 0.95610328
## 665  3.73 0.2612340 0.007845934 1.0158730 0.033306121521 0.96662525
## 666  3.71 0.2550175 0.006260357 1.0165165 0.027544480044 0.97232001
## 667  5.25 0.2875245 0.007139033 1.0165198 0.064791068394 0.93570809
## 668  3.55 0.2639667 0.005243175 1.0169753 0.041584657346 0.95832725
## 669  3.49 0.2386696 0.003472222 1.0177994 0.045122944097 0.95441138
## 670  4.47 0.2698580 0.005435736 1.0184275 0.037978494536 0.96208133
## 671  4.28 0.2629348 0.006357055 1.0185923 0.038457657132 0.96153347
## 672  5.29 0.2922671 0.010720562 1.0187638 0.100883783439 0.89996456
## 673  4.25 0.2546878 0.004915347 1.0188172 0.032537255935 0.96735877
## 674  4.09 0.2716628 0.006125023 1.0188172 0.034850303830 0.96518073
## 675  3.28 0.2469251 0.006522549 1.0188679 0.024223216701 0.97555105
## 676  3.50 0.2609162 0.005010737 1.0196375 0.050587604862 0.94927843
## 677  3.98 0.2505084 0.008689222 1.0198300 0.046176195561 0.95369891
## 678  3.69 0.2442326 0.006863680 1.0200308 0.031050953211 0.96872453
## 679  4.77 0.2781955 0.004654493 1.0206061 0.043544445825 0.95663681
## 680  4.16 0.2562366 0.003385602 1.0208333 0.052832825551 0.94706183
## 681  3.91 0.2536127 0.006322254 1.0209205 0.033041309174 0.96682133
## 682  3.71 0.2518328 0.004215543 1.0209895 0.046938625399 0.95282852
## 683  4.32 0.2598439 0.005988024 1.0215463 0.039464891770 0.96049505
## 684  3.66 0.2610176 0.007524185 1.0217077 0.033959052067 0.96594826
## 685  4.07 0.2533821 0.006764168 1.0222222 0.038802128446 0.96107831
## 686  4.08 0.2548451 0.004890418 1.0222531 0.033756995990 0.96611478
## 687  3.97 0.2613616 0.006570542 1.0236769 0.036833981185 0.96309770
## 688  4.92 0.2800425 0.002481390 1.0243619 0.102683707009 0.89804790
## 689  3.56 0.2530942 0.004125561 1.0245023 0.067648093163 0.93207603
## 690  4.24 0.2574850 0.005806569 1.0245566 0.039138276174 0.96078207
## 691  4.54 0.2687432 0.004889533 1.0249344 0.043059451321 0.95699844
## 692  3.38 0.2508300 0.005901881 1.0255591 0.034041202706 0.96571422
## 693  4.10 0.2637623 0.005833030 1.0257143 0.038639952400 0.96131789
## 694  3.52 0.2573991 0.009327354 1.0258359 0.042744991977 0.95713013
## 695  3.50 0.2526723 0.003870254 1.0265340 0.076100374423 0.92357730
## 696  4.01 0.2687161 0.004520796 1.0266854 0.071220525425 0.92889436
## 697  4.05 0.2497721 0.003646308 1.0288600 0.059880677266 0.93988647
## 698  4.51 0.2473409 0.004687218 1.0289673 0.042459452413 0.95737578
## 699  3.92 0.2654243 0.007056269 1.0291667 0.041767301517 0.95819514
## 700  3.83 0.2628415 0.004371585 1.0306834 0.086961066592 0.91304770
## 701  3.58 0.2662679 0.006887801 1.0309598 0.041166946945 0.95875832
## 702  3.28 0.2498672 0.006903877 1.0314010 0.033869192422 0.96587203
## 703  3.13 0.2401675 0.009468318 1.0317460 0.039453255535 0.96019823
## 704  3.81 0.2555638 0.005934718 1.0319767 0.040304203701 0.95953113
## 705  3.30 0.2481357 0.007643550 1.0319865 0.034470290763 0.96526749
## 706  4.00 0.2516544 0.006066176 1.0320700 0.041007781336 0.95881529
## 707  4.26 0.2626967 0.002145923 1.0320856 0.109406510512 0.89074566
## 708  4.41 0.2619734 0.006661865 1.0322165 0.052904620246 0.94709500
## 709  3.47 0.2475038 0.006528418 1.0322581 0.035064676066 0.96466928
## 710  4.25 0.2658498 0.006038004 1.0322997 0.046879797454 0.95311902
## 711  3.16 0.2433126 0.004580432 1.0323741 0.074190942296 0.92519522
## 712  4.13 0.2649607 0.007858150 1.0327869 0.053509254789 0.94649986
## 713  3.46 0.2625251 0.007469484 1.0332278 0.039559318248 0.96031483
## 714  3.12 0.2500929 0.005574136 1.0335008 0.070990429395 0.92857112
## 715  2.95 0.2402562 0.003110704 1.0335821 0.063062013981 0.93615623
## 716  3.39 0.2559513 0.006085556 1.0336000 0.052330042614 0.94743512
## 717  3.26 0.2464724 0.004398021 1.0340136 0.082406591612 0.91704520
## 718  3.86 0.2624113 0.007274050 1.0347323 0.045622864716 0.95429749
## 719  4.47 0.2661517 0.002282303 1.0350195 0.125851456435 0.87453533
## 720  3.32 0.2429907 0.005131024 1.0351171 0.062311069723 0.93718881
## 721  4.32 0.2581799 0.005363848 1.0358056 0.048771542516 0.95114348
## 722  3.80 0.2518052 0.005184225 1.0359820 0.052721808243 0.94704014
## 723  3.42 0.2538879 0.005605787 1.0360065 0.068130198569 0.93158274
## 724  4.15 0.2663693 0.005173952 1.0362416 0.062582616598 0.93744692
## 725  3.85 0.2527813 0.007112894 1.0364656 0.045694367390 0.95412017
## 726  4.22 0.2482257 0.003821656 1.0370879 0.063203934645 0.93655377
## 727  3.65 0.2589547 0.005116959 1.0375375 0.087858727441 0.91201046
## 728  4.15 0.2603238 0.005275605 1.0375522 0.054039465374 0.94588171
## 729  4.11 0.2674986 0.007777175 1.0376851 0.056395452152 0.94363017
## 730  4.11 0.2765353 0.007454739 1.0390625 0.056331250017 0.94378861
## 731  3.71 0.2589863 0.005751258 1.0392749 0.064721386883 0.93512865
## 732  3.93 0.2747860 0.008737518 1.0394366 0.058963615953 0.94113357
## 733  3.36 0.2581937 0.008557902 1.0394737 0.044650274307 0.95516627
## 734  4.36 0.2622029 0.002903284 1.0395778 0.130397842093 0.86985323
## 735  3.81 0.2734236 0.007125045 1.0402299 0.055809539343 0.94423592
## 736  4.05 0.2733970 0.006821282 1.0402542 0.056295940579 0.94377591
## 737  4.73 0.2731357 0.001976640 1.0407643 0.153077658361 0.84772112
## 738  4.91 0.2676184 0.003433321 1.0409146 0.087871742636 0.91238343
## 739  3.66 0.2636584 0.006272764 1.0410509 0.066797151011 0.93312002
## 740  3.56 0.2576540 0.003504242 1.0410742 0.106274275645 0.89348411
## 741  4.24 0.2658273 0.005215827 1.0412234 0.068818335675 0.93121966
## 742  3.80 0.2585419 0.005846885 1.0412371 0.060914591886 0.93893503
## 743  3.91 0.2506431 0.005696435 1.0412518 0.050012686134 0.94975001
## 744  3.79 0.2548234 0.005278486 1.0415430 0.072811333787 0.92697936
## 745  3.97 0.2593612 0.003487518 1.0416069 0.125203141572 0.87481137
## 746  4.45 0.2823280 0.013974881 1.0428922 0.135047120033 0.86560043
## 747  3.51 0.2625789 0.005229937 1.0432692 0.118201647197 0.88176393
## 748  4.27 0.2690233 0.006511628 1.0433604 0.060270245706 0.93977599
## 749  3.62 0.2661204 0.009084432 1.0434783 0.056695271447 0.94324527
## 750  4.52 0.2720327 0.004442075 1.0440806 0.106638167303 0.89373952
## 751  4.77 0.2788717 0.003725386 1.0441718 0.146091732134 0.85479555
## 752  3.38 0.2617090 0.007865570 1.0447020 0.051034037266 0.94879888
## 753  3.40 0.2551298 0.005084438 1.0450161 0.115153804124 0.88454546
## 754  3.67 0.2564810 0.005331862 1.0453125 0.100822764741 0.89899298
## 755  3.54 0.2540687 0.010849910 1.0461538 0.070839402743 0.92896562
## 756  4.04 0.2743805 0.005170262 1.0464481 0.138531229532 0.86199739
## 757  3.84 0.2633752 0.008438061 1.0467153 0.061525589918 0.93840237
## 758  3.59 0.2511038 0.004966887 1.0479233 0.115496104066 0.88415169
## 759  3.58 0.2641240 0.007307075 1.0479876 0.062548032247 0.93733955
## 760  4.50 0.2644716 0.006018764 1.0492424 0.069094764492 0.93092466
## 761  3.31 0.2547237 0.005882353 1.0492611 0.115778268767 0.88389058
## 762  4.71 0.2708970 0.003235664 1.0493381 0.171038136595 0.82974534
## 763  4.35 0.2652434 0.007960919 1.0503979 0.077398506135 0.92264441
## 764  3.70 0.2559653 0.004338395 1.0507726 0.140278282229 0.85954870
## 765  4.20 0.2597865 0.002491103 1.0507757 0.150385492371 0.84972117
## 766  3.34 0.2612891 0.003095412 1.0508197 0.104858647330 0.89483167
## 767  5.15 0.2736861 0.005097557 1.0509413 0.084368826756 0.91591258
## 768  3.12 0.2376549 0.005508558 1.0520446 0.105627240817 0.89348700
## 769  3.84 0.2548305 0.005286183 1.0520833 0.110932240884 0.88887808
## 770  3.54 0.2566225 0.005150846 1.0522876 0.139194806862 0.86060996
## 771  3.82 0.2562194 0.003994916 1.0527066 0.149110130466 0.85077071
## 772  3.79 0.2622359 0.009391367 1.0527066 0.072873263007 0.92703894
## 773  3.72 0.2722988 0.005620015 1.0527108 0.158877771931 0.84157112
## 774  3.48 0.2654804 0.008185053 1.0528771 0.063584110536 0.93629858
## 775  3.72 0.2635025 0.005637389 1.0529501 0.139344873189 0.86074508
## 776  4.01 0.2772679 0.005294741 1.0530612 0.171700891895 0.82906733
## 777  3.57 0.2528569 0.006167241 1.0537125 0.094058086443 0.90562419
## 778  4.13 0.2784993 0.008297258 1.0537931 0.076861818944 0.92331978
## 779  4.00 0.2586052 0.004325104 1.0539359 0.157153117416 0.84291170
## 780  3.73 0.2672532 0.006598845 1.0545455 0.109303412716 0.89076929
## 781  4.60 0.2704639 0.004656994 1.0546584 0.131027079790 0.86940016
## 782  3.72 0.2779909 0.010987095 1.0548341 0.085238624750 0.91492564
## 783  4.31 0.2668845 0.004357298 1.0550336 0.169724931996 0.83076166
## 784  4.62 0.2759111 0.008177778 1.0550796 0.093739983065 0.90653455
## 785  3.85 0.2619652 0.008158086 1.0552239 0.068890476845 0.93099644
## 786  3.69 0.2737711 0.009149623 1.0553977 0.071216615468 0.92882020
## 787  4.69 0.2591049 0.003623845 1.0555556 0.141799930652 0.85840715
## 788  4.03 0.2744818 0.005539671 1.0561010 0.170691447110 0.82995853
## 789  4.41 0.2770999 0.007957560 1.0567282 0.086365382822 0.91385729
## 790  3.47 0.2467001 0.008365867 1.0568182 0.060845766789 0.93877594
## 791  3.05 0.2492260 0.004643963 1.0569260 0.122039677731 0.87724241
## 792  3.53 0.2561111 0.008055556 1.0569307 0.063984341730 0.93576313
## 793  3.85 0.2748663 0.009625668 1.0572246 0.080933614956 0.91916649
## 794  4.45 0.2624932 0.003968970 1.0578187 0.175811282457 0.82457929
## 795  4.22 0.2594925 0.007018175 1.0586630 0.076866771058 0.92302766
## 796  3.82 0.2693133 0.007510730 1.0590778 0.085075563000 0.91493838
## 797  3.56 0.2562408 0.005506608 1.0591054 0.157743907562 0.84207148
## 798  4.38 0.2598625 0.004885993 1.0594595 0.125673935106 0.87438881
## 799  3.48 0.2433068 0.002894356 1.0595041 0.127957493582 0.87122342
## 800  3.78 0.2534529 0.008071749 1.0595930 0.070730620919 0.92900988
## 801  3.83 0.2592861 0.004348614 1.0600293 0.174188087335 0.82583862
## 802  4.18 0.2829320 0.007967422 1.0602094 0.090045540495 0.91024411
## 803  4.21 0.2598182 0.004000000 1.0605227 0.188567415525 0.81168173
## 804  3.08 0.2512112 0.008612955 1.0608108 0.064670906243 0.93492232
## 805  4.29 0.2655914 0.003405018 1.0625000 0.200847866783 0.79965771
## 806  3.72 0.2615188 0.004917137 1.0627737 0.182224449636 0.81786128
## 807  4.23 0.2669627 0.005683837 1.0630872 0.145987351496 0.85428655
## 808  3.88 0.2668415 0.006857604 1.0631068 0.115953002670 0.88411259
## 809  3.24 0.2548868 0.005031933 1.0642202 0.155963899346 0.84358404
## 810  3.80 0.2663685 0.007155635 1.0642336 0.112018732624 0.88799967
## 811  4.33 0.2625412 0.006590992 1.0649867 0.091855321669 0.90810414
## 812  4.73 0.2859971 0.003959683 1.0650307 0.251118102443 0.75057796
## 813  4.60 0.2776109 0.004064322 1.0650510 0.237963533727 0.76329430
## 814  4.60 0.2669246 0.007209425 1.0652709 0.098281104898 0.90180570
## 815  3.25 0.2498584 0.007554297 1.0653710 0.097104604990 0.90240960
## 816  3.25 0.2588536 0.006389193 1.0663350 0.177036568642 0.82275950
## 817  3.77 0.2704000 0.007822222 1.0664740 0.105490181897 0.89456774
## 818  3.93 0.2592457 0.006041743 1.0668605 0.150750733570 0.84922605
## 819  3.34 0.2517470 0.004229496 1.0670017 0.155950552825 0.84347160
## 820  4.50 0.2744294 0.007132668 1.0679739 0.104355577382 0.89585612
## 821  4.09 0.2675055 0.003829322 1.0681818 0.209096666519 0.79138903
## 822  4.18 0.2660371 0.007127584 1.0681818 0.099338722710 0.90066476
## 823  3.71 0.2499540 0.005153690 1.0683891 0.187230177735 0.81241680
## 824  3.46 0.2591447 0.006369427 1.0685358 0.187111937684 0.81280161
## 825  4.17 0.2697277 0.002618715 1.0687332 0.192225748209 0.80823712
## 826  2.99 0.2492240 0.005294869 1.0690909 0.153895673311 0.84530908
## 827  3.82 0.2608221 0.006547836 1.0695652 0.154910516661 0.84507862
## 828  4.76 0.2773578 0.004499640 1.0700246 0.248873314107 0.75241007
## 829  3.39 0.2482569 0.008073394 1.0702875 0.082613721280 0.91691576
## 830  4.38 0.2630053 0.003987674 1.0706806 0.235944611299 0.76460239
## 831  3.81 0.2695857 0.010132079 1.0712209 0.101362861080 0.89863840
## 832  4.26 0.2752899 0.004817128 1.0720000 0.249322360775 0.75172288
## 833  4.53 0.2680843 0.005270811 1.0726817 0.198941347226 0.80161649
## 834  4.37 0.2843102 0.004824875 1.0729443 0.264261469045 0.73724455
## 835  3.55 0.2566903 0.006553796 1.0733138 0.192772366401 0.80707526
## 836  4.01 0.2765542 0.004618117 1.0737589 0.233104940777 0.76777427
## 837  3.41 0.2571217 0.009988901 1.0737834 0.089058977653 0.91063507
## 838  3.10 0.2496324 0.006250000 1.0738916 0.187251166474 0.81207904
## 839  4.14 0.2625678 0.009764919 1.0739191 0.116077269953 0.88386995
## 840  4.14 0.2714311 0.009445732 1.0739247 0.111924753588 0.88816807
## 841  3.91 0.2567459 0.005343308 1.0740741 0.220460088711 0.77958486
## 842  4.65 0.2750890 0.006761566 1.0740741 0.124626044289 0.87567440
## 843  4.19 0.2812278 0.004613201 1.0744828 0.250521365337 0.75069728
## 844  3.86 0.2784969 0.004674577 1.0744986 0.222775273778 0.77804991
## 845  3.79 0.2594348 0.006563355 1.0746951 0.184475846474 0.81551741
## 846  3.98 0.2668121 0.005634315 1.0750708 0.240335398825 0.76020573
## 847  3.49 0.2516364 0.008909091 1.0751174 0.087798481927 0.91179723
## 848  3.55 0.2568724 0.007828145 1.0752864 0.120006002137 0.87970080
## 849  3.35 0.2458533 0.004607446 1.0755034 0.184320099022 0.81487842
## 850  4.45 0.2667021 0.004075846 1.0755442 0.261369391163 0.73941848
## 851  3.87 0.2627331 0.006814921 1.0765832 0.182046663973 0.81805292
## 852  3.27 0.2561879 0.004878049 1.0767974 0.185523483550 0.81402786
## 853  3.31 0.2626720 0.005430847 1.0770465 0.200003692698 0.79987894
## 854  4.00 0.2590318 0.004335260 1.0772532 0.236814847970 0.76334582
## 855  3.61 0.2578675 0.008145131 1.0774092 0.111740564780 0.88798121
## 856  3.86 0.2734306 0.011957095 1.0775862 0.132760717550 0.86737128
## 857  3.49 0.2505967 0.007343492 1.0776398 0.145504284668 0.85402857
## 858  4.00 0.2618572 0.005815010 1.0776978 0.235042611650 0.76526407
## 859  3.45 0.2600469 0.006127230 1.0782030 0.225683648577 0.77427809
## 860  3.48 0.2520766 0.005958830 1.0782209 0.219534398875 0.78010188
## 861  4.33 0.2741792 0.006388642 1.0784314 0.215743182621 0.78497882
## 862  4.12 0.2719455 0.006449301 1.0786517 0.235557916127 0.76515113
## 863  4.35 0.2792520 0.004274265 1.0788436 0.271233063115 0.73002511
## 864  3.81 0.2753927 0.004450262 1.0790021 0.226136789557 0.77450862
## 865  4.02 0.2768683 0.010142349 1.0790960 0.121208118263 0.87896070
## 866  5.26 0.2939929 0.009363958 1.0791527 0.172508620402 0.82835483
## 867  5.00 0.2886107 0.005641749 1.0792193 0.250345149501 0.75116888
## 868  4.11 0.2714542 0.004129264 1.0799458 0.250147993430 0.75057551
## 869  3.68 0.2642572 0.005266981 1.0802469 0.237138748379 0.76308319
## 870  3.47 0.2593931 0.005419075 1.0808241 0.223944775582 0.77593089
## 871  3.10 0.2614086 0.012037370 1.0814941 0.105054842516 0.89464538
## 872  3.48 0.2589026 0.007630814 1.0820189 0.197981739778 0.80188037
## 873  4.69 0.2791026 0.005829359 1.0823245 0.259709204160 0.74146734
## 874  3.50 0.2547529 0.007604563 1.0830619 0.180869012018 0.81880987
## 875  4.53 0.2657168 0.006055209 1.0831234 0.189128901794 0.81117693
## 876  3.75 0.2678963 0.007121987 1.0835821 0.250862344807 0.74960116
## 877  3.74 0.2565908 0.004153124 1.0835913 0.229080343127 0.77074323
## 878  3.46 0.2597070 0.008058608 1.0837438 0.182382591330 0.81744664
## 879  3.11 0.2550117 0.005959906 1.0846561 0.214817670661 0.78465577
## 880  3.52 0.2627472 0.009617129 1.0848485 0.112843815484 0.88692354
## 881  4.04 0.2657559 0.008014572 1.0852601 0.138199916177 0.86177703
## 882  4.28 0.2713568 0.003589375 1.0852820 0.269562780607 0.73124119
## 883  2.96 0.2365170 0.006275740 1.0856031 0.208559689645 0.79007782
## 884  3.65 0.2594624 0.007313951 1.0864198 0.230011077837 0.76998439
## 885  4.73 0.2739100 0.007208158 1.0864799 0.150176408615 0.85011142
## 886  3.55 0.2608219 0.007488584 1.0866142 0.239510446919 0.76053252
## 887  3.63 0.2542712 0.006906579 1.0868217 0.237684790851 0.76211655
## 888  3.85 0.2525745 0.006323397 1.0885341 0.252031889767 0.74780503
## 889  3.87 0.2619693 0.005058717 1.0886427 0.275441052980 0.72482290
## 890  4.56 0.2759248 0.006872852 1.0886628 0.210133019349 0.79049701
## 891  4.03 0.2729230 0.005741970 1.0893617 0.305475895840 0.69545897
## 892  4.04 0.2711324 0.004961900 1.0898551 0.292607196813 0.70817251
## 893  4.03 0.2656945 0.004268184 1.0909091 0.279339815422 0.72111277
## 894  4.45 0.2724359 0.005341880 1.0911425 0.333271884781 0.66789228
## 895  3.71 0.2493766 0.004987531 1.0914454 0.267960404577 0.73161774
## 896  4.23 0.2757640 0.010305615 1.0918775 0.153758760272 0.84642435
## 897  3.83 0.2661871 0.007378712 1.0922619 0.276670704477 0.72375624
## 898  4.64 0.2783929 0.006607143 1.0925700 0.271750908165 0.72927537
## 899  4.61 0.2803606 0.003535443 1.0931990 0.323735800719 0.67772233
## 900  3.64 0.2713801 0.006070345 1.0934150 0.290585446753 0.71000336
## 901  4.08 0.2847753 0.010303784 1.0934449 0.153059425976 0.84727973
## 902  4.16 0.2633977 0.003645643 1.0934959 0.285361148277 0.71502426
## 903  3.47 0.2606083 0.010562739 1.0939908 0.127270813171 0.87240504
## 904  4.03 0.2509431 0.003412969 1.0945559 0.272216832029 0.72749538
## 905  3.77 0.2586207 0.004355717 1.0945736 0.267923255053 0.73201398
## 906  4.10 0.2568874 0.005108557 1.0946502 0.314187829781 0.68604269
## 907  4.25 0.2783597 0.009231315 1.0946822 0.157939180151 0.84231202
## 908  3.36 0.2530391 0.006732747 1.0949367 0.280590657039 0.71904471
## 909  3.82 0.2702462 0.011951481 1.0950292 0.159041036963 0.84094778
## 910  4.12 0.2770113 0.010392403 1.0950413 0.155394076004 0.84477792
## 911  3.53 0.2584249 0.004029304 1.0950872 0.237773763092 0.76190645
## 912  3.86 0.2725981 0.007103534 1.0953058 0.326097806112 0.67478129
## 913  3.95 0.2621289 0.005068791 1.0959097 0.306479857105 0.69385747
## 914  3.34 0.2491707 0.008293402 1.0960630 0.211347243721 0.78804029
## 915  3.32 0.2468412 0.006592199 1.0962733 0.277400937216 0.72190655
## 916  4.25 0.2731941 0.009383853 1.0964333 0.156626629990 0.84348512
## 917  4.02 0.2742052 0.003973988 1.0972818 0.287971613521 0.71277314
## 918  3.99 0.2682310 0.004512635 1.0975610 0.301132122588 0.69942090
## 919  3.70 0.2700018 0.012417953 1.0978916 0.163713351549 0.83623580
## 920  3.26 0.2453518 0.006379876 1.0987868 0.278476143721 0.72068674
## 921  4.06 0.2651855 0.006987995 1.0992908 0.319640361990 0.68090616
## 922  4.20 0.2676231 0.003896564 1.0994624 0.313188533300 0.68743928
## 923  3.50 0.2572418 0.008198215 1.0996785 0.269358830117 0.73048767
## 924  4.31 0.2619392 0.005581972 1.1000000 0.352650041876 0.64797338
## 925  3.32 0.2640591 0.012076424 1.1000000 0.143690315826 0.85601141
## 926  5.07 0.2925146 0.005131835 1.1001164 0.421981854874 0.58037857
## 927  4.60 0.2785614 0.005641749 1.1005025 0.388126119210 0.61342705
## 928  3.74 0.2608300 0.004550419 1.1010720 0.288714090467 0.71130399
## 929  3.74 0.2734996 0.003976862 1.1011561 0.271049047671 0.72941097
## 930  4.37 0.2713459 0.005246020 1.1014304 0.368188830137 0.63289921
## 931  2.94 0.2534321 0.007947977 1.1018519 0.291106688047 0.70832213
## 932  3.45 0.2494948 0.007716333 1.1031746 0.296493564729 0.70303514
## 933  3.48 0.2574565 0.011161940 1.1033386 0.147766085754 0.85179517
## 934  4.08 0.2694192 0.006123163 1.1036835 0.366276638956 0.63458704
## 935  3.61 0.2737549 0.010067114 1.1037594 0.200063787421 0.80006438
## 936  4.29 0.2704652 0.005950234 1.1038961 0.380090154890 0.62094437
## 937  3.80 0.2503228 0.006825309 1.1042296 0.322836165287 0.67692788
## 938  4.11 0.2745649 0.004746001 1.1050477 0.341614455283 0.65933272
## 939  3.83 0.2592388 0.005148005 1.1061286 0.329556217293 0.67054463
## 940  4.21 0.2738957 0.007603186 1.1062069 0.351068655361 0.64988118
## 941  4.17 0.2633497 0.003462730 1.1062937 0.320960619795 0.67938925
## 942  3.68 0.2689816 0.010255488 1.1063174 0.176012007146 0.82388501
## 943  4.02 0.2553461 0.005255527 1.1065089 0.352662643929 0.64744184
## 944  4.65 0.2880242 0.004975124 1.1067344 0.406505024871 0.59542643
## 945  4.04 0.2768290 0.006111810 1.1068917 0.374589091211 0.62655895
## 946  3.37 0.2499542 0.007885568 1.1070234 0.320812891821 0.67872049
## 947  3.87 0.2731839 0.009327354 1.1070423 0.238582625567 0.76170170
## 948  3.62 0.2511054 0.002394989 1.1070866 0.241941086777 0.75726163
## 949  3.45 0.2567420 0.004737609 1.1071429 0.282663821957 0.71693077
## 950  2.78 0.2559772 0.007400380 1.1081594 0.284665244935 0.71465613
## 951  4.06 0.2752600 0.004316264 1.1083086 0.334832737583 0.66603488
## 952  4.03 0.2606363 0.005919349 1.1084165 0.373606512689 0.62680731
## 953  3.73 0.2578430 0.004455170 1.1086310 0.309013120634 0.69083570
## 954  3.69 0.2635938 0.005221462 1.1088647 0.324299227615 0.67587002
## 955  3.78 0.2617620 0.004904632 1.1092814 0.327304960955 0.67282078
## 956  5.21 0.2866972 0.003352152 1.1094972 0.432947285778 0.56914043
## 957  3.72 0.2670261 0.007008086 1.1100478 0.373854183324 0.62670784
## 958  4.69 0.2683192 0.003618600 1.1101485 0.392065933333 0.60895103
## 959  4.79 0.2803705 0.003740648 1.1104442 0.403520494117 0.59807371
## 960  4.06 0.2823427 0.005419580 1.1111111 0.371256424687 0.63004161
## 961  3.24 0.2514567 0.007465404 1.1111111 0.340243875901 0.65926521
## 962  4.21 0.2695778 0.006315410 1.1115702 0.408671647874 0.59228680
## 963  4.28 0.2671506 0.007078040 1.1118509 0.388367934030 0.61240918
## 964  3.61 0.2767508 0.009242801 1.1119632 0.374242959516 0.62665811
## 965  4.06 0.2625608 0.002341077 1.1124829 0.298707229200 0.70133853
## 966  4.41 0.2804770 0.004805125 1.1125320 0.397735526730 0.60368567
## 967  4.01 0.2616384 0.005413208 1.1125731 0.375639270968 0.62474133
## 968  3.70 0.2508909 0.005523877 1.1126126 0.347798926867 0.65184487
## 969  3.63 0.2646310 0.012722646 1.1127098 0.191721202245 0.80800585
## 970  4.16 0.2786740 0.008686403 1.1127517 0.337799285746 0.66310415
## 971  4.57 0.2869550 0.008685079 1.1129235 0.291819346965 0.70914274
## 972  3.79 0.2617090 0.006971755 1.1130690 0.387799890010 0.61256345
## 973  3.77 0.2580216 0.007214733 1.1132075 0.383841607879 0.61633894
## 974  3.43 0.2444364 0.003075810 1.1141925 0.261482772753 0.73731040
## 975  3.76 0.2581290 0.005812897 1.1141975 0.365498858645 0.63455776
## 976  3.25 0.2635221 0.003095975 1.1151203 0.242603632263 0.75687364
## 977  3.25 0.2521453 0.006755523 1.1153184 0.340405319589 0.65904669
## 978  3.78 0.2755697 0.005652712 1.1163476 0.365975742692 0.63482510
## 979  4.36 0.2816102 0.010640184 1.1172680 0.218583287522 0.78169443
## 980  4.07 0.2659650 0.006778452 1.1174825 0.425062510559 0.57565925
## 981  4.02 0.2625181 0.005442671 1.1176471 0.394053221835 0.60636216
## 982  4.03 0.2803468 0.007406069 1.1177285 0.438219225143 0.56313402
## 983  3.02 0.2505482 0.006213450 1.1197053 0.315688294861 0.68339217
## 984  3.44 0.2499536 0.004828227 1.1210084 0.328174154220 0.67106616
## 985  3.26 0.2420533 0.005115090 1.1210692 0.318307723445 0.68043722
## 986  3.25 0.2625718 0.004310345 1.1218274 0.293290425772 0.70627743
## 987  3.80 0.2683102 0.008457801 1.1220588 0.430457947404 0.57021323
## 988  3.98 0.2753304 0.008076358 1.1227080 0.455934215277 0.54516359
## 989  3.58 0.2596309 0.007167174 1.1228879 0.408781754840 0.59132638
## 990  4.52 0.2759602 0.002844950 1.1229404 0.391148978291 0.60990822
## 991  3.36 0.2674875 0.009992862 1.1238095 0.395864076307 0.60447039
## 992  4.50 0.2756205 0.004003203 1.1242938 0.425194511630 0.57595681
## 993  3.30 0.2458925 0.007797271 1.1243655 0.396938108634 0.60236162
## 994  4.22 0.2608373 0.009262690 1.1244755 0.237935341094 0.76180210
## 995  4.23 0.2841196 0.004141160 1.1244870 0.397120768599 0.60418614
## 996  3.37 0.2637947 0.009532539 1.1247947 0.413012262071 0.58721925
## 997  3.47 0.2716459 0.007468880 1.1252955 0.411161695773 0.58940860
## 998  3.82 0.2545288 0.004208600 1.1253776 0.365720973014 0.63397550
## 999  3.99 0.2659708 0.004858737 1.1266376 0.403224415892 0.59720959
## 1000 3.43 0.2608380 0.008162525 1.1268058 0.428849164876 0.57128301
## 1001 4.34 0.2816216 0.007747748 1.1276042 0.499045234264 0.50243372
## 1002 4.02 0.2668101 0.003586157 1.1276297 0.374343339108 0.62598462
## 1003 3.26 0.2635590 0.005671748 1.1290850 0.353427765941 0.64635722
## 1004 3.33 0.2534085 0.005271769 1.1294498 0.354012203952 0.64531656
## 1005 3.93 0.2736842 0.005626134 1.1294643 0.424358817914 0.57643211
## 1006 3.23 0.2659786 0.006880319 1.1294719 0.383981677044 0.61604875
## 1007 3.73 0.2685202 0.007533632 1.1294852 0.454002218973 0.54661899
## 1008 3.63 0.2610611 0.005691206 1.1300940 0.398899856344 0.60110479
## 1009 4.74 0.2806612 0.003732670 1.1304348 0.461689847917 0.53976012
## 1010 3.73 0.2650752 0.004244032 1.1304985 0.370102767755 0.62997242
## 1011 4.53 0.2810064 0.005846917 1.1309824 0.496416671578 0.50506179
## 1012 3.31 0.2616520 0.006838222 1.1314935 0.399387018773 0.60050894
## 1013 3.36 0.2442178 0.005463486 1.1332237 0.377026942400 0.62191167
## 1014 4.39 0.2799680 0.006203722 1.1332378 0.498475557297 0.50289055
## 1015 3.59 0.2604583 0.004365224 1.1337481 0.369622749268 0.63012605
## 1016 4.05 0.2714520 0.004736746 1.1344538 0.429826877896 0.57083997
## 1017 4.32 0.2795642 0.006325778 1.1345646 0.498702973242 0.50260628
## 1018 4.04 0.2612304 0.004871008 1.1348637 0.436897946577 0.56334012
##  ... (rows 1019-1260 omitted for brevity) ...
## 
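The printout above is one element of the full prediction object and $data below is another; each runs to well over a thousand rows, so a short preview reads better in the knitted report. A minimal sketch, assuming the fitted network is stored in an object named nn (a hypothetical name, not shown in this output) and that the printout comes from neuralnet's prediction():

#Preview the prediction output instead of printing every row
#nn is a placeholder name for the fitted neuralnet model
pred <- prediction(nn)   #covariate combinations with fitted win/not_win values, plus $data
head(pred$rep1, 10)      #first 10 rows of fitted values (assumed element name for repetition 1)
head(pred$data, 10)      #first 10 rows of covariates with the observed win/not_win columns

Printing only the head keeps the report readable while the full object remains available in the R session.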
## $data
##       ERA       AVG         X3R        RR win not_win
## 1    4.24 0.2245660 0.007840209 0.6273458   0       1
## 2    5.30 0.2400293 0.007135016 0.6368534   0       1
## 3    4.58 0.2288089 0.004986150 0.6518072   0       1
## 4    4.92 0.2478727 0.006844247 0.6655093   0       1
## 5    4.75 0.2385939 0.005983545 0.6662791   0       1
## 6    5.00 0.2419235 0.005012997 0.6778291   0       1
## 7    4.98 0.2527056 0.006854257 0.6840934   0       1
## 8    4.54 0.2379619 0.003732736 0.6970849   0       1
## 9    3.82 0.2263561 0.006532235 0.7060086   0       1
## 10   6.38 0.2555154 0.003797468 0.7098821   0       1
##  ... (rows 11-814 omitted for brevity) ...
## 815  3.25 0.2498584 0.007554297 1.0653710   0       1
## 816  3.25 0.2588536 0.006389193 1.0663350   0       1
## 817  3.77 0.2704000 0.007822222 1.0664740   0       1
## 818  3.93 0.2592457 0.006041743 1.0668605   0       1
## 819  3.34 0.2517470 0.004229496 1.0670017   0       1
## 820  4.50 0.2744294 0.007132668 1.0679739   1       0
## 821  4.09 0.2675055 0.003829322 1.0681818   0       1
## 822  4.18 0.2660371 0.007127584 1.0681818   0       1
## 823  3.71 0.2499540 0.005153690 1.0683891   0       1
## 824  3.46 0.2591447 0.006369427 1.0685358   0       1
## 825  4.17 0.2697277 0.002618715 1.0687332   0       1
## 826  2.99 0.2492240 0.005294869 1.0690909   0       1
## 827  3.82 0.2608221 0.006547836 1.0695652   0       1
## 828  4.76 0.2773578 0.004499640 1.0700246   1       0
## 829  3.39 0.2482569 0.008073394 1.0702875   0       1
## 830  4.38 0.2630053 0.003987674 1.0706806   0       1
## 831  3.81 0.2695857 0.010132079 1.0712209   0       1
## 832  4.26 0.2752899 0.004817128 1.0720000   0       1
## 833  4.53 0.2680843 0.005270811 1.0726817   0       1
## 834  4.37 0.2843102 0.004824875 1.0729443   0       1
## 835  3.55 0.2566903 0.006553796 1.0733138   0       1
## 836  4.01 0.2765542 0.004618117 1.0737589   1       0
## 837  3.41 0.2571217 0.009988901 1.0737834   0       1
## 838  3.10 0.2496324 0.006250000 1.0738916   1       0
## 839  4.14 0.2625678 0.009764919 1.0739191   0       1
## 840  4.14 0.2714311 0.009445732 1.0739247   0       1
## 841  3.91 0.2567459 0.005343308 1.0740741   1       0
## 842  4.65 0.2750890 0.006761566 1.0740741   0       1
## 843  4.19 0.2812278 0.004613201 1.0744828   0       1
## 844  3.86 0.2784969 0.004674577 1.0744986   0       1
## 845  3.79 0.2594348 0.006563355 1.0746951   0       1
## 846  3.98 0.2668121 0.005634315 1.0750708   1       0
## 847  3.49 0.2516364 0.008909091 1.0751174   1       0
## 848  3.55 0.2568724 0.007828145 1.0752864   0       1
## 849  3.35 0.2458533 0.004607446 1.0755034   0       1
## 850  4.45 0.2667021 0.004075846 1.0755442   0       1
## 851  3.87 0.2627331 0.006814921 1.0765832   1       0
## 852  3.27 0.2561879 0.004878049 1.0767974   1       0
## 853  3.31 0.2626720 0.005430847 1.0770465   0       1
## 854  4.00 0.2590318 0.004335260 1.0772532   0       1
## 855  3.61 0.2578675 0.008145131 1.0774092   0       1
## 856  3.86 0.2734306 0.011957095 1.0775862   0       1
## 857  3.49 0.2505967 0.007343492 1.0776398   0       1
## 858  4.00 0.2618572 0.005815010 1.0776978   0       1
## 859  3.45 0.2600469 0.006127230 1.0782030   0       1
## 860  3.48 0.2520766 0.005958830 1.0782209   0       1
## 861  4.33 0.2741792 0.006388642 1.0784314   0       1
## 862  4.12 0.2719455 0.006449301 1.0786517   1       0
## 863  4.35 0.2792520 0.004274265 1.0788436   0       1
## 864  3.81 0.2753927 0.004450262 1.0790021   0       1
## 865  4.02 0.2768683 0.010142349 1.0790960   0       1
## 866  5.26 0.2939929 0.009363958 1.0791527   0       1
## 867  5.00 0.2886107 0.005641749 1.0792193   1       0
## 868  4.11 0.2714542 0.004129264 1.0799458   0       1
## 869  3.68 0.2642572 0.005266981 1.0802469   1       0
## 870  3.47 0.2593931 0.005419075 1.0808241   0       1
## 871  3.10 0.2614086 0.012037370 1.0814941   1       0
## 872  3.48 0.2589026 0.007630814 1.0820189   1       0
## 873  4.69 0.2791026 0.005829359 1.0823245   0       1
## 874  3.50 0.2547529 0.007604563 1.0830619   0       1
## 875  4.53 0.2657168 0.006055209 1.0831234   0       1
## 876  3.75 0.2678963 0.007121987 1.0835821   1       0
## 877  3.74 0.2565908 0.004153124 1.0835913   0       1
## 878  3.46 0.2597070 0.008058608 1.0837438   0       1
## 879  3.11 0.2550117 0.005959906 1.0846561   0       1
## 880  3.52 0.2627472 0.009617129 1.0848485   0       1
## 881  4.04 0.2657559 0.008014572 1.0852601   0       1
## 882  4.28 0.2713568 0.003589375 1.0852820   0       1
## 883  2.96 0.2365170 0.006275740 1.0856031   1       0
## 884  3.65 0.2594624 0.007313951 1.0864198   0       1
## 885  4.73 0.2739100 0.007208158 1.0864799   1       0
## 886  3.55 0.2608219 0.007488584 1.0866142   0       1
## 887  3.63 0.2542712 0.006906579 1.0868217   0       1
## 888  3.85 0.2525745 0.006323397 1.0885341   0       1
## 889  3.87 0.2619693 0.005058717 1.0886427   0       1
## 890  4.56 0.2759248 0.006872852 1.0886628   0       1
## 891  4.03 0.2729230 0.005741970 1.0893617   0       1
## 892  4.04 0.2711324 0.004961900 1.0898551   1       0
## 893  4.03 0.2656945 0.004268184 1.0909091   1       0
## 894  4.45 0.2724359 0.005341880 1.0911425   1       0
## 895  3.71 0.2493766 0.004987531 1.0914454   0       1
## 896  4.23 0.2757640 0.010305615 1.0918775   0       1
## 897  3.83 0.2661871 0.007378712 1.0922619   0       1
## 898  4.64 0.2783929 0.006607143 1.0925700   1       0
## 899  4.61 0.2803606 0.003535443 1.0931990   0       1
## 900  3.64 0.2713801 0.006070345 1.0934150   0       1
## 901  4.08 0.2847753 0.010303784 1.0934449   0       1
## 902  4.16 0.2633977 0.003645643 1.0934959   0       1
## 903  3.47 0.2606083 0.010562739 1.0939908   0       1
## 904  4.03 0.2509431 0.003412969 1.0945559   0       1
## 905  3.77 0.2586207 0.004355717 1.0945736   0       1
## 906  4.10 0.2568874 0.005108557 1.0946502   0       1
## 907  4.25 0.2783597 0.009231315 1.0946822   0       1
## 908  3.36 0.2530391 0.006732747 1.0949367   0       1
## 909  3.82 0.2702462 0.011951481 1.0950292   0       1
## 910  4.12 0.2770113 0.010392403 1.0950413   0       1
## 911  3.53 0.2584249 0.004029304 1.0950872   1       0
## 912  3.86 0.2725981 0.007103534 1.0953058   0       1
## 913  3.95 0.2621289 0.005068791 1.0959097   0       1
## 914  3.34 0.2491707 0.008293402 1.0960630   1       0
## 915  3.32 0.2468412 0.006592199 1.0962733   1       0
## 916  4.25 0.2731941 0.009383853 1.0964333   0       1
## 917  4.02 0.2742052 0.003973988 1.0972818   0       1
## 918  3.99 0.2682310 0.004512635 1.0975610   1       0
## 919  3.70 0.2700018 0.012417953 1.0978916   1       0
## 920  3.26 0.2453518 0.006379876 1.0987868   0       1
## 921  4.06 0.2651855 0.006987995 1.0992908   0       1
## 922  4.20 0.2676231 0.003896564 1.0994624   0       1
## 923  3.50 0.2572418 0.008198215 1.0996785   1       0
## 924  4.31 0.2619392 0.005581972 1.1000000   0       1
## 925  3.32 0.2640591 0.012076424 1.1000000   0       1
## 926  5.07 0.2925146 0.005131835 1.1001164   1       0
## 927  4.60 0.2785614 0.005641749 1.1005025   0       1
## 928  3.74 0.2608300 0.004550419 1.1010720   0       1
## 929  3.74 0.2734996 0.003976862 1.1011561   0       1
## 930  4.37 0.2713459 0.005246020 1.1014304   1       0
## 931  2.94 0.2534321 0.007947977 1.1018519   0       1
## 932  3.45 0.2494948 0.007716333 1.1031746   0       1
## 933  3.48 0.2574565 0.011161940 1.1033386   0       1
## 934  4.08 0.2694192 0.006123163 1.1036835   0       1
## 935  3.61 0.2737549 0.010067114 1.1037594   0       1
## 936  4.29 0.2704652 0.005950234 1.1038961   0       1
## 937  3.80 0.2503228 0.006825309 1.1042296   1       0
## 938  4.11 0.2745649 0.004746001 1.1050477   0       1
## 939  3.83 0.2592388 0.005148005 1.1061286   0       1
## 940  4.21 0.2738957 0.007603186 1.1062069   0       1
## 941  4.17 0.2633497 0.003462730 1.1062937   0       1
## 942  3.68 0.2689816 0.010255488 1.1063174   1       0
## 943  4.02 0.2553461 0.005255527 1.1065089   0       1
## 944  4.65 0.2880242 0.004975124 1.1067344   1       0
## 945  4.04 0.2768290 0.006111810 1.1068917   1       0
## 946  3.37 0.2499542 0.007885568 1.1070234   0       1
## 947  3.87 0.2731839 0.009327354 1.1070423   0       1
## 948  3.62 0.2511054 0.002394989 1.1070866   0       1
## 949  3.45 0.2567420 0.004737609 1.1071429   0       1
## 950  2.78 0.2559772 0.007400380 1.1081594   0       1
## 951  4.06 0.2752600 0.004316264 1.1083086   0       1
## 952  4.03 0.2606363 0.005919349 1.1084165   0       1
## 953  3.73 0.2578430 0.004455170 1.1086310   1       0
## 954  3.69 0.2635938 0.005221462 1.1088647   0       1
## 955  3.78 0.2617620 0.004904632 1.1092814   0       1
## 956  5.21 0.2866972 0.003352152 1.1094972   0       1
## 957  3.72 0.2670261 0.007008086 1.1100478   0       1
## 958  4.69 0.2683192 0.003618600 1.1101485   1       0
## 959  4.79 0.2803705 0.003740648 1.1104442   1       0
## 960  4.06 0.2823427 0.005419580 1.1111111   0       1
## 961  3.24 0.2514567 0.007465404 1.1111111   0       1
## 962  4.21 0.2695778 0.006315410 1.1115702   0       1
## 963  4.28 0.2671506 0.007078040 1.1118509   0       1
## 964  3.61 0.2767508 0.009242801 1.1119632   0       1
## 965  4.06 0.2625608 0.002341077 1.1124829   1       0
## 966  4.41 0.2804770 0.004805125 1.1125320   0       1
## 967  4.01 0.2616384 0.005413208 1.1125731   1       0
## 968  3.70 0.2508909 0.005523877 1.1126126   0       1
## 969  3.63 0.2646310 0.012722646 1.1127098   0       1
## 970  4.16 0.2786740 0.008686403 1.1127517   0       1
## 971  4.57 0.2869550 0.008685079 1.1129235   0       1
## 972  3.79 0.2617090 0.006971755 1.1130690   0       1
## 973  3.77 0.2580216 0.007214733 1.1132075   0       1
## 974  3.43 0.2444364 0.003075810 1.1141925   1       0
## 975  3.76 0.2581290 0.005812897 1.1141975   0       1
## 976  3.25 0.2635221 0.003095975 1.1151203   1       0
## 977  3.25 0.2521453 0.006755523 1.1153184   0       1
## 978  3.78 0.2755697 0.005652712 1.1163476   0       1
## 979  4.36 0.2816102 0.010640184 1.1172680   0       1
## 980  4.07 0.2659650 0.006778452 1.1174825   0       1
## 981  4.02 0.2625181 0.005442671 1.1176471   0       1
## 982  4.03 0.2803468 0.007406069 1.1177285   0       1
## 983  3.02 0.2505482 0.006213450 1.1197053   0       1
## 984  3.44 0.2499536 0.004828227 1.1210084   1       0
## 985  3.26 0.2420533 0.005115090 1.1210692   0       1
## 986  3.25 0.2625718 0.004310345 1.1218274   0       1
## 987  3.80 0.2683102 0.008457801 1.1220588   0       1
## 988  3.98 0.2753304 0.008076358 1.1227080   1       0
## 989  3.58 0.2596309 0.007167174 1.1228879   1       0
## 990  4.52 0.2759602 0.002844950 1.1229404   1       0
## 991  3.36 0.2674875 0.009992862 1.1238095   0       1
## 992  4.50 0.2756205 0.004003203 1.1242938   1       0
## 993  3.30 0.2458925 0.007797271 1.1243655   1       0
## 994  4.22 0.2608373 0.009262690 1.1244755   0       1
## 995  4.23 0.2841196 0.004141160 1.1244870   1       0
## 996  3.37 0.2637947 0.009532539 1.1247947   1       0
## 997  3.47 0.2716459 0.007468880 1.1252955   0       1
## 998  3.82 0.2545288 0.004208600 1.1253776   0       1
## 999  3.99 0.2659708 0.004858737 1.1266376   0       1
## 1000 3.43 0.2608380 0.008162525 1.1268058   1       0
## 1001 4.34 0.2816216 0.007747748 1.1276042   1       0
## 1002 4.02 0.2668101 0.003586157 1.1276297   1       0
## 1003 3.26 0.2635590 0.005671748 1.1290850   0       1
## 1004 3.33 0.2534085 0.005271769 1.1294498   0       1
## 1005 3.93 0.2736842 0.005626134 1.1294643   0       1
## 1006 3.23 0.2659786 0.006880319 1.1294719   0       1
## 1007 3.73 0.2685202 0.007533632 1.1294852   1       0
## 1008 3.63 0.2610611 0.005691206 1.1300940   1       0
## 1009 4.74 0.2806612 0.003732670 1.1304348   0       1
## 1010 3.73 0.2650752 0.004244032 1.1304985   1       0
## 1011 4.53 0.2810064 0.005846917 1.1309824   0       1
## 1012 3.31 0.2616520 0.006838222 1.1314935   0       1
## 1013 3.36 0.2442178 0.005463486 1.1332237   0       1
## 1014 4.39 0.2799680 0.006203722 1.1332378   1       0
## 1015 3.59 0.2604583 0.004365224 1.1337481   1       0
## 1016 4.05 0.2714520 0.004736746 1.1344538   1       0
## 1017 4.32 0.2795642 0.006325778 1.1345646   0       1
## 1018 4.04 0.2612304 0.004871008 1.1348637   0       1
## 1019 3.58 0.2638306 0.005660033 1.1350932   1       0
## 1020 3.30 0.2474844 0.007070982 1.1364764   1       0
## 1021 3.49 0.2670205 0.006771596 1.1365079   0       1
## 1022 3.34 0.2514150 0.005477451 1.1377551   1       0
## 1023 3.51 0.2563164 0.005858660 1.1379310   0       1
## 1024 3.69 0.2703971 0.008122744 1.1380323   1       0
## 1025 4.28 0.2824670 0.006519824 1.1389646   1       0
## 1026 3.43 0.2696889 0.009600000 1.1392801   1       0
## 1027 3.66 0.2627630 0.005306496 1.1406250   1       0
## 1028 3.65 0.2670053 0.005441683 1.1406491   0       1
## 1029 3.93 0.2706439 0.003819571 1.1408046   1       0
## 1030 4.14 0.2643037 0.007376754 1.1409029   1       0
## 1031 3.98 0.2648560 0.006744440 1.1409496   1       0
## 1032 4.21 0.2789030 0.007528231 1.1415094   1       0
## 1033 3.99 0.2729875 0.005724508 1.1428571   0       1
## 1034 3.49 0.2735882 0.008067345 1.1430746   1       0
## 1035 2.94 0.2463527 0.006278855 1.1431227   0       1
## 1036 3.30 0.2488839 0.004464286 1.1433390   0       1
## 1037 4.19 0.2736318 0.004619758 1.1434371   0       1
## 1038 3.91 0.2633671 0.007225434 1.1436950   1       0
## 1039 3.70 0.2766034 0.004836976 1.1439510   0       1
## 1040 3.17 0.2436697 0.005871560 1.1444043   0       1
## 1041 3.39 0.2462275 0.004416636 1.1445783   0       1
## 1042 3.93 0.2761313 0.004436557 1.1455604   1       0
## 1043 3.57 0.2634049 0.003610760 1.1466459   0       1
## 1044 3.26 0.2464039 0.006164767 1.1471572   0       1
## 1045 3.62 0.2623098 0.004118174 1.1477987   0       1
## 1046 3.29 0.2461286 0.003825833 1.1478992   0       1
## 1047 3.57 0.2627210 0.003247925 1.1482059   0       1
## 1048 3.61 0.2622536 0.004159884 1.1488372   1       0
## 1049 3.84 0.2552917 0.006626173 1.1491228   1       0
## 1050 4.52 0.2769476 0.004981072 1.1492109   0       1
## 1051 3.15 0.2551002 0.005881272 1.1493849   1       0
## 1052 3.08 0.2539769 0.009399855 1.1497326   0       1
## 1053 3.63 0.2683983 0.005952381 1.1503876   0       1
## 1054 4.05 0.2666423 0.006583760 1.1504298   0       1
## 1055 4.38 0.2703541 0.004563709 1.1504540   1       0
## 1056 3.58 0.2435614 0.006806475 1.1514658   0       1
## 1057 3.91 0.2634545 0.008909091 1.1515152   1       0
## 1058 4.05 0.2683797 0.004817987 1.1519886   1       0
## 1059 4.01 0.2715540 0.005377308 1.1532847   1       0
## 1060 3.82 0.2604223 0.006677495 1.1535022   1       0
## 1061 2.96 0.2478365 0.004603204 1.1544118   1       0
## 1062 4.16 0.2579778 0.006274650 1.1565585   1       0
## 1063 3.28 0.2564472 0.006672678 1.1574394   0       1
## 1064 3.75 0.2602538 0.008644473 1.1580547   1       0
## 1065 3.29 0.2640813 0.007202216 1.1581633   0       1
## 1066 4.45 0.2853077 0.005869797 1.1603154   1       0
## 1067 3.84 0.2646269 0.008946144 1.1603631   0       1
## 1068 3.39 0.2653394 0.007239819 1.1608040   1       0
## 1069 3.48 0.2379229 0.005789759 1.1612378   1       0
## 1070 4.66 0.2844616 0.005612066 1.1614518   1       0
## 1071 3.40 0.2703156 0.004188774 1.1619537   0       1
## 1072 3.80 0.2736052 0.009477825 1.1620029   0       1
## 1073 4.49 0.2694197 0.004729853 1.1628205   0       1
## 1074 3.49 0.2578812 0.005498534 1.1630435   1       0
## 1075 3.40 0.2654676 0.006834532 1.1636953   1       0
## 1076 3.95 0.2731681 0.007363506 1.1639344   1       0
## 1077 4.84 0.2884040 0.005278902 1.1642157   0       1
## 1078 4.00 0.2780068 0.007528231 1.1643454   0       1
## 1079 4.07 0.2749147 0.005566529 1.1645207   0       1
## 1080 3.35 0.2549303 0.009770219 1.1647059   1       0
## 1081 4.58 0.2699640 0.004136691 1.1648216   1       0
## 1082 3.30 0.2495886 0.009508137 1.1650000   1       0
## 1083 3.67 0.2721126 0.008223684 1.1656250   0       1
## 1084 4.66 0.2860432 0.005844846 1.1656734   1       0
## 1085 3.83 0.2857893 0.010325516 1.1657061   1       0
## 1086 3.31 0.2418673 0.005329903 1.1663894   0       1
## 1087 3.42 0.2471889 0.005529954 1.1666667   0       1
## 1088 3.21 0.2689531 0.010288809 1.1669394   1       0
## 1089 3.56 0.2701678 0.007038441 1.1674277   0       1
## 1090 2.99 0.2415699 0.007554819 1.1682070   1       0
## 1091 3.70 0.2651833 0.008024804 1.1686747   1       0
## 1092 3.21 0.2596342 0.004794885 1.1694631   0       1
## 1093 3.68 0.2599857 0.005706134 1.1704036   1       0
## 1094 3.49 0.2640378 0.005815010 1.1704918   0       1
## 1095 3.44 0.2683595 0.010778224 1.1719243   1       0
## 1096 3.95 0.2870403 0.006069261 1.1727672   1       0
## 1097 3.69 0.2623067 0.003554292 1.1732523   0       1
## 1098 4.90 0.2891374 0.005679801 1.1732558   1       0
## 1099 3.56 0.2582830 0.004576240 1.1732909   0       1
## 1100 3.05 0.2485089 0.006687150 1.1740675   0       1
## 1101 3.88 0.2554003 0.006534761 1.1750000   1       0
## 1102 3.06 0.2525888 0.005362426 1.1769912   0       1
## 1103 3.67 0.2593602 0.007270084 1.1772727   1       0
## 1104 2.96 0.2606325 0.005089059 1.1778929   1       0
## 1105 3.34 0.2531646 0.007233273 1.1783961   0       1
## 1106 3.84 0.2676308 0.006070345 1.1790831   0       1
## 1107 3.63 0.2531876 0.005464481 1.1795276   1       0
## 1108 3.57 0.2496794 0.004762777 1.1796117   0       1
## 1109 3.97 0.2829576 0.007033363 1.1799710   1       0
## 1110 3.71 0.2714337 0.006581288 1.1805556   0       1
## 1111 4.03 0.2829541 0.006139401 1.1828411   0       1
## 1112 3.73 0.2639296 0.005315249 1.1833856   1       0
## 1113 3.68 0.2702703 0.005334282 1.1835148   1       0
## 1114 3.40 0.2589087 0.007795100 1.1841680   1       0
## 1115 4.35 0.2697095 0.004510193 1.1847826   0       1
## 1116 3.38 0.2491362 0.003637025 1.1850594   0       1
## 1117 3.95 0.2735268 0.008970976 1.1851351   1       0
## 1118 3.81 0.2679460 0.005152807 1.1864662   0       1
## 1119 3.76 0.2593335 0.008013112 1.1870398   0       1
## 1120 4.20 0.2802102 0.006129597 1.1872456   0       1
## 1121 4.48 0.2889582 0.006933611 1.1878863   0       1
## 1122 3.43 0.2562545 0.002859185 1.1888702   1       0
## 1123 3.86 0.2625828 0.005373455 1.1890694   0       1
## 1124 3.71 0.2750752 0.006368300 1.1891496   0       1
## 1125 3.93 0.2695413 0.005871560 1.1900585   0       1
## 1126 3.69 0.2802376 0.007559395 1.1901840   1       0
## 1127 2.66 0.2567019 0.009477390 1.1903323   1       0
## 1128 3.91 0.2682665 0.003939828 1.1923642   1       0
## 1129 3.54 0.2696829 0.004987531 1.1929012   1       0
## 1130 3.40 0.2539510 0.006176203 1.1932367   1       0
## 1131 3.63 0.2543205 0.004366018 1.1944012   1       0
## 1132 3.44 0.2496884 0.005608641 1.1944444   1       0
## 1133 3.00 0.2628480 0.005174875 1.1946903   0       1
## 1134 3.36 0.2571064 0.005466472 1.1955403   1       0
## 1135 3.14 0.2538321 0.008759124 1.1985940   1       0
## 1136 4.03 0.2704467 0.007138487 1.1990369   1       0
## 1137 4.27 0.2787150 0.002512563 1.1997187   0       1
## 1138 4.19 0.2799500 0.006248884 1.2016461   0       1
## 1139 3.74 0.2698384 0.006642729 1.2020958   1       0
## 1140 3.84 0.2646633 0.002353367 1.2035928   1       0
## 1141 3.41 0.2722134 0.009185656 1.2052877   1       0
## 1142 3.67 0.2599892 0.006092098 1.2062500   1       0
## 1143 4.11 0.2814882 0.010163339 1.2064607   0       1
## 1144 3.14 0.2644099 0.007685270 1.2065404   0       1
## 1145 2.53 0.2293158 0.005767701 1.2069767   0       1
## 1146 3.19 0.2395332 0.005557614 1.2079723   0       1
## 1147 3.22 0.2565275 0.007623404 1.2081129   0       1
## 1148 3.33 0.2577093 0.005873715 1.2081911   1       0
## 1149 3.87 0.2670241 0.006255585 1.2082718   1       0
## 1150 3.54 0.2672275 0.008233399 1.2115677   0       1
## 1151 4.41 0.2845514 0.003716156 1.2125163   1       0
## 1152 2.92 0.2484871 0.005684944 1.2134831   0       1
## 1153 3.70 0.2679382 0.004722979 1.2145062   1       0
## 1154 3.82 0.2744035 0.004699928 1.2149254   1       0
## 1155 3.92 0.2670661 0.007443718 1.2151335   1       0
## 1156 4.26 0.2833922 0.003710247 1.2151394   1       0
## 1157 3.44 0.2629840 0.009175996 1.2151899   1       0
## 1158 3.99 0.2719065 0.006549832 1.2165963   0       1
## 1159 4.01 0.2796640 0.005897069 1.2175793   0       1
## 1160 3.84 0.2743708 0.007089685 1.2177778   0       1
## 1161 4.02 0.2717295 0.005664719 1.2190476   1       0
## 1162 3.84 0.2667274 0.004193254 1.2192593   1       0
## 1163 3.68 0.2608852 0.005037783 1.2232416   1       0
## 1164 3.11 0.2568030 0.006307443 1.2235915   0       1
## 1165 4.02 0.2708296 0.002497770 1.2248603   1       0
## 1166 3.43 0.2683503 0.006359012 1.2251656   0       1
## 1167 3.05 0.2517292 0.004550419 1.2251773   1       0
## 1168 3.63 0.2690227 0.004868374 1.2254601   1       0
## 1169 4.10 0.2835979 0.005467372 1.2256757   1       0
## 1170 3.58 0.2590234 0.005484784 1.2269841   1       0
## 1171 3.56 0.2541206 0.004528165 1.2272000   1       0
## 1172 2.90 0.2519127 0.006234061 1.2274052   1       0
## 1173 3.23 0.2622652 0.007477658 1.2297521   1       0
## 1174 3.41 0.2596206 0.006323397 1.2297940   0       1
## 1175 3.61 0.2713496 0.005348547 1.2305296   0       1
## 1176 3.33 0.2614426 0.004452360 1.2306397   1       0
## 1177 3.67 0.2623997 0.007658643 1.2307692   1       0
## 1178 4.13 0.2816092 0.006465517 1.2311902   1       0
## 1179 2.94 0.2527352 0.007111597 1.2323810   1       0
## 1180 3.29 0.2598511 0.005084438 1.2325203   1       0
## 1181 3.17 0.2524662 0.006028498 1.2332731   0       1
## 1182 4.18 0.2819930 0.004370629 1.2356771   0       1
## 1183 3.78 0.2469204 0.006802721 1.2357473   1       0
## 1184 3.03 0.2531577 0.004871887 1.2360360   1       0
## 1185 3.09 0.2610783 0.004615953 1.2361111   1       0
## 1186 3.65 0.2763728 0.006864162 1.2369231   0       1
## 1187 4.35 0.2930822 0.004048583 1.2379714   1       0
## 1188 4.21 0.2781301 0.007972459 1.2382865   1       0
## 1189 3.58 0.2672429 0.006122816 1.2386707   1       0
## 1190 4.06 0.2667505 0.005748159 1.2395382   0       1
## 1191 3.98 0.2789116 0.007151579 1.2426778   1       0
## 1192 4.49 0.2896624 0.005597341 1.2458172   0       1
## 1193 3.76 0.2713541 0.005934184 1.2480377   0       1
## 1194 2.95 0.2466704 0.006940536 1.2504537   1       0
## 1195 3.27 0.2541090 0.006094183 1.2508251   1       0
## 1196 3.13 0.2598726 0.004549591 1.2530973   1       0
## 1197 3.18 0.2488513 0.003859585 1.2554745   1       0
## 1198 3.64 0.2726947 0.005192480 1.2578125   0       1
## 1199 3.01 0.2630716 0.008562580 1.2601770   1       0
## 1200 3.52 0.2769038 0.013764748 1.2626728   1       0
## 1201 3.18 0.2667025 0.006806377 1.2628866   1       0
## 1202 3.79 0.2825588 0.005654709 1.2629247   1       0
## 1203 3.01 0.2623300 0.005331911 1.2640449   1       0
## 1204 3.42 0.2561773 0.003815407 1.2642741   0       1
## 1205 3.71 0.2791201 0.010097367 1.2679641   1       0
## 1206 3.12 0.2639323 0.004965974 1.2687609   1       0
## 1207 3.21 0.2512879 0.008395344 1.2692998   1       0
## 1208 3.19 0.2693069 0.006480648 1.2695652   1       0
## 1209 3.49 0.2697725 0.004694836 1.2697161   1       0
## 1210 3.61 0.2760482 0.005938456 1.2704403   0       1
## 1211 3.63 0.2659364 0.004130005 1.2708018   1       0
## 1212 3.54 0.2665090 0.006367109 1.2711039   0       1
## 1213 3.69 0.2671308 0.005679736 1.2712121   0       1
## 1214 3.87 0.2777380 0.003758053 1.2742176   1       0
## 1215 3.22 0.2441839 0.005951307 1.2744755   0       1
## 1216 3.61 0.2833479 0.004010462 1.2756410   1       0
## 1217 3.61 0.2811775 0.008385370 1.2764977   1       0
## 1218 3.41 0.2702074 0.006974249 1.2765957   1       0
## 1219 3.24 0.2677470 0.005636780 1.2783172   1       0
## 1220 3.18 0.2538193 0.004049328 1.2859649   1       0
## 1221 3.87 0.2749509 0.002142475 1.2869440   1       0
## 1222 3.49 0.2709072 0.008150248 1.2892691   1       0
## 1223 3.74 0.2686779 0.006743567 1.2900763   0       1
## 1224 3.44 0.2631203 0.003927169 1.2903226   1       0
## 1225 3.31 0.2690632 0.009622367 1.2908163   1       0
## 1226 3.75 0.2765957 0.005851064 1.2917293   0       1
## 1227 3.84 0.2865149 0.004028021 1.2950581   0       1
## 1228 3.75 0.2779478 0.004320432 1.2974203   1       0
## 1229 3.79 0.2771191 0.005131835 1.3003049   1       0
## 1230 3.26 0.2608453 0.004468442 1.3006873   1       0
## 1231 3.10 0.2644961 0.010792025 1.3059441   1       0
## 1232 3.42 0.2688501 0.003599064 1.3137584   1       0
## 1233 3.31 0.2740571 0.010750793 1.3155259   1       0
## 1234 3.73 0.2631388 0.005980428 1.3196347   1       0
## 1235 3.87 0.2792986 0.006262301 1.3196347   1       0
## 1236 3.22 0.2655216 0.005009841 1.3213058   1       0
## 1237 2.91 0.2564719 0.004437870 1.3214286   1       0
## 1238 3.69 0.2823177 0.005635787 1.3214286   0       1
## 1239 2.58 0.2400000 0.005576923 1.3216630   1       0
## 1240 3.80 0.2686513 0.003085860 1.3298507   1       0
## 1241 3.77 0.2767762 0.008130081 1.3431953   1       0
## 1242 3.07 0.2662091 0.008668954 1.3440285   1       0
## 1243 3.02 0.2525542 0.006811256 1.3478261   1       0
## 1244 2.81 0.2741348 0.008561020 1.3496094   1       0
## 1245 3.51 0.2804279 0.011048755 1.3538705   1       0
## 1246 3.11 0.2630443 0.005577546 1.3546713   1       0
## 1247 3.18 0.2695369 0.006693198 1.3614458   1       0
## 1248 3.59 0.2635923 0.003947605 1.3705426   0       1
## 1249 3.14 0.2618314 0.005258386 1.3720930   1       0
## 1250 3.15 0.2568079 0.004508566 1.3797909   1       0
## 1251 3.08 0.2722504 0.008140376 1.3824057   1       0
## 1252 3.83 0.2905728 0.004574383 1.3838550   1       0
## 1253 2.99 0.2606072 0.004714313 1.4000000   1       0
## 1254 3.50 0.2797376 0.004963659 1.4096774   1       0
## 1255 3.25 0.2715171 0.004741065 1.4216867   1       0
## 1256 2.97 0.2719093 0.006118409 1.4224599   1       0
## 1257 3.37 0.2714567 0.006629636 1.4334471   1       0
## 1258 3.82 0.2879674 0.005493532 1.4710366   1       0
## 1259 3.54 0.2882042 0.006690141 1.4784689   1       0
## 1260 2.83 0.2654947 0.005255527 1.5067698   1       0
# plot network
plot(nnet1, rep="best")

## Generate confusion matrix

predict <- predict(nnet1, data.frame(team9016$ERA, team9016$AVG, team9016$X3R, team9016$RR))
predicted.class <- apply(predict,1,which.max)-1

nn_cm <-confusionMatrix(factor(ifelse(predicted.class == "1", "N", "Y")),
                factor(team9016$DivWin))

model.accuracytest['nn',] <- c('nn', nn_cm$overall[1])
model.accuracytest
##     modelx     accuracy_test
## lda    lda 0.840604026845638
## qda    qda 0.843959731543624
## nn      nn 0.878571428571429
# Neural network to predict the probability of winning the division
# neural network analysis

team9016 <- subset(Teams, yearID > 1900 & yearID < 2016)
team9016 <- team9016[ ,c("yearID", "DivWin", "ERA", "AVG", "X3R","RR")]
colnames(team9016) <- c("yearID", "DivWin", "ERA", "AVG", "X3R", "RR")
team9016 <- na.omit(team9016)

team9016$win = team9016$DivWin == "Y"
team9016$not_win = team9016$DivWin == "N"
nn <- neuralnet(win+not_win~ERA+AVG+X3R+RR, team9016, hidden = 3, stepmax = 1e6)
nn$result.matrix #reached.threshold = 0.0099 < 1%
##                                   [,1]
## error                    109.312834059
## reached.threshold          0.009857029
## steps                 119388.000000000
## Intercept.to.1layhid1     -0.465588894
## ERA.to.1layhid1            0.027441466
## AVG.to.1layhid1            1.082698084
## X3R.to.1layhid1           -5.707849300
## RR.to.1layhid1             0.043457867
## Intercept.to.1layhid2     27.005749908
## ERA.to.1layhid2           -0.450770144
## AVG.to.1layhid2           -8.989919483
## X3R.to.1layhid2           20.770145082
## RR.to.1layhid2           -20.294154140
## Intercept.to.1layhid3      1.448725959
## ERA.to.1layhid3           -0.065518901
## AVG.to.1layhid3           -2.534213166
## X3R.to.1layhid3           12.765358515
## RR.to.1layhid3            -0.141072327
## Intercept.to.win           0.264180247
## 1layhid1.to.win           -0.181771410
## 1layhid2.to.win           -0.852156639
## 1layhid3.to.win            1.081004804
## Intercept.to.not_win      -0.749610384
## 1layhid1.to.not_win        2.141422558
## 1layhid2.to.not_win        0.848774930
## 1layhid3.to.not_win       -0.200769916
plot(nn)
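
For completeness, a minimal in-sample check of the neuralnet fit can be sketched as follows (assuming a neuralnet version that provides a predict() method for nn objects; older versions would use compute() instead):

# Sketch: in-sample class predictions from the two output nodes,
# tabulated against the observed DivWin flag.
nn_pred  <- predict(nn, team9016)                       # columns: win, not_win
nn_class <- ifelse(nn_pred[, 1] > nn_pred[, 2], "Y", "N")
table(predicted = nn_class, observed = team9016$DivWin)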

K-means clustering

Doubles, triples, and ERA are crucial indicators for characterizing baseball teams. Therefore, predictors such as X2B (doubles), X3B (triples), ERA, and RA (opponent runs scored) are used to cluster teams, classifying the 2015 teams according to their ranks.

#2015

team_c <- subset(Teams, yearID == 2015, select = c(teamID, X2B, X3B, ERA, RA))
rownames(team_c) <- team_c[,1]
team_c[,1] <- NULL
team_st <- scale(team_c)
fviz_nbclust(team_st, kmeans, method = "gap_stat") # 3 is the optimal number of clusters

residual <- kmeans(team_st, 3, nstart = 25)
residual
## K-means clustering with 3 clusters of sizes 10, 9, 11
## 
## Cluster means:
##          X2B        X3B        ERA         RA
## 1 -1.0794637 -0.6697055  0.2349028  0.2339084
## 2  0.3217514  0.9599113  0.9403452  0.9659892
## 3  0.7180795 -0.1765587 -0.9829213 -1.0029988
## 
## Clustering vector:
## ARI ATL BAL BOS CHA CHN CIN CLE COL DET HOU KCA LAA LAN MIA MIL MIN NYA 
##   2   1   1   2   1   3   1   3   2   2   3   3   1   3   1   2   2   1 
## NYN OAK PHI PIT SDN SEA SFN SLN TBA TEX TOR WAS 
##   3   2   2   3   1   1   3   3   3   2   3   1 
## 
## Within cluster sum of squares by cluster:
## [1] 13.69561 13.50195 18.27653
##  (between_SS / total_SS =  60.8 %)
## 
## Available components:
## 
## [1] "cluster"      "centers"      "totss"        "withinss"    
## [5] "tot.withinss" "betweenss"    "size"         "iter"        
## [9] "ifault"
fviz_cluster(residual, data = team_st)

# To identify the factors that separate the clusters

pca.res <- PCA(team_st, graph = FALSE)
fviz_contrib(pca.res, choice = "var", axes =1, top = 4)

fviz_contrib(pca.res, choice = "var", axes = 2, top = 4)

#2016
team_c <- subset(Teams, yearID == 2016, select = c(teamID, X2B, X3B, ERA, RA))
rownames(team_c) <- team_c[,1]
team_c[,1] <- NULL
team_st <- scale(team_c)
fviz_nbclust(team_st, kmeans, method = "gap_stat") # 3 is the optimal number of clusters

residual <- kmeans(team_st, 3, nstart = 25)
residual
## K-means clustering with 3 clusters of sizes 14, 4, 12
## 
## Cluster means:
##          X2B       X3B        ERA          RA
## 1 -0.6552391 -0.528096  0.1314615  0.09702076
## 2  0.7037165  1.271022  1.7830839  1.87048464
## 3  0.5298735  0.192438 -0.7477331 -0.73668576
## 
## Clustering vector:
## ARI ATL BAL BOS CHA CHN CIN CLE COL DET HOU KCA LAA LAN MIA MIL MIN NYA 
##   2   1   1   3   3   3   2   3   2   1   3   1   1   3   3   1   2   1 
## NYN OAK PHI PIT SDN SEA SFN SLN TBA TEX TOR WAS 
##   1   1   1   1   1   1   3   3   3   1   3   3 
## 
## Within cluster sum of squares by cluster:
## [1] 21.608719  5.023502 26.888348
##  (between_SS / total_SS =  53.9 %)
## 
## Available components:
## 
## [1] "cluster"      "centers"      "totss"        "withinss"    
## [5] "tot.withinss" "betweenss"    "size"         "iter"        
## [9] "ifault"
fviz_cluster(residual, data = team_st)

# To identify the factors that separate the clusters

pca.res <- PCA(team_st, graph = FALSE)
fviz_contrib(pca.res, choice = "var", axes =1, top = 4)

fviz_contrib(pca.res, choice = "var", axes = 2, top = 4)

According to the Dim1 result, ERA and RA are highly related. Therefore, it is reasonable to combine the two predictors into a single measure, which I call "prevention of lost scores" (run prevention). According to the Dim2 result, extra-base hits such as doubles and triples are important.

All of the division champions except TEX fall in the Dim1 cluster. Therefore, I conclude that ERA is important for winning a division; for TEX, other factors must have contributed to winning.
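
As a hedged illustration of that composite (not part of the original analysis), the two standardized run-prevention columns of team_st could simply be averaged into one score per team:

# Sketch: combine the standardized ERA and RA columns of team_st (2016 teams)
# into a single "run prevention" score; lower values mean better run prevention.
prev_score <- rowMeans(team_st[, c("ERA", "RA")])
head(sort(prev_score))   # teams with the best run prevention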

Special regression Model

# The relationship between team performance and the salary gap between players
# Considering the salaries of players with more than 400 at-bats after the 2010 season

a2010 <- subset(Salaries, yearID > 2010)
b2010 <- subset(Batting, yearID > 2010)

c2010 <- merge(a2010, b2010, by = "playerID")
c2010$teamyear <- paste(c2010$teamID.x, c2010$yearID.x, sep="")
c2010 <- subset(c2010, AB > 400)

func_2 <- function(c2010){return(data.frame(sd = sd(c2010$salary)))}
team_sd <- ddply(c2010, .(teamyear), func_2)
d2010 <- subset(Teams, yearID > 2010)
d2010$WP <- d2010$W / d2010$G
d2010$teamyear <- paste(d2010$teamID, d2010$yearID, sep = "")
e2010 <- merge(team_sd,d2010,by="teamyear")

data <- e2010[, c("sd", "WP")]
fit <- lm(WP~sd, data)
plot(e2010$sd, e2010$WP)
lines(smooth.spline(e2010$sd, e2010$WP))

p <- plot_ly(data, x = ~sd, color = I("black")) %>%
  add_markers(y = ~WP, text = rownames(data), showlegend = FALSE) %>%
  add_lines(y = ~fitted(loess(WP ~ sd)),
            line = list(color = '#07A4B5'),
            name = "Loess Smoother", showlegend = TRUE) %>%
  layout(xaxis = list(title = 'Difference in annual salary (by team)'),
         yaxis = list(title = 'Winning Rate'),
         legend = list(x = 0.80, y = 0.90))

p

It seems that there is no linear relationship between the within-team salary spread and winning rate. To verify this, we run a simple linear regression.
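
As a quick check (a sketch using the fit object created above; its output is not reproduced here), the simple regression of WP on sd can be summarized directly:

# Sketch: coefficient table of the simple WP ~ sd regression fitted above.
summary(fit)$coefficients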

Linear Regression:

summary(lm(WP~sd+ L+R+AB+H+X2B+X3B+HR+BB+SO+SB+CS+HBP+SF+RA+ER+ERA+CG+SHO+SV+IPouts+HA+HRA+BBA+SOA+E+DP+FP+attendance + BPF + PPF + AVG + X3R + RR, data = e2010))  
## 
## Call:
## lm(formula = WP ~ sd + L + R + AB + H + X2B + X3B + HR + BB + 
##     SO + SB + CS + HBP + SF + RA + ER + ERA + CG + SHO + SV + 
##     IPouts + HA + HRA + BBA + SOA + E + DP + FP + attendance + 
##     BPF + PPF + AVG + X3R + RR, data = e2010)
## 
## Residuals:
##        Min         1Q     Median         3Q        Max 
## -0.0045717 -0.0001647  0.0000484  0.0003121  0.0028533 
## 
## Coefficients:
##                      Estimate        Std. Error  t value
## (Intercept)  0.96541278275747  0.22527096008639    4.286
## sd          -0.00000000003006  0.00000000004403   -0.683
## L           -0.00616626443303  0.00002520170691 -244.676
## R           -0.00000137739565  0.00000851540979   -0.162
## AB          -0.00001218106930  0.00001885427444   -0.646
## H            0.00006106677964  0.00007137592878    0.856
## X2B          0.00000388820132  0.00000437755313    0.888
## X3B         -0.00054761931019  0.00061062605716   -0.897
## HR           0.00000905314150  0.00000517115769    1.751
## BB          -0.00000344686424  0.00000195958608   -1.759
## SO          -0.00000155032404  0.00000091472680   -1.695
## SB           0.00000281012594  0.00000320079160    0.878
## CS           0.00000254101988  0.00001091638896    0.233
## HBP         -0.00000869516631  0.00000595101254   -1.461
## SF           0.00001227890814  0.00001151496824    1.066
## RA          -0.00000310434119  0.00001165606474   -0.266
## ER          -0.00009447255860  0.00009732145535   -0.971
## ERA          0.01579445872813  0.01551357800313    1.018
## CG          -0.00002803921143  0.00003176529281   -0.883
## SHO          0.00002803010086  0.00002342803039    1.196
## SV          -0.00002041475269  0.00001595901219   -1.279
## IPouts       0.00001665060620  0.00001520969628    1.095
## HA           0.00000364113375  0.00000246066008    1.480
## HRA         -0.00000448639499  0.00000532697872   -0.842
## BBA         -0.00000013916855  0.00000221267147   -0.063
## SOA         -0.00000005802253  0.00000098716081   -0.059
## E            0.00000736563227  0.00003339644819    0.221
## DP          -0.00000200823966  0.00000550027231   -0.365
## FP           0.03511934571986  0.20417386907566    0.172
## attendance   0.00000000025946  0.00000000015261    1.700
## BPF          0.00009813715585  0.00008038386135    1.221
## PPF         -0.00011104132654  0.00008175864099   -1.358
## AVG         -0.37426871638885  0.39466017341642   -0.948
## X3R          2.99247423567295  3.37744628244438    0.886
## RR           0.00197502032125  0.00520199952316    0.380
##                         Pr(>|t|)    
## (Intercept)            0.0000346 ***
## sd                        0.4959    
## L           < 0.0000000000000002 ***
## R                         0.8717    
## AB                        0.5193    
## H                         0.3938    
## X2B                       0.3760    
## X3B                       0.3714    
## HR                        0.0823 .  
## BB                        0.0809 .  
## SO                        0.0924 .  
## SB                        0.3815    
## CS                        0.8163    
## HBP                       0.1463    
## SF                        0.2882    
## RA                        0.7904    
## ER                        0.3334    
## ERA                       0.3105    
## CG                        0.3790    
## SHO                       0.2336    
## SV                        0.2030    
## IPouts                    0.2756    
## HA                        0.1413    
## HRA                       0.4012    
## BBA                       0.9499    
## SOA                       0.9532    
## E                         0.8258    
## DP                        0.7156    
## FP                        0.8637    
## attendance                0.0914 .  
## BPF                       0.2243    
## PPF                       0.1767    
## AVG                       0.3447    
## X3R                       0.3772    
## RR                        0.7048    
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
## 
## Residual standard error: 0.00085 on 134 degrees of freedom
## Multiple R-squared:  0.9999, Adjusted R-squared:  0.9998 
## F-statistic: 3.124e+04 on 34 and 134 DF,  p-value: < 0.00000000000000022
## My prediction was that sd (the salary spread within a team) would be important.
# But according to the regression results it is not significant.

To analyze the variables which affect WP (winning percentage), create a data partition and set up cross-validation.

Linear Regression using caret and Cross-Validation:

set.seed(123)
e2010_2 <- subset(e2010, select = -c(teamyear, lgID, teamID, franchID, divID, DivWin,WCWin, LgWin, WSWin,name, park, teamIDBR, teamIDlahman45,teamIDretro, W, G, L ))
rows <- sample(nrow(e2010_2))
e2010_2 <- e2010_2[rows, ]

split <- round(nrow(e2010_2) * 0.7)  # 70/30 train/test split
train.df <- e2010_2[1:split,]
test.df <- e2010_2[(split+1): nrow(e2010_2),]

tr <- trainControl(method = "repeatedcv", number = 10, repeats = 5, verboseIter = TRUE)

lm <- train(WP~., train.df, method = "lm", trControl = tr)
## + Fold01.Rep1: intercept=TRUE 
## - Fold01.Rep1: intercept=TRUE 
## ... (verbose fold-by-fold training log for Fold01-Fold10, Rep1-Rep5 omitted) ...
## Aggregating results
## Fitting final model on full training set
summary(lm)
## 
## Call:
## lm(formula = .outcome ~ ., data = dat)
## 
## Residuals:
##       Min        1Q    Median        3Q       Max 
## -0.038357 -0.009563 -0.000398  0.008586  0.038819 
## 
## Coefficients:
##                       Estimate         Std. Error t value        Pr(>|t|)
## (Intercept)  -9.47042735904912   4.84572693121188  -1.954         0.05277
## sd            0.00000000005621   0.00000000081991   0.069         0.94544
## yearID        0.00028400390519   0.00118226122648   0.240         0.81053
## Rank         -0.01312829929306   0.00189575396429  -6.925 0.0000000001723
## Ghome        -0.00150013429181   0.00256000642823  -0.586         0.55888
## R             0.00038030260949   0.00015333250935   2.480         0.01439
## AB            0.00019767336216   0.00034476136123   0.573         0.56737
## H            -0.00108877680930   0.00130298649096  -0.836         0.40489
## X2B           0.00003671378206   0.00007974107823   0.460         0.64598
## X3B          -0.01990750859273   0.01153239793668  -1.726         0.08665
## HR            0.00010412932186   0.00009475241378   1.099         0.27378
## BB           -0.00008708986677   0.00003513267509  -2.479         0.01444
## SO            0.00000264689322   0.00001774301506   0.149         0.88164
## SB            0.00007465712543   0.00005919362020   1.261         0.20945
## CS           -0.00052324559716   0.00019462923886  -2.688         0.00810
## HBP           0.00007957743159   0.00010942588002   0.727         0.46837
## SF           -0.00003693079250   0.00021009446627  -0.176         0.86073
## RA           -0.00000983207855   0.00021522553312  -0.046         0.96363
## ER           -0.00286913810597   0.00177232559301  -1.619         0.10787
## ERA           0.44694330769975   0.28265506292730   1.581         0.11622
## CG            0.00151741897830   0.00058825714281   2.580         0.01099
## SHO           0.00120695840388   0.00042211149227   2.859         0.00494
## SV            0.00176149380747   0.00025044509018   7.033 0.0000000000981
## IPouts        0.00071647590966   0.00027215348659   2.633         0.00948
## HA           -0.00005423060590   0.00004515604954  -1.201         0.23192
## HRA          -0.00018796758850   0.00009937111623  -1.892         0.06074
## BBA          -0.00000167164786   0.00004175290482  -0.040         0.96812
## SOA          -0.00000081318516   0.00001947013225  -0.042         0.96675
## E             0.00062329145505   0.00061783119649   1.009         0.31490
## DP           -0.00013036114341   0.00010556737610  -1.235         0.21908
## FP            5.05572761522887   3.77755736106868   1.338         0.18308
## attendance    0.00000000190980   0.00000000283370   0.674         0.50151
## BPF          -0.00205457268996   0.00146090588733  -1.406         0.16196
## PPF           0.00153027670253   0.00149276141713   1.025         0.30718
## AVG           6.72717347302941   7.19884177726872   0.934         0.35176
## X3R         111.03412855211309  63.74134440362309   1.742         0.08385
## RR            0.03524882658670   0.09590356320305   0.368         0.71380
##                
## (Intercept) .  
## sd             
## yearID         
## Rank        ***
## Ghome          
## R           *  
## AB             
## H              
## X2B            
## X3B         .  
## HR             
## BB          *  
## SO             
## SB             
## CS          ** 
## HBP            
## SF             
## RA             
## ER             
## ERA            
## CG          *  
## SHO         ** 
## SV          ***
## IPouts      ** 
## HA             
## HRA         .  
## BBA            
## SOA            
## E              
## DP             
## FP             
## attendance     
## BPF            
## PPF            
## AVG            
## X3R         .  
## RR             
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
## 
## Residual standard error: 0.01549 on 132 degrees of freedom
## Multiple R-squared:  0.9587, Adjusted R-squared:  0.9475 
## F-statistic: 85.19 on 36 and 132 DF,  p-value: < 0.00000000000000022
lm$results
##   intercept       RMSE  Rsquared        MAE      RMSESD RsquaredSD
## 1      TRUE 0.01759883 0.9370755 0.01386533 0.002868096 0.02368572
##         MAESD
## 1 0.002339939

sd is still not significant. In the earlier regression, L (losses) was clearly significant, and at the 90% confidence level HR, BB, SO, and attendance were also significant.

There are many variables, so ridge and lasso regression are used to select (or drop) variables.

Ridge Regression:

set.seed(123)
ridgeReg <- train(WP~., train.df, method = 'glmnet', 
               tuneGrid = expand.grid(alpha = 0,
                                      lambda = seq(0.0001, 1, length = 5)), 
               trControl = tr) 

# print results
print(ridgeReg)

# plot results
plot(ridgeReg)

plot(ridgeReg$finalModel, xvar = 'lambda', lwd =1.5, label = TRUE) 

# As lambda increases, the coefficients shrink toward 0 and the model loses explanatory power

plot(varImp(ridgeReg, scale = FALSE))

plot(varImp(ridgeReg, scale = TRUE)) #same scale 

FP (fielding percentage) is the most important predictor of WP. Some predictors that are not important in this model, such as sd, shrink to nearly zero.

Lasso Regression:

set.seed(123)
lassoReg <- train(WP~., train.df, method = 'glmnet', 
               tuneGrid = expand.grid(alpha = 1, 
              lambda = seq(0.0001, 0.5, length = 5)),
               trControl = tr)

print(lassoReg)

 # plot results
plot(lassoReg)

plot(lassoReg$finalModel, xvar = 'lambda', lwd =1.4, label=TRUE)

plot(varImp(lassoReg, scale = FALSE))

plot(varImp(lassoReg, scale = TRUE))

FP (fielding percentage) is again the most important predictor of WP, matching the ridge result. Predictors that are not important in this model, such as sd, shrink all the way to zero.
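
To see exactly which predictors the lasso zeroes out (a sketch that mirrors the coefficient extraction used for the elastic net below), the coefficients at the selected lambda can be inspected:

# Sketch: coefficients of the final lasso model at the cross-validated lambda;
# entries printed as "." are exactly zero, i.e. dropped by the lasso.
coef(lassoReg$finalModel, s = lassoReg$bestTune$lambda)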

Elastic-Net Regression:

set.seed(123)
enetReg <- train(WP~., train.df, method = 'glmnet',
               tuneGrid = expand.grid(alpha = seq(0, 1, length = 10), 
                                      lambda = seq(0.0001, 0.3, length = 10)),
               trControl = tr)
  # print best-tuned results
enetReg$bestTune

 # plot results
plot(enetReg)  # alpha is the mixing parameter and lambda is the regularization parameter

plot(enetReg$finalModel, xvar = 'lambda', lwd =1.4, label=TRUE)

plot(varImp(enetReg, scale = FALSE))

plot(varImp(enetReg, scale = TRUE))

The best-tuned alpha equals 1, which means the elastic-net model here is essentially the same as the lasso regression.

Compare Models: which model has the lowest RMSE and MAE, and the highest R2?

  # create a list of above models
model_list <- list(Linear = lm, 
                   Ridge = ridgeReg, 
                   Lasso = lassoReg, 
                   ElasticNet = enetReg)
compare <- resamples(model_list)
  # Compare summary of models
summary(compare)
## 
## Call:
## summary.resamples(object = compare)
## 
## Models: Linear, Ridge, Lasso, ElasticNet 
## Number of resamples: 50 
## 
## MAE 
##                   Min.    1st Qu.     Median       Mean    3rd Qu.
## Linear     0.008419842 0.01236876 0.01394715 0.01386533 0.01577353
## Ridge      0.009043358 0.01242553 0.01407917 0.01405810 0.01587521
## Lasso      0.009124789 0.01203271 0.01376281 0.01386376 0.01610182
## ElasticNet 0.009124789 0.01203271 0.01376281 0.01386376 0.01610182
##                  Max. NA's
## Linear     0.02019433    0
## Ridge      0.01934665    0
## Lasso      0.01798780    0
## ElasticNet 0.01798780    0
## 
## RMSE 
##                  Min.    1st Qu.     Median       Mean    3rd Qu.
## Linear     0.00966068 0.01605300 0.01730557 0.01759883 0.01944844
## Ridge      0.01232038 0.01532790 0.01739316 0.01742610 0.01982906
## Lasso      0.01196078 0.01476813 0.01722546 0.01738179 0.01994472
## ElasticNet 0.01196078 0.01476813 0.01722546 0.01738179 0.01994472
##                  Max. NA's
## Linear     0.02320148    0
## Ridge      0.02352803    0
## Lasso      0.02334309    0
## ElasticNet 0.02334309    0
## 
## Rsquared 
##                 Min.   1st Qu.    Median      Mean   3rd Qu.      Max.
## Linear     0.8602018 0.9285226 0.9377261 0.9370755 0.9524312 0.9797852
## Ridge      0.8562716 0.9274940 0.9417060 0.9391256 0.9524280 0.9729609
## Lasso      0.8783043 0.9280651 0.9404051 0.9394398 0.9540745 0.9685581
## ElasticNet 0.8783043 0.9280651 0.9404051 0.9394398 0.9540745 0.9685581
##            NA's
## Linear        0
## Ridge         0
## Lasso         0
## ElasticNet    0
  # Plot errors from two of the four above models
xyplot(compare, model = c("Ridge", "Lasso"), 
       metric = 'RMSE')

Choose the Best model from ElasticNet:

best <- enetReg$finalModel
coef(best, s = enetReg$bestTune$lambda)
## 37 x 1 sparse Matrix of class "dgCMatrix"
##                                1
## (Intercept) -1.64218150213371694
## sd           0.00000000006290529
## yearID      -0.00011983701726055
## Rank        -0.01264454792049106
## Ghome       -0.00196703972628020
## R            0.00028607939468852
## AB          -0.00015875830946289
## H            .                  
## X2B          0.00003839355968168
## X3B          .                  
## HR           0.00009364812714483
## BB          -0.00008202386492369
## SO           .                  
## SB           0.00003724267066718
## CS          -0.00040890185528659
## HBP          0.00010356883576261
## SF          -0.00006688191127966
## RA           .                  
## ER          -0.00003594146948271
## ERA          .                  
## CG           0.00138554612758387
## SHO          0.00107858090613091
## SV           0.00184140166880023
## IPouts       0.00028630138842705
## HA          -0.00004316827637702
## HRA         -0.00015975954392130
## BBA         -0.00000430169704147
## SOA          .                  
## E            0.00016998458509342
## DP          -0.00009483368881248
## FP           1.88588261041902316
## attendance   0.00000000157248819
## BPF         -0.00048391438858579
## PPF          0.00000001023404844
## AVG          0.53929429273055651
## X3R          0.78743832266361102
## RR           0.09740684831224855
# The effect of home runs on attendance, by league
Teams$attend <- Teams$attendance/Teams$G
Teams$lgID <- relevel(Teams$lgID, ref = "AL")
plot(Teams$HR, Teams$attend)

lm <- lm(attend~HR, data=Teams)
summary(lm)
## 
## Call:
## lm(formula = attend ~ HR, data = Teams)
## 
## Residuals:
##      Min       1Q   Median       3Q      Max 
## -11308.6  -2611.2   -482.2   2143.3  18465.7 
## 
## Coefficients:
##             Estimate Std. Error t value            Pr(>|t|)    
## (Intercept)  502.597    174.924   2.873              0.0041 ** 
## HR            72.630      1.395  52.048 <0.0000000000000002 ***
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
## 
## Residual standard error: 4104 on 2554 degrees of freedom
##   (279 observations deleted due to missingness)
## Multiple R-squared:  0.5147, Adjusted R-squared:  0.5145 
## F-statistic:  2709 on 1 and 2554 DF,  p-value: < 0.00000000000000022
# In MLB, each additional home run is associated with an increase of about 72 in average attendance per game.
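
As an illustration of that coefficient (a sketch; the HR values 150 and 151 are arbitrary), the fitted model predicts average attendance roughly 72 higher for one extra home run:

# Sketch: predicted average attendance per game at 150 vs 151 home runs.
predict(lm, newdata = data.frame(HR = c(150, 151)))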

plot(Teams$HR, Teams$attend, type = "n")
abline(lm(attend~HR, data=Teams))

league <- subset(Teams, lgID == "AL" | lgID == "NL")
plot(league$HR, league$attend, type = "n")
lm2 <- lm(attend~HR+lgID, data = league)
lm2
## 
## Call:
## lm(formula = attend ~ HR + lgID, data = league)
## 
## Coefficients:
## (Intercept)           HR       lgIDNL  
##      -60.83        73.49       895.95
# The attendance effect is larger in the NL, by about 895.95.

abline(coef(lm2)[1], coef(lm2)[2], lwd = 2, lty = 2)
abline(coef(lm2)[1]+coef(lm2)[3], coef(lm2)[2], lwd = 2)

#Dotted line = AL

plot(Teams$HR, Teams$attend, type = "n")
d3 <- lm(attend~HR*lgID, data = league)
abline(coef(d3)[1], coef(d3)[2], lwd = 2, lty = 2)
abline(coef(d3)[1]+ coef(d3)[3], coef(d3)[2]+ coef(d3)[4], lwd = 2)

summary(lm(attend~HR*lgID, data = league))
## 
## Call:
## lm(formula = attend ~ HR * lgID, data = league)
## 
## Residuals:
##      Min       1Q   Median       3Q      Max 
## -12484.3  -2531.9   -408.7   2018.1  17876.8 
## 
## Coefficients:
##             Estimate Std. Error t value             Pr(>|t|)    
## (Intercept)  762.817    256.074   2.979              0.00292 ** 
## HR            66.496      1.937  34.335 < 0.0000000000000002 ***
## lgIDNL      -704.483    348.726  -2.020              0.04347 *  
## HR:lgIDNL     14.399      2.780   5.180          0.000000239 ***
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
## 
## Residual standard error: 4060 on 2552 degrees of freedom
##   (108 observations deleted due to missingness)
## Multiple R-squared:  0.5254, Adjusted R-squared:  0.5249 
## F-statistic: 941.8 on 3 and 2552 DF,  p-value: < 0.00000000000000022

Multilevel mixed effects model

#Predicting the number of wins from team batting average (AVG)

league <- na.omit(league)
league$AVG <- league$H / league$AB
league$division <- paste(league$lgID, league$divID)

lm3 <- lm(W~AVG, data = league)
summary(lm3)
## 
## Call:
## lm(formula = W ~ AVG, data = league)
## 
## Residuals:
##      Min       1Q   Median       3Q      Max 
## -30.6631  -8.2168   0.9902   7.6765  25.4136 
## 
## Coefficients:
##             Estimate Std. Error t value            Pr(>|t|)    
## (Intercept)   -10.66      10.25  -1.040               0.299    
## AVG           351.29      39.24   8.951 <0.0000000000000002 ***
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
## 
## Residual standard error: 10.57 on 508 degrees of freedom
## Multiple R-squared:  0.1362, Adjusted R-squared:  0.1345 
## F-statistic: 80.13 on 1 and 508 DF,  p-value: < 0.00000000000000022
glm <- glm(W~AVG, data = league)
summary(glm)
## 
## Call:
## glm(formula = W ~ AVG, data = league)
## 
## Deviance Residuals: 
##      Min        1Q    Median        3Q       Max  
## -30.6631   -8.2168    0.9902    7.6765   25.4136  
## 
## Coefficients:
##             Estimate Std. Error t value            Pr(>|t|)    
## (Intercept)   -10.66      10.25  -1.040               0.299    
## AVG           351.29      39.24   8.951 <0.0000000000000002 ***
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
## 
## (Dispersion parameter for gaussian family taken to be 111.7876)
## 
##     Null deviance: 65745  on 509  degrees of freedom
## Residual deviance: 56788  on 508  degrees of freedom
## AIC: 3856.8
## 
## Number of Fisher Scoring iterations: 2
#AIC = 3856.8

#A multilevel mixed model accounts for the hierarchy: teams belong to divisions nested within leagues.
lmer <- lmer(W~AVG+(1|lgID/division), data = league)
display(lmer)
## lmer(formula = W ~ AVG + (1 | lgID/division), data = league)
##             coef.est coef.se
## (Intercept) -13.24    10.38 
## AVG         361.35    39.66 
## 
## Error terms:
##  Groups        Name        Std.Dev.
##  division:lgID (Intercept)  1.55   
##  lgID          (Intercept)  0.00   
##  Residual                  10.48   
## ---
## number of obs: 510, groups: division:lgID, 6; lgID, 2
## AIC = 3847.5, DIC = 3858.4
## deviance = 3848.0
ggplot(league, aes(x = AVG, y = W)) + geom_point(size = 0.5) + stat_smooth(method = "lm", col = "blue") + facet_wrap(~division) + labs(x = "AVG", y = "Games won")

The simple linear regression relates AVG to wins while ignoring which division and league each team belongs to. In the lmer() formula, the fixed-effect terms come before the random-effect terms, and nested grouping factors are written from the larger factor down to the smaller one (here lgID/division). The multilevel mixed model also has a lower AIC (3847.5) than the linear regression (3856.8).
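
A minimal sketch of that syntax, refitting by maximum likelihood so the AIC is directly comparable with lm3 (the explicit nested spelling in the comment is an equivalent alternative, not part of the original analysis):

lmer.ml <- lmer(W~AVG+(1|lgID/division), data = league, REML = FALSE)
#(1|lgID/division) expands to (1|lgID) + (1|lgID:division): a random intercept
#for league plus a random intercept for division nested within league
AIC(lm3, lmer.ml)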

Looking at the faceted plot, the number of games won in AL C and AL E is quite sensitive to AVG, whereas NL E and NL W are much less so. This variation across divisions is a good reason to use a multilevel mixed model.

Panel data

#MLB team-season data are panel data (the same teams are observed across seasons), so panel regression is preferable to a pooled linear model.

team121314 <- Teams[Teams$yearID==2014|Teams$yearID==2013|Teams$yearID==2012,]
team121314$WP <- team121314$W / team121314$G

lm4 <- lm(WP~SF+R+AB+H+X2B+X3B+HR+BB+SO+SB+CS+HBP+RA+ER+ERA+CG+SHO+SV+IPouts+HA+HRA+BBA+SOA+E+DP+FP+attendance+BPF, data = team121314)
summary(lm4)
## 
## Call:
## lm(formula = WP ~ SF + R + AB + H + X2B + X3B + HR + BB + SO + 
##     SB + CS + HBP + RA + ER + ERA + CG + SHO + SV + IPouts + 
##     HA + HRA + BBA + SOA + E + DP + FP + attendance + BPF, data = team121314)
## 
## Residuals:
##       Min        1Q    Median        3Q       Max 
## -0.032478 -0.008154  0.001331  0.008133  0.027876 
## 
## Coefficients:
##                    Estimate      Std. Error t value   Pr(>|t|)    
## (Intercept) -6.220951785310  5.258124471226  -1.183    0.24136    
## SF           0.000206399768  0.000313333645   0.659    0.51255    
## R            0.000531407182  0.000099649057   5.333 0.00000149 ***
## AB          -0.000305038648  0.000097129195  -3.141    0.00260 ** 
## H            0.000186596106  0.000104283089   1.789    0.07853 .  
## X2B          0.000067082985  0.000119509271   0.561    0.57664    
## X3B          0.000130377918  0.000292687911   0.445    0.65757    
## HR           0.000344568437  0.000140846966   2.446    0.01733 *  
## BB          -0.000068215033  0.000053601400  -1.273    0.20798    
## SO          -0.000022582037  0.000028915556  -0.781    0.43784    
## SB           0.000044229920  0.000083963843   0.527    0.60026    
## CS          -0.000640523093  0.000293729239  -2.181    0.03308 *  
## HBP         -0.000013056399  0.000160679194  -0.081    0.93550    
## RA           0.000034023876  0.000239148616   0.142    0.88733    
## ER          -0.004278263888  0.002563791690  -1.669    0.10030    
## ERA          0.618938165691  0.407184243998   1.520    0.13367    
## CG           0.002209694453  0.001086088937   2.035    0.04625 *  
## SHO          0.000465239424  0.000597155268   0.779    0.43894    
## SV           0.002043276588  0.000402328278   5.079 0.00000384 ***
## IPouts       0.000919936127  0.000391859751   2.348    0.02215 *  
## HA           0.000037888610  0.000078980377   0.480    0.63314    
## HRA         -0.000413814119  0.000151486833  -2.732    0.00823 ** 
## BBA          0.000053152899  0.000069932588   0.760    0.45015    
## SOA         -0.000045862254  0.000030747437  -1.492    0.14096    
## E            0.000236354031  0.000867635756   0.272    0.78623    
## DP          -0.000277736807  0.000146351471  -1.898    0.06247 .  
## FP           4.161499173058  5.245659463111   0.793    0.43067    
## attendance   0.000000003232  0.000000003834   0.843    0.40252    
## BPF         -0.001205742506  0.000450356524  -2.677    0.00952 ** 
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
## 
## Residual standard error: 0.01623 on 61 degrees of freedom
## Multiple R-squared:  0.9621, Adjusted R-squared:  0.9448 
## F-statistic: 55.37 on 28 and 61 DF,  p-value: < 0.00000000000000022

Random Effect

random1 <- plm(WP~SF+R+AB+H+X2B+X3B+HR+BB+SO+SB+CS+HBP+RA+ER+ERA+CG+SHO+SV+IPouts+HA+HRA+BBA+SOA+E+DP+FP+attendance+BPF, data = team121314, index=c("teamID", "yearID"), model = "random")
summary(random1)
## Oneway (individual) effect Random Effect Model 
##    (Swamy-Arora's transformation)
## 
## Call:
## plm(formula = WP ~ SF + R + AB + H + X2B + X3B + HR + BB + SO + 
##     SB + CS + HBP + RA + ER + ERA + CG + SHO + SV + IPouts + 
##     HA + HRA + BBA + SOA + E + DP + FP + attendance + BPF, data = team121314, 
##     model = "random", index = c("teamID", "yearID"))
## 
## Balanced Panel: n = 30, T = 3, N = 90
## 
## Effects:
##                       var     std.dev share
## idiosyncratic 0.000181484 0.013471590 0.956
## individual    0.000008287 0.002878767 0.044
## theta: 0.06218
## 
## Residuals:
##       Min.    1st Qu.     Median    3rd Qu.       Max. 
## -0.0318395 -0.0081371  0.0015323  0.0079635  0.0268605 
## 
## Coefficients:
##                     Estimate       Std. Error z-value     Pr(>|z|)    
## (Intercept) -6.0503670707884  5.2964819656916 -1.1423    0.2533140    
## SF           0.0001858567684  0.0003125307779  0.5947    0.5520553    
## R            0.0005299700592  0.0000999756284  5.3010 0.0000001152 ***
## AB          -0.0003216776389  0.0000976387179 -3.2946    0.0009857 ***
## H            0.0002000683921  0.0001051393037  1.9029    0.0570551 .  
## X2B          0.0000672424208  0.0001206861155  0.5572    0.5774128    
## X3B          0.0001430211966  0.0002950750223  0.4847    0.6278932    
## HR           0.0003414727758  0.0001410221745  2.4214    0.0154603 *  
## BB          -0.0000709367917  0.0000538098779 -1.3183    0.1874080    
## SO          -0.0000192986761  0.0000292961075 -0.6587    0.5100593    
## SB           0.0000475678459  0.0000846467377  0.5620    0.5741452    
## CS          -0.0006613217891  0.0002985549896 -2.2151    0.0267549 *  
## HBP         -0.0000156238436  0.0001625058401 -0.0961    0.9234068    
## RA           0.0000207112424  0.0002373665301  0.0873    0.9304694    
## ER          -0.0041811718944  0.0025771766907 -1.6224    0.1047210    
## ERA          0.6080824005019  0.4097228347965  1.4841    0.1377741    
## CG           0.0022196533252  0.0010851843314  2.0454    0.0408139 *  
## SHO          0.0005013100324  0.0005974616763  0.8391    0.4014320    
## SV           0.0020180653952  0.0004050800811  4.9819 0.0000006297 ***
## IPouts       0.0009306668234  0.0003941196864  2.3614    0.0182070 *  
## HA           0.0000314168416  0.0000789833769  0.3978    0.6908032    
## HRA         -0.0004226372078  0.0001521630083 -2.7775    0.0054774 ** 
## BBA          0.0000492757122  0.0000698654532  0.7053    0.4806271    
## SOA         -0.0000478781115  0.0000308969508 -1.5496    0.1212360    
## E            0.0002107841946  0.0008751628139  0.2409    0.8096703    
## DP          -0.0002787308720  0.0001467061402 -1.8999    0.0574428 .  
## FP           4.0214836212645  5.2865152676241  0.7607    0.4468327    
## attendance   0.0000000031751  0.0000000039544  0.8029    0.4220145    
## BPF         -0.0011873329306  0.0004568317689 -2.5991    0.0093480 ** 
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
## 
## Total Sum of Squares:    0.39114
## Residual Sum of Squares: 0.015454
## R-Squared:      0.96049
## Adj. R-Squared: 0.94236
## Chisq: 1482.96 on 28 DF, p-value: < 0.000000000000000222

Fixed effect

fixed1 <- plm(WP~SF+R+AB+H+X2B+X3B+HR+BB+SO+SB+CS+HBP+RA+ER+ERA+CG+SHO+SV+IPouts+HA+HRA+BBA+SOA+E+DP+FP+attendance+BPF, data = team121314, index=c("teamID", "yearID"), model = "within")
summary(fixed1)
## Oneway (individual) effect Within Model
## 
## Call:
## plm(formula = WP ~ SF + R + AB + H + X2B + X3B + HR + BB + SO + 
##     SB + CS + HBP + RA + ER + ERA + CG + SHO + SV + IPouts + 
##     HA + HRA + BBA + SOA + E + DP + FP + attendance + BPF, data = team121314, 
##     model = "within", index = c("teamID", "yearID"))
## 
## Balanced Panel: n = 30, T = 3, N = 90
## 
## Residuals:
##        Min.     1st Qu.      Median     3rd Qu.        Max. 
## -0.02046963 -0.00543497 -0.00021741  0.00556946  0.01868643 
## 
## Coefficients:
##                   Estimate      Std. Error t-value    Pr(>|t|)    
## SF         -0.000158642083  0.000338568024 -0.4686   0.6425549    
## R           0.000329495773  0.000131480871  2.5060   0.0174876 *  
## AB         -0.000671207055  0.000121981926 -5.5025 0.000004603 ***
## H           0.000540774611  0.000143570551  3.7666   0.0006715 ***
## X2B         0.000251147294  0.000165230378  1.5200   0.1383355    
## X3B         0.000923183287  0.000413883405  2.2305   0.0328487 *  
## HR          0.000222698337  0.000176859317  1.2592   0.2170720    
## BB         -0.000068078158  0.000072620849 -0.9374   0.3555508    
## SO          0.000107237705  0.000046205566  2.3209   0.0268226 *  
## SB          0.000163744053  0.000113757483  1.4394   0.1597447    
## CS         -0.001482946436  0.000459743613 -3.2256   0.0028954 ** 
## HBP        -0.000471296422  0.000228084764 -2.0663   0.0469715 *  
## RA         -0.000286642865  0.000256796709 -1.1162   0.2726378    
## ER         -0.002650198909  0.003107320654 -0.8529   0.4000599    
## ERA         0.455469973822  0.498517762130  0.9136   0.3677346    
## CG          0.000770298758  0.001299484150  0.5928   0.5574995    
## SHO         0.000967925636  0.000684026668  1.4150   0.1667149    
## SV          0.001077982297  0.000579835656  1.8591   0.0722275 .  
## IPouts      0.001154234554  0.000478660345  2.4114   0.0218061 *  
## HA         -0.000159603454  0.000107873123 -1.4795   0.1487721    
## HRA        -0.000446868390  0.000188421673 -2.3716   0.0238935 *  
## BBA         0.000070054141  0.000092113023  0.7605   0.4525093    
## SOA        -0.000110402640  0.000041914809 -2.6340   0.0128937 *  
## E           0.000580872076  0.001225236974  0.4741   0.6386551    
## DP         -0.000312543730  0.000178395502 -1.7520   0.0893583 .  
## FP          6.618658149739  7.226408248687  0.9159   0.3665710    
## attendance  0.000000031680  0.000000015653  2.0238   0.0514031 .  
## BPF         0.000864938882  0.000909339949  0.9512   0.3486464    
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
## 
## Total Sum of Squares:    0.14946
## Residual Sum of Squares: 0.0058075
## R-Squared:      0.96114
## Adj. R-Squared: 0.89193
## F-statistic: 28.27 on 28 and 32 DF, p-value: 0.0000000000000011291

We think the team winning percentage (WP) can be affected by systematic factors that differ across teams and seasons, so panel regression is more appropriate than a simple linear model for inferring relationships between variables. We therefore built a table containing only the 2012, 2013, and 2014 seasons and fit the pooled lm, random-effects, and fixed-effects models.

In the random-effects model, the overall p-value is small and the adjusted R-squared is about 0.94. R, AB, HR, CS, CG, SV, IPouts, HRA, and BPF are significant predictors of winning percentage at the 5% level.

In the fixed-effects model, the overall p-value is also small and the adjusted R-squared is about 0.89. R, AB, H, X3B, SO, CS, HBP, IPouts, HRA, and SOA are significant predictors of winning percentage at the 5% level.
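
To help choose between these two specifications, a Hausman test on the two plm fits above is a natural next step (a minimal sketch, not run in the original analysis): a small p-value indicates the random-effects assumptions are inconsistent with the data, favoring the fixed-effects (within) model.

phtest(fixed1, random1)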

Conclusion

Through this data set, we were able to examine hypotheses popular among baseball fans, such as the claim that baseball is controlled by pitching, and to observe similarities and differences between the two leagues. We used simple linear regression to relate ERA and AVG to team wins, and to relate attendance to the standard deviation of AVG. Using t-tests, we analyzed differences in means between the leagues, and with a multilevel mixed model we examined each division's sensitivity to AVG. Using LDA, QDA, neural networks, and K-means, we compared each model's accuracy in predicting division winners. Finally, because MLB data are panel data, we used panel regression to model team wins.