This is an R Markdown Notebook. When you execute code within the notebook, the results appear beneath the code.

Try executing this chunk by clicking the Run button within the chunk or by placing your cursor inside it and pressing Ctrl+Shift+Enter.

plot(cars)
library(haven)
readprof <- read_dta("EDUS 651/readprof.dta")
glimpse(readprof)
Rows: 6,528
Columns: 8
$ schcode  <dbl> 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100,...
$ lowses   <dbl+lbl> 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,...
$ female   <dbl+lbl> 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,...
$ minor    <dbl> 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, ...
$ schcomp  <dbl> -2.4, -2.4, -2.4, -2.4, -2.4, -2.4, -2.4, -2.4, -2.4, ...
$ smallsch <dbl> 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, ...
$ id       <dbl> 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16,...
$ readprof <dbl> 0, 1, 0, 0, 1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, ...
library(tidyverse)
Registered S3 methods overwritten by 'dbplyr':
  method         from
  print.tbl_lazy     
  print.tbl_sql      
-- Attaching packages --------------------------------------- tidyverse 1.3.0 --
v ggplot2 3.3.2     v purrr   0.3.4
v tibble  3.0.3     v dplyr   1.0.2
v tidyr   1.1.1     v stringr 1.4.0
v readr   1.3.1     v forcats 0.5.0
-- Conflicts ------------------------------------------ tidyverse_conflicts() --
x dplyr::filter() masks stats::filter()
x dplyr::lag()    masks stats::lag()
library(lme4)
Loading required package: Matrix

Attaching package: ‘Matrix’

The following objects are masked from ‘package:tidyr’:

    expand, pack, unpack
readprof.clean <- readprof %>%
  mutate(minor.fac = as_factor(minor),
         schcode.fac = as_factor(schcode),
         lowses.fac = as_factor(lowses),
         female.fac = as_factor(female),
         smallsch.fac = as_factor(smallsch),
         readprof.fac = as_factor(readprof))
glimpse(readprof.clean)
Rows: 6,528
Columns: 14
$ schcode      <dbl> 100, 100, 100, 100, 100, 100, 100, 100, 100...
$ lowses       <dbl+lbl> 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, ...
$ female       <dbl+lbl> 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, ...
$ minor        <dbl> 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
$ schcomp      <dbl> -2.4, -2.4, -2.4, -2.4, -2.4, -2.4, -2.4, -...
$ smallsch     <dbl> 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
$ id           <dbl> 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, ...
$ readprof     <dbl> 0, 1, 0, 0, 1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 1...
$ minor.fac    <fct> 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
$ schcode.fac  <fct> 100, 100, 100, 100, 100, 100, 100, 100, 100...
$ lowses.fac   <fct> non-low socioeconomic status, non-low socio...
$ female.fac   <fct> male, male, male, male, male, male, male, m...
$ smallsch.fac <fct> 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
$ readprof.fac <fct> 0, 1, 0, 0, 1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 1...
  1. Treating “readprof” as the DV, and “schcode” as the level-2 clustering variable, estimate a null model. Compute, report, and interpret the ICC. Is it worth conducting MLM on these data?
model.null <- glmer(readprof.fac ~ (1|schcode.fac), family = binomial, data = readprof.clean)
summary(model.null)
Generalized linear mixed model fit by maximum likelihood
  (Laplace Approximation) [glmerMod]
 Family: binomial  ( logit )
Formula: readprof.fac ~ (1 | schcode.fac)
   Data: readprof.clean

     AIC      BIC   logLik deviance df.resid 
  8868.5   8882.1  -4432.3   8864.5     6526 

Scaled residuals: 
    Min      1Q  Median      3Q     Max 
-1.7663 -0.9815  0.6879  0.9145  1.3931 

Random effects:
 Groups      Name        Variance Std.Dev.
 schcode.fac (Intercept) 0.2283   0.4778  
Number of obs: 6528, groups:  schcode.fac, 122

Fixed effects:
            Estimate Std. Error z value Pr(>|z|)  
(Intercept)  0.13155    0.05198   2.531   0.0114 *
---
Signif. codes:  
0 ‘***’ 0.001 ‘**’ 0.01 ‘*’ 0.05 ‘.’ 0.1 ‘ ’ 1
Null.ICC <-  0.2283/(0.2283 + pi^2/3)

The ICC is about 0.06: roughly 6% of the variance in reading proficiency lies between schools. That is enough between-school variation to make MLM worth conducting on these data.
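
As a check, the level-2 intercept variance can be pulled straight from the fitted model with lme4's VarCorr() instead of retyping it; this is just a sketch using the model.null object fit above, with the level-1 residual variance fixed at pi^2/3 on the logit scale.

tau00 <- as.numeric(VarCorr(model.null)$schcode.fac)  # between-school intercept variance (~0.228)
tau00 / (tau00 + pi^2/3)                              # ICC on the logit scale (~0.06)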

  2. Interpret the estimated fixed effect of the intercept (_cons) in the null model. What is this number measuring, and what does it mean? You can be more vague with this answer! Logits are tough.

The intercept is 0.13, which is the log-odds (logit) of being proficient in reading for a student in a typical school. Because it is positive, the odds of being reading proficient are somewhat better than even across the sample.
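
To make the logit more concrete, the 0.13155 estimate reported above can be converted to odds and then to a probability; this is only a back-of-the-envelope transformation of the intercept.

exp(0.13155)                       # odds of proficiency in a typical school, about 1.14
exp(0.13155) / (1 + exp(0.13155))  # implied probability, about 0.53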

  3. Conduct a multilevel model with “readprof” as the DV, “lowses”, “female”, and “minor” as level-1 IVs. Include all three IVs as fixed effects (use a random intercept only). Interpret your results in odds ratios using the or option in Stata or the formula I provided in R…

model.1 <- glmer(readprof.fac ~ lowses.fac + female.fac + minor.fac + (1|schcode.fac), family = binomial, data = readprof.clean)
summary(model.1)
Generalized linear mixed model fit by maximum likelihood (Laplace
  Approximation) [glmerMod]
 Family: binomial  ( logit )
Formula: 
readprof.fac ~ lowses.fac + female.fac + minor.fac + (1 | schcode.fac)
   Data: readprof.clean

     AIC      BIC   logLik deviance df.resid 
  8696.8   8730.7  -4343.4   8686.8     6523 

Scaled residuals: 
    Min      1Q  Median      3Q     Max 
-1.9203 -0.9482  0.6194  0.8937  1.6114 

Random effects:
 Groups      Name        Variance Std.Dev.
 schcode.fac (Intercept) 0.1033   0.3214  
Number of obs: 6528, groups:  schcode.fac, 122

Fixed effects:
                                   Estimate Std. Error z value
(Intercept)                         0.39009    0.05876   6.639
lowses.faclow socioeconomic status -0.43685    0.05717  -7.642
female.facfemale                    0.32357    0.05168   6.260
minor.fac1                         -0.43906    0.05605  -7.833
                                   Pr(>|z|)    
(Intercept)                        3.15e-11 ***
lowses.faclow socioeconomic status 2.14e-14 ***
female.facfemale                   3.84e-10 ***
minor.fac1                         4.77e-15 ***
---
Signif. codes:  0 ‘***’ 0.001 ‘**’ 0.01 ‘*’ 0.05 ‘.’ 0.1 ‘ ’ 1

Correlation of Fixed Effects:
            (Intr) lws.ss fml.fc
lwss.fclwss -0.359              
femal.fcfml -0.415 -0.031       
minor.fac1  -0.393 -0.171 -0.004
lowses.odds.ratio <- exp(-0.43685)
female.odds.ratio <- exp(0.32357)
minor.odds.ratio <- exp(-0.43906)
intercept.odds.ratio <- exp(0.39009)
lowses.odds.ratio
[1] 0.6460683
female.odds.ratio
[1] 1.382053
minor.odds.ratio
[1] 0.6446421
intercept.odds.ratio
[1] 1.477114

If a student has low SES, their odds of being proficient in reading are about 35% lower than if they did not (OR = 0.65). The odds of being reading proficient are about 38% higher for females than for males (OR = 1.38), and about 36% lower for students belonging to minority groups than for those belonging to the majority group (OR = 0.64). The intercept odds ratio of 1.48 is not a comparison; it is the odds of being reading proficient for the reference group, a male student who is not low SES and not a member of a minority group.
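
The odds ratios can also be taken straight from the fitted model instead of retyping each estimate; a sketch using lme4's fixef() and approximate Wald intervals on model.1 as fit above.

exp(fixef(model.1))                                      # odds ratios for all fixed effects at once
exp(confint(model.1, parm = "beta_", method = "Wald"))   # approximate 95% CIs on the odds-ratio scale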

  4. Building from the previous section, test the impact of “smallsch” as a level-2 IV, keeping all IVs as fixed effects (random intercept only). Interpret the results for the smallsch coefficient in terms of odds ratios. If you were a supporter of creating smaller schools, how would you interpret this result?
model.2 <- glmer(readprof.fac ~ lowses.fac + female.fac + minor.fac + smallsch.fac + (1|schcode.fac), family = binomial, data = readprof.clean)
summary(model.2)
Generalized linear mixed model fit by maximum likelihood (Laplace
  Approximation) [glmerMod]
 Family: binomial  ( logit )
Formula: 
readprof.fac ~ lowses.fac + female.fac + minor.fac + smallsch.fac +  
    (1 | schcode.fac)
   Data: readprof.clean

     AIC      BIC   logLik deviance df.resid 
  8695.8   8736.5  -4341.9   8683.8     6522 

Scaled residuals: 
    Min      1Q  Median      3Q     Max 
-1.9364 -0.9556  0.6210  0.8947  1.6222 

Random effects:
 Groups      Name        Variance Std.Dev.
 schcode.fac (Intercept) 0.09668  0.3109  
Number of obs: 6528, groups:  schcode.fac, 122

Fixed effects:
                                   Estimate Std. Error z value Pr(>|z|)    
(Intercept)                         0.32449    0.07001   4.635 3.57e-06 ***
lowses.faclow socioeconomic status -0.43920    0.05713  -7.688 1.50e-14 ***
female.facfemale                    0.32292    0.05167   6.249 4.12e-10 ***
minor.fac1                         -0.43983    0.05598  -7.857 3.94e-15 ***
smallsch.fac1                       0.13854    0.07941   1.745   0.0811 .  
---
Signif. codes:  0 ‘***’ 0.001 ‘**’ 0.01 ‘*’ 0.05 ‘.’ 0.1 ‘ ’ 1

Correlation of Fixed Effects:
            (Intr) lws.ss fml.fc mnr.f1
lwss.fclwss -0.303                     
femal.fcfml -0.346 -0.031              
minor.fac1  -0.336 -0.172 -0.003       
smllsch.fc1 -0.552 -0.002 -0.003  0.009
smllsch.odds.ratio <- exp(0.13854)
smllsch.odds.ratio
[1] 1.148596

The odds of being proficient in reading are about 15% higher for students at small schools than for students at larger schools (OR = 1.15). However, this effect just misses conventional significance (p = .08), so I would read it only as hesitant support for small schools.
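
To see how close this effect comes to significance, a rough 95% Wald interval for the odds ratio can be built from the estimate and standard error reported above.

exp(0.13854 + c(-1.96, 1.96) * 0.07941)  # roughly (0.98, 1.34); the interval just includes 1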

  5. Next, add a second level-2 IV, schcomp, to the model along with all variables from #4. Interpret the smallsch and schcomp coefficients. Return to your “small schools” argument: has it changed?
model.3 <- glmer(readprof.fac ~ lowses.fac + female.fac + minor.fac + smallsch.fac + schcomp + (1|schcode.fac), family = binomial, data = readprof.clean)
summary(model.3)
Generalized linear mixed model fit by maximum likelihood (Laplace
  Approximation) [glmerMod]
 Family: binomial  ( logit )
Formula: 
readprof.fac ~ lowses.fac + female.fac + minor.fac + smallsch.fac +  
    schcomp + (1 | schcode.fac)
   Data: readprof.clean

     AIC      BIC   logLik deviance df.resid 
  8665.5   8713.0  -4325.8   8651.5     6521 

Scaled residuals: 
    Min      1Q  Median      3Q     Max 
-2.0503 -0.9646  0.6064  0.8824  1.6455 

Random effects:
 Groups      Name        Variance Std.Dev.
 schcode.fac (Intercept) 0.05658  0.2379  
Number of obs: 6528, groups:  schcode.fac, 122

Fixed effects:
                                   Estimate Std. Error z value Pr(>|z|)    
(Intercept)                         0.28348    0.06401   4.429 9.48e-06 ***
lowses.faclow socioeconomic status -0.36153    0.05778  -6.257 3.92e-10 ***
female.facfemale                    0.32237    0.05161   6.246 4.21e-10 ***
minor.fac1                         -0.39549    0.05580  -7.087 1.37e-12 ***
smallsch.fac1                       0.10372    0.07008   1.480    0.139    
schcomp                            -0.20725    0.03511  -5.902 3.59e-09 ***
---
Signif. codes:  0 ‘***’ 0.001 ‘**’ 0.01 ‘*’ 0.05 ‘.’ 0.1 ‘ ’ 1

Correlation of Fixed Effects:
            (Intr) lws.ss fml.fc mnr.f1 smll.1
lwss.fclwss -0.329                            
femal.fcfml -0.377 -0.031                     
minor.fac1  -0.366 -0.159 -0.004              
smllsch.fc1 -0.500 -0.024 -0.005 -0.002       
schcomp      0.154 -0.278 -0.011 -0.175  0.083

With schcomp in the model, smallsch is nowhere close to significant (p = .14), so I would change my stance on small schools. A one-unit increase in schcomp is associated with about 19% lower odds of reading proficiency (OR = exp(-0.21) ≈ 0.81), so the schcomp variable appears to have more of an impact than school size.
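
For reference, the same exponentiation applied to the model.3 estimates reported above gives the odds ratios discussed here.

exp(-0.20725)  # schcomp odds ratio, about 0.81 (roughly 19% lower odds per one-unit increase)
exp(0.10372)   # smallsch odds ratio, about 1.11, no longer significant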

  6. R folks: Using the model you ran for #3 above, calculate the predicted probability of passing for students with the following characteristics: 1) A male student, who qualifies as low SES, who identifies as a member of a minority racial/ethnic group; and 2) A female student, who does not qualify as low SES, who does not identify as a member of a minority racial/ethnic group.
.39 - .44 - .44
[1] -0.49
exp(-.49)/(1 + exp(-.49))
[1] 0.3798936

For a student who identifies as male and a minority group member and is of low SES, the predicted probability of being reading proficient is 38%.

.39 + .32
[1] 0.71
exp(.71)/(1 + exp(.71))
[1] 0.6704012

For a female student, who does not qualify as low SES, who does not identify as a member of a minority racial/ethnic group, the predicted probability of being reading proficient is 67%.
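
The same two probabilities can be computed without hand-rounding by pulling the coefficients from model.1 with fixef() and applying the inverse-logit function plogis(); this sketch assumes the coefficient names exactly as printed in the model.1 summary above and sets the school random effect to zero.

b <- fixef(model.1)
# 1) male, low SES, minority student
plogis(b["(Intercept)"] + b["lowses.faclow socioeconomic status"] + b["minor.fac1"])
# 2) female, non-low SES, non-minority student
plogis(b["(Intercept)"] + b["female.facfemale"])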
