Load libraries.
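The library chunk itself is not shown here; this is a minimal reconstruction covering the packages that the code below calls.
library(dplyr)   # pipes and verbs: select, filter, group_by, mutate, do
library(tidyr)   # gather
library(broom)   # tidy
library(ggplot2) # qplot, geom_smooth
library(lme4)    # lmer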
Read in the data first, then combine the Woodcock and WIAT scores into the standardized-test composite (per-year z-scoring happens later).
d <- read.csv("zenith all data.csv")
# make control the reference level
d$condition <- factor(d$condition, levels = c("control", "abacus"))
# standardized-test composite: mean of WIAT and Woodcock, ignoring missing scores
d$standardized <- rowMeans(d[, c("wiat", "woodcock")], na.rm = TRUE)
Summaries.
d %>%
  select(standardized, ans, deviance, linr2, ordinality, year) %>%
  group_by(year) %>%
  do(tidy(summary(.))) %>%
  data.frame
## year Var1 Var2 Freq
## 1 0 standardized Min. :0.0465
## 2 0 standardized 1st Qu.:0.1530
## 3 0 standardized Median :0.1930
## 4 0 standardized Mean :0.1960
## 5 0 standardized 3rd Qu.:0.2379
## 6 0 standardized Max. :0.3428
## 7 0 standardized <NA>
## 8 0 ans Min. :0.100
## 9 0 ans 1st Qu.:0.232
## 10 0 ans Median :0.360
## 11 0 ans Mean :0.366
## 12 0 ans 3rd Qu.:0.460
## 13 0 ans Max. :0.780
## 14 0 ans NA's :25
## 15 0 deviance Min. : NA
## 16 0 deviance 1st Qu.: NA
## 17 0 deviance Median : NA
## 18 0 deviance Mean :NaN
## 19 0 deviance 3rd Qu.: NA
## 20 0 deviance Max. : NA
## 21 0 deviance NA's :187
## 22 0 linr2 Min. : NA
## 23 0 linr2 1st Qu.: NA
## 24 0 linr2 Median : NA
## 25 0 linr2 Mean :NaN
## 26 0 linr2 3rd Qu.: NA
## 27 0 linr2 Max. : NA
## 28 0 linr2 NA's :187
## 29 0 ordinality Min. : NA
## 30 0 ordinality 1st Qu.: NA
## 31 0 ordinality Median : NA
## 32 0 ordinality Mean :NaN
## 33 0 ordinality 3rd Qu.: NA
## 34 0 ordinality Max. : NA
## 35 0 ordinality NA's :187
## 36 0 year Min. :0
## 37 0 year 1st Qu.:0
## 38 0 year Median :0
## 39 0 year Mean :0
## 40 0 year 3rd Qu.:0
## 41 0 year Max. :0
## 42 0 year <NA>
## 43 1 standardized Min. :0.0781
## 44 1 standardized 1st Qu.:0.2628
## 45 1 standardized Median :0.3112
## 46 1 standardized Mean :0.3112
## 47 1 standardized 3rd Qu.:0.3602
## 48 1 standardized Max. :0.5409
## 49 1 standardized <NA>
## 50 1 ans Min. :0.020
## 51 1 ans 1st Qu.:0.120
## 52 1 ans Median :0.150
## 53 1 ans Mean :0.177
## 54 1 ans 3rd Qu.:0.200
## 55 1 ans Max. :0.780
## 56 1 ans NA's :6
## 57 1 deviance Min. :0.230
## 58 1 deviance 1st Qu.:0.480
## 59 1 deviance Median :0.605
## 60 1 deviance Mean :0.708
## 61 1 deviance 3rd Qu.:0.880
## 62 1 deviance Max. :2.400
## 63 1 deviance NA's :3
## 64 1 linr2 Min. :0.010
## 65 1 linr2 1st Qu.:0.230
## 66 1 linr2 Median :0.345
## 67 1 linr2 Mean :0.366
## 68 1 linr2 3rd Qu.:0.510
## 69 1 linr2 Max. :0.910
## 70 1 linr2 NA's :3
## 71 1 ordinality Min. :0.590
## 72 1 ordinality 1st Qu.:0.757
## 73 1 ordinality Median :0.800
## 74 1 ordinality Mean :0.798
## 75 1 ordinality 3rd Qu.:0.840
## 76 1 ordinality Max. :1.000
## 77 1 ordinality NA's :3
## 78 1 year Min. :1
## 79 1 year 1st Qu.:1
## 80 1 year Median :1
## 81 1 year Mean :1
## 82 1 year 3rd Qu.:1
## 83 1 year Max. :1
## 84 1 year <NA>
## 85 2 standardized Min. :0.130
## 86 2 standardized 1st Qu.:0.374
## 87 2 standardized Median :0.425
## 88 2 standardized Mean :0.426
## 89 2 standardized 3rd Qu.:0.480
## 90 2 standardized Max. :0.636
## 91 2 standardized NA's :1
## 92 2 ans Min. :0.010
## 93 2 ans 1st Qu.:0.110
## 94 2 ans Median :0.130
## 95 2 ans Mean :0.146
## 96 2 ans 3rd Qu.:0.170
## 97 2 ans Max. :0.570
## 98 2 ans NA's :2
## 99 2 deviance Min. :0.280
## 100 2 deviance 1st Qu.:0.450
## 101 2 deviance Median :0.610
## 102 2 deviance Mean :0.713
## 103 2 deviance 3rd Qu.:0.885
## 104 2 deviance Max. :3.070
## 105 2 deviance <NA>
## 106 2 linr2 Min. :0.060
## 107 2 linr2 1st Qu.:0.205
## 108 2 linr2 Median :0.320
## 109 2 linr2 Mean :0.352
## 110 2 linr2 3rd Qu.:0.470
## 111 2 linr2 Max. :0.800
## 112 2 linr2 <NA>
## 113 2 ordinality Min. :0.620
## 114 2 ordinality 1st Qu.:0.750
## 115 2 ordinality Median :0.800
## 116 2 ordinality Mean :0.794
## 117 2 ordinality 3rd Qu.:0.840
## 118 2 ordinality Max. :0.970
## 119 2 ordinality <NA>
## 120 2 year Min. :2
## 121 2 year 1st Qu.:2
## 122 2 year Median :2
## 123 2 year Mean :2
## 124 2 year 3rd Qu.:2
## 125 2 year Max. :2
## 126 2 year <NA>
## 127 3 standardized Min. :0.161
## 128 3 standardized 1st Qu.:0.461
## 129 3 standardized Median :0.544
## 130 3 standardized Mean :0.538
## 131 3 standardized 3rd Qu.:0.622
## 132 3 standardized Max. :0.774
## 133 3 standardized <NA>
## 134 3 ans Min. :0.060
## 135 3 ans 1st Qu.:0.100
## 136 3 ans Median :0.120
## 137 3 ans Mean :0.138
## 138 3 ans 3rd Qu.:0.160
## 139 3 ans Max. :0.590
## 140 3 ans NA's :2
## 141 3 deviance Min. :0.230
## 142 3 deviance 1st Qu.:0.430
## 143 3 deviance Median :0.625
## 144 3 deviance Mean :0.698
## 145 3 deviance 3rd Qu.:0.900
## 146 3 deviance Max. :2.020
## 147 3 deviance NA's :1
## 148 3 linr2 Min. :0.040
## 149 3 linr2 1st Qu.:0.193
## 150 3 linr2 Median :0.310
## 151 3 linr2 Mean :0.356
## 152 3 linr2 3rd Qu.:0.497
## 153 3 linr2 Max. :0.820
## 154 3 linr2 NA's :1
## 155 3 ordinality Min. :0.570
## 156 3 ordinality 1st Qu.:0.752
## 157 3 ordinality Median :0.800
## 158 3 ordinality Mean :0.798
## 159 3 ordinality 3rd Qu.:0.850
## 160 3 ordinality Max. :0.950
## 161 3 ordinality NA's :1
## 162 3 year Min. :3
## 163 3 year 1st Qu.:3
## 164 3 year Median :3
## 165 3 year Mean :3
## 166 3 year 3rd Qu.:3
## 167 3 year Max. :3
## 168 3 year <NA>
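As an aside, a tidier way to get comparable per-year summaries - a sketch using gather and summarise instead of tidy(summary(.)):
d %>%
  gather(measure, value, standardized, ans, deviance, linr2, ordinality) %>%
  group_by(year, measure) %>%
  summarise(mean = mean(value, na.rm = TRUE),
            sd = sd(value, na.rm = TRUE),
            n.missing = sum(is.na(value)))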
Next, year-to-year correlations for each measure, pairing adjacent years. Deviance.
data.frame(year = factor(c("1-2", "2-3")),
           corrs = c(cor.test(d$deviance[d$year == 1],
                              d$deviance[d$year == 2])$estimate,
                     cor.test(d$deviance[d$year == 2],
                              d$deviance[d$year == 3])$estimate))
## year corrs
## 1 1-2 0.2776
## 2 2-3 0.5416
Linear \(r^2\).
data.frame(year = factor(c("1-2", "2-3")),
           corrs = c(cor.test(d$linr2[d$year == 1], d$linr2[d$year == 2])$estimate,
                     cor.test(d$linr2[d$year == 2], d$linr2[d$year == 3])$estimate))
## year corrs
## 1 1-2 0.1907
## 2 2-3 0.3790
Ordinality.
data.frame(year = factor(c("1-2", "2-3")),
           corrs = c(cor.test(d$ordinality[d$year == 1],
                              d$ordinality[d$year == 2])$estimate,
                     cor.test(d$ordinality[d$year == 2],
                              d$ordinality[d$year == 3])$estimate))
## year corrs
## 1 1-2 0.2131
## 2 2-3 0.3334
ANS.
data.frame(year = factor(c("0-1", "1-2", "2-3")),
           corrs = c(cor.test(d$ans[d$year == 0],
                              d$ans[d$year == 1])$estimate,
                     cor.test(d$ans[d$year == 1],
                              d$ans[d$year == 2])$estimate,
                     cor.test(d$ans[d$year == 2],
                              d$ans[d$year == 3])$estimate))
## year corrs
## 1 0-1 0.2318
## 2 1-2 0.1908
## 3 2-3 0.4444
We see that the year-to-year correlations are moderate (roughly .2-.5) across all four measures.
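Note that the cor.test calls above rely on the rows for each year being aligned by subject. A sketch of a safer helper that joins explicitly on subject ID (assuming subnum uniquely identifies subjects, with one row per subject per year):
year_cor <- function(data, var, y1, y2) {
  # reshape to wide by subject so the two years are explicitly paired
  wide <- merge(data[data$year == y1, c("subnum", var)],
                data[data$year == y2, c("subnum", var)],
                by = "subnum", suffixes = c(".1", ".2"))
  cor(wide[[paste0(var, ".1")]], wide[[paste0(var, ".2")]],
      use = "pairwise.complete.obs")
}
year_cor(d, "ans", 2, 3) # e.g., the 2-3 ANS correlation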
OK, now we test for an effect of training on ANS. We can use a mixed-effects model because we have ANS data for all four years:
w.interaction <- lmer(ans ~ year * condition + (subnum | year), data = d)
summary(w.interaction)
## Linear mixed model fit by REML ['lmerMod']
## Formula: ans ~ year * condition + (subnum | year)
## Data: d
##
## REML criterion at convergence: -1205
##
## Scaled residuals:
## Min 1Q Median 3Q Max
## -2.730 -0.490 -0.162 0.302 6.036
##
## Random effects:
## Groups Name Variance Std.Dev. Corr
## year (Intercept) 4.51e-03 6.72e-02
## subnum 1.37e-12 1.17e-06 -1.00
## Residual 1.02e-02 1.01e-01
## Number of obs: 713, groups: year, 4
##
## Fixed effects:
## Estimate Std. Error t value
## (Intercept) 0.32442 0.05684 5.71
## year -0.07445 0.03037 -2.45
## conditionabacus -0.02183 0.01306 -1.67
## year:conditionabacus 0.00623 0.00686 0.91
##
## Correlation of Fixed Effects:
## (Intr) year cndtnb
## year -0.802
## conditinbcs -0.113 0.091
## yr:cndtnbcs 0.092 -0.110 -0.814
wo.interaction <- lmer(ans ~ year + condition + (subnum | year), data = d)
anova(w.interaction, wo.interaction)
## refitting model(s) with ML (instead of REML)
## Data: d
## Models:
## wo.interaction: ans ~ year + condition + (subnum | year)
## w.interaction: ans ~ year * condition + (subnum | year)
## Df AIC BIC logLik deviance Chisq Chi Df Pr(>Chisq)
## wo.interaction 7 -1217 -1185 616 -1231
## w.interaction 8 -1216 -1179 616 -1232 0.82 1 0.36
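The (subnum | year) random-effects specification is unusual, and the fitted subnum variance above is essentially zero with a -1.00 correlation, suggesting a degenerate fit. For comparison, a sketch of a more conventional specification with a random intercept per subject - an alternative, not the analysis reported above:
w.subj <- lmer(ans ~ year * condition + (1 | subnum), data = d)
wo.subj <- lmer(ans ~ year + condition + (1 | subnum), data = d)
anova(w.subj, wo.subj) # refits with ML, as above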
Now we run t-tests by task to look for differences between the control and abacus groups. We correct for multiple comparisons post hoc, adjusting for the number of comparisons per DV (see the sketch after the output).
tasks <- c("deviance","linr2","ans", "ordinality")
d %>%
  filter(year > 0) %>%
  gather(measure, value, deviance, linr2, ans, ordinality) %>%
  group_by(year, measure) %>%
  do(tidy(t.test(.$value[.$condition == "abacus"],
                 .$value[.$condition == "control"])))
## Source: local data frame [12 x 10]
## Groups: year, measure
##
## year measure estimate estimate1 estimate2 statistic p.value
## 1 1 deviance -0.080161 0.6653 0.7455 -1.6349 0.10386
## 2 1 linr2 0.015890 0.3748 0.3589 0.5947 0.55279
## 3 1 ans -0.002818 0.1758 0.1786 -0.1979 0.84333
## 4 1 ordinality 0.021198 0.8090 0.7878 2.1473 0.03313
## 5 2 deviance -0.117626 0.6505 0.7681 -2.2388 0.02638
## 6 2 linr2 0.010303 0.3573 0.3470 0.3781 0.70582
## 7 2 ans -0.002970 0.1447 0.1476 -0.3397 0.73447
## 8 2 ordinality 0.014167 0.8011 0.7870 1.3927 0.16540
## 9 3 deviance -0.072769 0.6597 0.7324 -1.4797 0.14068
## 10 3 linr2 0.067294 0.3918 0.3245 2.3209 0.02146
## 11 3 ans -0.011579 0.1323 0.1439 -1.1261 0.26159
## 12 3 ordinality 0.011324 0.8043 0.7929 1.1192 0.26454
## Variables not shown: parameter (dbl), conf.low (dbl), conf.high (dbl)
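The correction itself is not shown above; here is a sketch of one way to do it, assuming a Holm adjustment across the three yearly tests within each DV (the original method may differ):
d %>%
  filter(year > 0) %>%
  gather(measure, value, deviance, linr2, ans, ordinality) %>%
  group_by(year, measure) %>%
  do(tidy(t.test(.$value[.$condition == "abacus"],
                 .$value[.$condition == "control"]))) %>%
  group_by(measure) %>%
  mutate(p.adjusted = p.adjust(p.value, method = "holm"))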
ANS, first without the intervention split:
qplot(ans, standardized, facets = ~year, data = d) +
  geom_smooth(method = "lm") +
  ylab("Standardized Test Composite") +
  xlab("Weber Fraction")
And with it:
qplot(ans, standardized, facets = ~year,
      col = condition, data = d) +
  geom_smooth(method = "lm") +
  ylab("Standardized Test Composite") +
  xlab("Weber Fraction")
Now do the same for deviance:
qplot(deviance, standardized, facets = ~year, data = d) +
  geom_smooth(method = "lm") +
  ylab("Standardized Test Composite") +
  xlab("PAE")
qplot(deviance, standardized, facets = ~year,
      col = condition, data = d) +
  geom_smooth(method = "lm") +
  ylab("Standardized Test Composite") +
  xlab("PAE")
## geom_smooth: Only one unique x value each group.Maybe you want aes(group = 1)?
and for linear \(r^2\):
qplot(linr2, standardized, facets = ~year, data = d) +
  geom_smooth(method = "lm") +
  ylab("Standardized Test Composite") +
  xlab("Linear r^2")
qplot(linr2, standardized, facets = ~year,
      col = condition, data = d) +
  geom_smooth(method = "lm") +
  ylab("Standardized Test Composite") +
  xlab("Linear r^2")
## geom_smooth: Only one unique x value each group.Maybe you want aes(group = 1)?
and for ordinality:
qplot(ordinality, standardized, facets = ~year, data = d) +
  geom_smooth(method = "lm") +
  ylab("Standardized Test Composite") +
  xlab("Ordinality")
qplot(ordinality, standardized, facets = ~year,
      col = condition, data = d) +
  geom_smooth(method = "lm") +
  ylab("Standardized Test Composite") +
  xlab("Ordinality")
## geom_smooth: Only one unique x value each group.Maybe you want aes(group = 1)?
Standardize predictors.
# this is insane, but PCA will choke if the scaled variables carry the scaling
# attributes that scale() attaches by default, so strip them with as.numeric()
sscale <- function(x) as.numeric(scale(x))
md <- d %>%
  gather(measure, value, deviance, linr2, ans, ordinality) %>%
  group_by(year, measure) %>%
  mutate(standardized.scale = sscale(standardized),
         value.scale = sscale(value),
         wiat.scale = sscale(wiat),
         woodcock.scale = sscale(woodcock),
         math.scale = sscale(math),
         placeval.scale = sscale(placeval),
         arith.scale = sscale(arith),
         mental.rot.scale = sscale(mental.rot),
         verbalwm.scale = sscale(verbalwm),
         spatialwm.scale = sscale(spatialwm),
         ravens.scale = sscale(ravens))
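A quick sanity check, not in the original: within each year-by-measure cell, the scaled values should come out with mean ~0 and sd ~1.
md %>%
  group_by(year, measure) %>%
  summarise(m = mean(value.scale, na.rm = TRUE),
            s = sd(value.scale, na.rm = TRUE))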
Now fit models. This uses the broom package - really very nifty.
std.baseline.models <- md %>%
  group_by(year, measure) %>%
  filter(!(year == 0 & measure != "ans")) %>%
  do(tidy(lm(standardized.scale ~ value.scale, data = .))) %>%
  filter(term != "(Intercept)") %>%
  data.frame
std.baseline.models
## year measure term estimate std.error statistic p.value
## 1 0 ans value.scale -0.2861137 0.07585 -3.77188 0.0002277
## 2 1 deviance value.scale -0.0736166 0.07430 -0.99082 0.3230899
## 3 1 linr2 value.scale 0.1031407 0.07411 1.39181 0.1656798
## 4 1 ans value.scale -0.1210668 0.07316 -1.65483 0.0997119
## 5 1 ordinality value.scale 0.0008627 0.07450 0.01158 0.9907734
## 6 2 deviance value.scale -0.1447405 0.07301 -1.98243 0.0489197
## 7 2 linr2 value.scale 0.1910128 0.07338 2.60310 0.0099918
## 8 2 ans value.scale -0.1049880 0.07383 -1.42210 0.1567078
## 9 2 ordinality value.scale 0.2081563 0.07258 2.86797 0.0046137
## 10 3 deviance value.scale -0.2283394 0.07188 -3.17688 0.0017461
## 11 3 linr2 value.scale 0.2773778 0.07093 3.91050 0.0001294
## 12 3 ans value.scale -0.2259645 0.07165 -3.15365 0.0018849
## 13 3 ordinality value.scale 0.2275793 0.07189 3.16573 0.0018108
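As an aside, broom's glance() would give model-level statistics (R^2, F, etc.) for the same per-group fits - a sketch:
md %>%
  group_by(year, measure) %>%
  filter(!(year == 0 & measure != "ans")) %>%
  do(glance(lm(standardized.scale ~ value.scale, data = .)))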
And here are the full models.
std.models <- md %>%
  group_by(year, measure) %>%
  filter(!(year == 0 & measure != "ans")) %>%
  do(tidy(lm(standardized.scale ~ value.scale +
               mental.rot.scale +
               verbalwm.scale +
               spatialwm.scale +
               ravens.scale +
               age + condition, data = .)))
And make a pretty plot.
std.models$term <- factor(std.models$term,
                          levels = c("(Intercept)", "value.scale",
                                     "mental.rot.scale", "spatialwm.scale",
                                     "verbalwm.scale", "ravens.scale",
                                     "age", "conditionabacus"),
                          labels = c("Intercept", "Predictor",
                                     "Mental Rotation", "Spatial WM",
                                     "Verbal WM", "Raven's", "Age",
                                     "Intervention"))
std.models$measure <- factor(std.models$measure,
                             levels = c("ans", "deviance", "linr2", "ordinality"),
                             labels = c("ANS", "Deviance", "Linear r^2",
                                        "Ordinality"))
qplot(term, estimate,
      fill = term,
      ymin = estimate - std.error, ymax = estimate + std.error,
      geom = c("bar", "linerange"), stat = "identity",
      facets = measure ~ year,
      data = filter(std.models, term != "Intercept")) +
  geom_hline(yintercept = 0, lty = 2) +
  xlab("Predictor") +
  ylab("Standardized Beta Weight") +
  scale_fill_discrete(name = "Predictor") +
  theme(axis.text.x = element_text(angle = 90, vjust = .5, hjust = 1))
Add PCA to the dataset. Note that PC1 is sign-flipped just to make interpretation easier.
# helper to run PCA inside mutate(); works around dplyr's standard-evaluation limits
pc1 <- function(x, y, z, m, n) {
  sscale(as.numeric(prcomp(~x + y + z + m + n)$x[, 1]))
}
# prcomp needs complete cases, so filter first
pmd <- md %>%
  filter(complete.cases(wiat.scale, woodcock.scale, math.scale,
                        placeval.scale, arith.scale)) %>%
  mutate(pc1 = -pc1(wiat.scale, woodcock.scale, math.scale,
                    placeval.scale, arith.scale))
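It may also be worth checking how much variance PC1 actually captures; this is not shown in the original. A sketch using the same five columns (pmd repeats each row once per gathered measure, which leaves the proportions unchanged):
pca <- prcomp(~wiat.scale + woodcock.scale + math.scale +
                placeval.scale + arith.scale, data = pmd)
summary(pca)$importance["Proportion of Variance", ]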
PCA Models and plot.
pca.baseline.models <- pmd %>%
  group_by(year, measure) %>%
  filter(!(year == 0 & measure != "ans")) %>%
  do(tidy(lm(pc1 ~ value.scale, data = .))) %>%
  filter(term != "(Intercept)") %>%
  data.frame
pca.baseline.models
## year measure term estimate std.error statistic p.value
## 1 0 ans value.scale 0.2961 0.07929 3.734 2.795e-04
## 2 1 deviance value.scale -0.1677 0.07347 -2.282 2.365e-02
## 3 1 linr2 value.scale 0.1581 0.07355 2.150 3.289e-02
## 4 1 ans value.scale -0.1294 0.07288 -1.776 7.747e-02
## 5 1 ordinality value.scale 0.1055 0.07431 1.419 1.575e-01
## 6 2 deviance value.scale -0.1873 0.07312 -2.562 1.123e-02
## 7 2 linr2 value.scale 0.2498 0.07303 3.420 7.745e-04
## 8 2 ans value.scale -0.1475 0.07411 -1.990 4.811e-02
## 9 2 ordinality value.scale 0.1914 0.07492 2.555 1.145e-02
## 10 3 deviance value.scale -0.2477 0.07146 -3.465 6.604e-04
## 11 3 linr2 value.scale 0.3379 0.06972 4.846 2.685e-06
## 12 3 ans value.scale -0.2617 0.07125 -3.673 3.151e-04
## 13 3 ordinality value.scale 0.2736 0.07166 3.818 1.844e-04
pca.models <- pmd %>%
  group_by(year, measure) %>%
  filter(!(year == 0 & measure != "ans")) %>%
  do(tidy(lm(pc1 ~ value.scale +
               mental.rot.scale +
               verbalwm.scale +
               spatialwm.scale +
               ravens.scale +
               age + condition, data = .)))
pca.models$term <- factor(pca.models$term,
                          levels = c("(Intercept)", "value.scale",
                                     "mental.rot.scale", "spatialwm.scale",
                                     "verbalwm.scale", "ravens.scale",
                                     "age", "conditionabacus"),
                          labels = c("Intercept", "Predictor",
                                     "Mental Rotation", "Spatial WM",
                                     "Verbal WM", "Raven's", "Age",
                                     "Intervention"))
pca.models$measure <- factor(pca.models$measure,
                             levels = c("ans", "deviance", "linr2", "ordinality"),
                             labels = c("ANS", "Deviance", "Linear r^2",
                                        "Ordinality"))
qplot(term, estimate,
      fill = term,
      ymin = estimate - std.error, ymax = estimate + std.error,
      geom = c("bar", "linerange"), stat = "identity",
      facets = measure ~ year,
      data = filter(pca.models, term != "Intercept")) +
  geom_hline(yintercept = 0, lty = 2) +
  xlab("Predictor") +
  ylab("Standardized Beta Weight") +
  scale_fill_discrete(name = "Predictor") +
  theme(axis.text.x = element_text(angle = 90, vjust = .5, hjust = 1))
Now for the guts of one of the models, just an example from year 0, ANS. This uses ANOVA for model comparison.
## run models
# note: the filter expression below pulls out the same data-frame subset that we used for the figure
# std.models <- md %>%
# group_by(year, measure) %>%
# filter(!(year == 0 & measure != "ans")) %>%
# do(tidy(lm(standardized.scale ~ value.scale +
# mental.rot.scale +
# verbalwm.scale +
# spatialwm.scale +
# ravens.scale +
# age + condition, data = .)))
model1 <- lm(standardized.scale ~ value.scale + mental.rot.scale + verbalwm.scale +
               spatialwm.scale + ravens.scale + age + condition,
             data = filter(md, year == 0, measure == "ans",
                           complete.cases(value.scale)))
model2 <- lm(standardized.scale ~ mental.rot.scale + verbalwm.scale +
               spatialwm.scale + ravens.scale + age + condition,
             data = filter(md, year == 0, measure == "ans",
                           complete.cases(value.scale)))
summary(model1)
##
## Call:
## lm(formula = standardized.scale ~ value.scale + mental.rot.scale +
## verbalwm.scale + spatialwm.scale + ravens.scale + age + condition,
## data = filter(md, year == 0, measure == "ans", complete.cases(value.scale)))
##
## Residuals:
## Min 1Q Median 3Q Max
## -2.1398 -0.6321 -0.0196 0.6271 2.2959
##
## Coefficients:
## Estimate Std. Error t value Pr(>|t|)
## (Intercept) -0.78565 0.97931 -0.80 0.4238
## value.scale -0.23826 0.07870 -3.03 0.0029 **
## mental.rot.scale -0.15068 0.08280 -1.82 0.0710 .
## verbalwm.scale 0.18273 0.09210 1.98 0.0493 *
## spatialwm.scale 0.22982 0.09479 2.42 0.0166 *
## ravens.scale -0.00326 0.08234 -0.04 0.9685
## age 0.12625 0.14629 0.86 0.3896
## conditionabacus 0.03050 0.15181 0.20 0.8410
## ---
## Signif. codes: 0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
##
## Residual standard error: 0.902 on 137 degrees of freedom
## (17 observations deleted due to missingness)
## Multiple R-squared: 0.219, Adjusted R-squared: 0.179
## F-statistic: 5.48 on 7 and 137 DF, p-value: 1.44e-05
summary(model2)
##
## Call:
## lm(formula = standardized.scale ~ mental.rot.scale + verbalwm.scale +
## spatialwm.scale + ravens.scale + age + condition, data = filter(md,
## year == 0, measure == "ans", complete.cases(value.scale)))
##
## Residuals:
## Min 1Q Median 3Q Max
## -2.4011 -0.6116 -0.0184 0.6074 2.3312
##
## Coefficients:
## Estimate Std. Error t value Pr(>|t|)
## (Intercept) -1.17804 0.99900 -1.18 0.240
## mental.rot.scale -0.14044 0.08514 -1.65 0.101
## verbalwm.scale 0.15691 0.09438 1.66 0.099 .
## spatialwm.scale 0.28833 0.09550 3.02 0.003 **
## ravens.scale 0.00965 0.08462 0.11 0.909
## age 0.18250 0.14934 1.22 0.224
## conditionabacus 0.06904 0.15569 0.44 0.658
## ---
## Signif. codes: 0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
##
## Residual standard error: 0.929 on 138 degrees of freedom
## (17 observations deleted due to missingness)
## Multiple R-squared: 0.166, Adjusted R-squared: 0.13
## F-statistic: 4.59 on 6 and 138 DF, p-value: 0.000278
anova(model1, model2)
## Analysis of Variance Table
##
## Model 1: standardized.scale ~ value.scale + mental.rot.scale + verbalwm.scale +
## spatialwm.scale + ravens.scale + age + condition
## Model 2: standardized.scale ~ mental.rot.scale + verbalwm.scale + spatialwm.scale +
## ravens.scale + age + condition
## Res.Df RSS Df Sum of Sq F Pr(>F)
## 1 137 112
## 2 138 119 -1 -7.46 9.17 0.0029 **
## ---
## Signif. codes: 0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
# also note that the p-value from the ANOVA matches the coefficient's p-value in the models data frame
filter(std.models, year == 0, measure == "ANS")
## Source: local data frame [8 x 7]
## Groups: year, measure
##
## year measure term estimate std.error statistic p.value
## 1 0 ANS Intercept -0.785654 0.97931 -0.80225 0.423796
## 2 0 ANS Predictor -0.238260 0.07870 -3.02759 0.002946
## 3 0 ANS Mental Rotation -0.150685 0.08280 -1.81992 0.070954
## 4 0 ANS Verbal WM 0.182732 0.09210 1.98399 0.049256
## 5 0 ANS Spatial WM 0.229820 0.09479 2.42453 0.016632
## 6 0 ANS Raven's -0.003256 0.08234 -0.03954 0.968517
## 7 0 ANS Age 0.126252 0.14629 0.86301 0.389641
## 8 0 ANS Intervention 0.030503 0.15181 0.20093 0.841050
Year 0 analyses by JS, to check the betas. (The estimates differ very slightly from the pipeline fit above because the variables are z-scored over slightly different subsets; the t and p values match.)
# ensure year 0, ensure complete cases
y0 <- subset(d, year == 0)
yc0 <- y0[complete.cases(y0[, c("ans", "mental.rot", "verbalwm", "spatialwm", "ravens")]), ]
# scale all variables
yc0$standardized.scale <- scale(yc0$standardized)
yc0$ans.scale <- scale(yc0$ans)
yc0$mental.rot.scale <- scale(yc0$mental.rot)
yc0$age.scale <- scale(yc0$age)
yc0$verbalwm.scale <- scale(yc0$verbalwm)
yc0$spatialwm.scale <- scale(yc0$spatialwm)
yc0$ravens.scale <- scale(yc0$ravens)
# ANS, year 0
withansyear0 <- lm(standardized.scale ~ ans.scale + mental.rot.scale + verbalwm.scale +
                     spatialwm.scale + ravens.scale + age + condition, data = yc0)
withoutansyear0 <- lm(standardized.scale ~ mental.rot.scale + verbalwm.scale +
                        spatialwm.scale + ravens.scale + age + condition, data = yc0)
summary(withansyear0)
##
## Call:
## lm(formula = standardized.scale ~ ans.scale + mental.rot.scale +
## verbalwm.scale + spatialwm.scale + ravens.scale + age + condition,
## data = yc0)
##
## Residuals:
## Min 1Q Median 3Q Max
## -2.1551 -0.6367 -0.0197 0.6316 2.3123
##
## Coefficients:
## Estimate Std. Error t value Pr(>|t|)
## (Intercept) -0.85863 0.98759 -0.87 0.3861
## ans.scale -0.23970 0.07917 -3.03 0.0029 **
## mental.rot.scale -0.14665 0.08058 -1.82 0.0710 .
## verbalwm.scale 0.17890 0.09017 1.98 0.0493 *
## spatialwm.scale 0.23110 0.09532 2.42 0.0166 *
## ravens.scale -0.00326 0.08248 -0.04 0.9685
## age 0.12715 0.14734 0.86 0.3896
## conditionabacus 0.03072 0.15289 0.20 0.8410
## ---
## Signif. codes: 0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
##
## Residual standard error: 0.909 on 137 degrees of freedom
## (2 observations deleted due to missingness)
## Multiple R-squared: 0.219, Adjusted R-squared: 0.179
## F-statistic: 5.48 on 7 and 137 DF, p-value: 1.44e-05
summary(withoutansyear0)
##
## Call:
## lm(formula = standardized.scale ~ mental.rot.scale + verbalwm.scale +
## spatialwm.scale + ravens.scale + age + condition, data = yc0)
##
## Residuals:
## Min 1Q Median 3Q Max
## -2.4182 -0.6159 -0.0185 0.6117 2.3479
##
## Coefficients:
## Estimate Std. Error t value Pr(>|t|)
## (Intercept) -1.25784 1.00729 -1.25 0.214
## mental.rot.scale -0.13667 0.08286 -1.65 0.101
## verbalwm.scale 0.15362 0.09240 1.66 0.099 .
## spatialwm.scale 0.28993 0.09604 3.02 0.003 **
## ravens.scale 0.00967 0.08477 0.11 0.909
## age 0.18380 0.15041 1.22 0.224
## conditionabacus 0.06953 0.15680 0.44 0.658
## ---
## Signif. codes: 0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
##
## Residual standard error: 0.935 on 138 degrees of freedom
## (2 observations deleted due to missingness)
## Multiple R-squared: 0.166, Adjusted R-squared: 0.13
## F-statistic: 4.59 on 6 and 138 DF, p-value: 0.000278
anova(withansyear0, withoutansyear0)
## Analysis of Variance Table
##
## Model 1: standardized.scale ~ ans.scale + mental.rot.scale + verbalwm.scale +
## spatialwm.scale + ravens.scale + age + condition
## Model 2: standardized.scale ~ mental.rot.scale + verbalwm.scale + spatialwm.scale +
## ravens.scale + age + condition
## Res.Df RSS Df Sum of Sq F Pr(>F)
## 1 137 113
## 2 138 121 -1 -7.57 9.17 0.0029 **
## ---
## Signif. codes: 0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1