Load packages:

library(tidyverse)
library(brms)
library(sjstats)
library(bayesplot)

Load data:

data <- read_csv("data/tidied_data.csv")
Missing column names filled in: 'X1' [1]
Parsed with column specification:
cols(
  .default = col_double(),
  participant = col_character(),
  vowel = col_character(),
  word = col_character(),
  task = col_character(),
  fol.phonOLOG.seg = col_character(),
  fol.PHONETIC.seg = col_character(),
  age = col_character(),
  sex = col_character(),
  rol.var = col_character(),
  face.l = col_character(),
  price.l = col_character(),
  fol_seg = col_character()
)
See spec(...) for full column specifications.
str(data)
Classes 'spec_tbl_df', 'tbl_df', 'tbl' and 'data.frame':    11641 obs. of  63 variables:
 $ X1              : num  1 2 3 4 5 6 7 8 9 10 ...
 $ participant     : chr  "Ali" "Ali" "Ali" "Ali" ...
 $ vowel           : chr  "face" "face" "face" "face" ...
 $ word            : chr  "AGE" "AGES" "AGES" "AWAY" ...
 $ sound_start     : num  1771 1853 288 1998 116 ...
 $ sound_end       : num  1771 1853 288 1998 116 ...
 $ task            : chr  "soc.int" "soc.int" "soc.int" "soc.int" ...
 $ F1_20           : num  563 428 410 451 539 ...
 $ F1_35           : num  472 449 382 405 510 ...
 $ F1_50           : num  454 409 392 402 439 ...
 $ F1_65           : num  471 416 384 457 481 ...
 $ F1_80           : num  439 403 383 422 456 ...
 $ F2_20           : num  2147 2104 2140 1644 1882 ...
 $ F2_35           : num  2083 2178 2188 1678 1932 ...
 $ F2_50           : num  2043 2163 2216 1654 1997 ...
 $ F2_65           : num  2006 2242 2202 1616 1994 ...
 $ F2_80           : num  1990 2220 2255 1589 2023 ...
 $ fol.phonOLOG.seg: chr  "dʒ" "dʒ" "dʒ" "w" ...
 $ fol.PHONETIC.seg: chr  "dʒ" "dʒ" "dʒ" "w" ...
 $ lex.stress      : num  1 1 1 1 1 1 1 1 1 1 ...
 $ age             : chr  "adolescent" "adolescent" "adolescent" "adolescent" ...
 $ sex             : chr  "M" "M" "M" "M" ...
 $ meanF1          : num  480 421 390 427 485 ...
 $ meanF2          : num  2054 2181 2200 1636 1966 ...
 $ meanFleeceF1    : num  375 375 375 375 375 ...
 $ meanFleeceF2    : num  2128 2128 2128 2128 2128 ...
 $ meanTRAPF1      : num  706 706 706 706 706 ...
 $ u_F1            : num  375 375 375 375 375 ...
 $ u_F2            : num  375 375 375 375 375 ...
 $ trapF2          : num  1252 1252 1252 1252 1252 ...
 $ S_F1            : num  485 485 485 485 485 ...
 $ S_F2            : num  1252 1252 1252 1252 1252 ...
 $ normF1_20       : num  1.16 0.881 0.845 0.928 1.111 ...
 $ normF1_35       : num  0.971 0.925 0.787 0.834 1.05 ...
 $ normF1_50       : num  0.935 0.843 0.808 0.828 0.903 ...
 $ normF1_65       : num  0.971 0.856 0.79 0.942 0.991 ...
 $ normF1_80       : num  0.904 0.83 0.789 0.869 0.938 ...
 $ normF2_20       : num  1.72 1.68 1.71 1.31 1.5 ...
 $ normF2_35       : num  1.66 1.74 1.75 1.34 1.54 ...
 $ normF2_50       : num  1.63 1.73 1.77 1.32 1.6 ...
 $ normF2_65       : num  1.6 1.79 1.76 1.29 1.59 ...
 $ normF2_80       : num  1.59 1.77 1.8 1.27 1.62 ...
 $ duration        : num  0.1935 0.1485 0.1199 0.0772 0.1354 ...
 $ changeF1        : num  -124.5 -24.8 -27.3 -28.8 -83.9 ...
 $ normChangeF1    : num  -0.2564 -0.051 -0.0562 -0.0593 -0.1728 ...
 $ changeF2        : num  -156.3 116.4 115.6 -54.3 140.3 ...
 $ normChangeF2    : num  -0.1249 0.093 0.0924 -0.0434 0.1121 ...
 $ VL              : num  199.8 119 118.8 61.4 163.4 ...
 $ normVL          : num  0.2852 0.1061 0.1081 0.0735 0.206 ...
 $ normVSL1        : num  0.1953 0.0736 0.0691 0.098 0.0734 ...
 $ normVSL2        : num  0.0483 0.0828 0.0312 0.0206 0.155 ...
 $ normVSL3        : num  0.0461 0.0646 0.0213 0.1182 0.0877 ...
 $ normVSL4        : num  0.0681 0.0313 0.0428 0.0761 0.0574 ...
 $ VSL1            : num  111.5 76.9 55.4 57.3 58.2 ...
 $ VSL2            : num  43.7 42.4 30.6 24.8 95.8 ...
 $ VSL3            : num  40.7 79.5 16.9 67.2 42.6 ...
 $ VSL4            : num  36.1 25.4 53.5 44.2 38.3 ...
 $ TrajLength      : num  232 224 156 193 235 ...
 $ norm_TL         : num  0.358 0.252 0.164 0.313 0.373 ...
 $ rol.var         : chr  "n.a." "n.a." "n.a." "n.a." ...
 $ face.l          : chr  "fine" "fine" "fine" "fine" ...
 $ price.l         : chr  "n.a." "n.a." "n.a." "n.a." ...
 $ fol_seg         : chr  "fol_voiced_clus" "fol_voiced_clus" "fol_voiced_clus" "fol_approximant" ...
 - attr(*, "spec")=
  .. cols(
  ..   X1 = col_double(),
  ..   participant = col_character(),
  ..   vowel = col_character(),
  ..   word = col_character(),
  ..   sound_start = col_double(),
  ..   sound_end = col_double(),
  ..   task = col_character(),
  ..   F1_20 = col_double(),
  ..   F1_35 = col_double(),
  ..   F1_50 = col_double(),
  ..   F1_65 = col_double(),
  ..   F1_80 = col_double(),
  ..   F2_20 = col_double(),
  ..   F2_35 = col_double(),
  ..   F2_50 = col_double(),
  ..   F2_65 = col_double(),
  ..   F2_80 = col_double(),
  ..   fol.phonOLOG.seg = col_character(),
  ..   fol.PHONETIC.seg = col_character(),
  ..   lex.stress = col_double(),
  ..   age = col_character(),
  ..   sex = col_character(),
  ..   meanF1 = col_double(),
  ..   meanF2 = col_double(),
  ..   meanFleeceF1 = col_double(),
  ..   meanFleeceF2 = col_double(),
  ..   meanTRAPF1 = col_double(),
  ..   u_F1 = col_double(),
  ..   u_F2 = col_double(),
  ..   trapF2 = col_double(),
  ..   S_F1 = col_double(),
  ..   S_F2 = col_double(),
  ..   normF1_20 = col_double(),
  ..   normF1_35 = col_double(),
  ..   normF1_50 = col_double(),
  ..   normF1_65 = col_double(),
  ..   normF1_80 = col_double(),
  ..   normF2_20 = col_double(),
  ..   normF2_35 = col_double(),
  ..   normF2_50 = col_double(),
  ..   normF2_65 = col_double(),
  ..   normF2_80 = col_double(),
  ..   duration = col_double(),
  ..   changeF1 = col_double(),
  ..   normChangeF1 = col_double(),
  ..   changeF2 = col_double(),
  ..   normChangeF2 = col_double(),
  ..   VL = col_double(),
  ..   normVL = col_double(),
  ..   normVSL1 = col_double(),
  ..   normVSL2 = col_double(),
  ..   normVSL3 = col_double(),
  ..   normVSL4 = col_double(),
  ..   VSL1 = col_double(),
  ..   VSL2 = col_double(),
  ..   VSL3 = col_double(),
  ..   VSL4 = col_double(),
  ..   TrajLength = col_double(),
  ..   norm_TL = col_double(),
  ..   rol.var = col_character(),
  ..   face.l = col_character(),
  ..   price.l = col_character(),
  ..   fol_seg = col_character()
  .. )
data <- data %>% mutate(
  participant = factor(participant),
  vowel = factor(vowel),
  word = factor(word),
  task = factor(task),
  fol.phonOLOG.seg = factor(fol.phonOLOG.seg),
  fol.PHONETIC.seg = factor(fol.PHONETIC.seg),
  age = factor(age),
  sex = factor(sex),
  rol.var = factor(rol.var),
  face.l = factor(face.l),
  price.l = factor(price.l),
  fol_seg = factor(fol_seg)
)
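
As an aside, the same conversion can be written more compactly. A sketch using mutate_if(), the older dplyr idiom (with dplyr >= 1.0 you could use across() instead):

# Convert every character column to a factor in one step
data <- data %>% mutate_if(is.character, factor)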

Subset the key vowels:

face_dat <- data %>% filter(vowel == "face" | vowel == "fleece")

1 EDA

ggplot(face_dat, aes(x = duration, fill = age)) + geom_histogram(bins = 100)

Filter so that duration is less than 0.75 s:

face2 <- face_dat %>% filter(duration < 0.75)
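
To see how much data this cut actually discards, a quick count (a sketch using the same 0.75 s threshold as above):

# Number and proportion of FACE/FLEECE tokens removed by the duration cut
sum(face_dat$duration >= 0.75)
mean(face_dat$duration >= 0.75)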

Try plotting again

ggplot(face2, aes(x = duration, fill = age)) + geom_histogram(bins = 100)

Still some outliers, but it’s better.

2 Data transformations

Log-transform duration

face3 <- mutate(face2,
               LogDur = log10(duration))

… and standardize it

face3 <- mutate(face3,
               Log_dur_z = scale(LogDur))
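
Note that scale() returns a one-column matrix, which some downstream functions are fussy about; if that ever bites, a plain numeric vector does the same job (a minimal sketch):

# Same standardization, but stored as an ordinary numeric vector
face3 <- mutate(face3,
               Log_dur_z = as.numeric(scale(LogDur)))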

Recode the sex variable so that M is the reference level:

face3$sex <- relevel(face3$sex, ref = "M")

Recode following segment so that ‘pause’ is the reference level:

face3$fol_seg <- relevel(face3$fol_seg, ref = "fol_pause")
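
A quick sanity check that the releveling took; levels() lists the factor levels with the reference level first:

levels(face3$sex)
levels(face3$fol_seg)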

3 FACE F1

Select just FACE:

just_face <- face3 %>% filter(vowel=="face")

Check distribution

ggplot(just_face, aes(x = normF1_20)) + geom_histogram(bins = 100)

Check how F1 changes with duration

ggplot(just_face, aes(x = Log_dur_z, y = normF1_20, color = age, shape = sex)) + geom_point() +
  geom_smooth()

Run a model. I'm not specifying priors because I don't feel confident choosing them, so brms will use its defaults (inspected below).

face_m1 <- brm(normF1_20 ~ Log_dur_z + age*sex + fol_seg + (1|participant) + (1|word), data = just_face)
Compiling the C++ model
recompiling to avoid crashing R session
Start sampling

SAMPLING FOR MODEL '1e2e842938811e2fe81287f02925f257' NOW (CHAIN 1).
Chain 1: 
Chain 1: Gradient evaluation took 0.001223 seconds
Chain 1: 1000 transitions using 10 leapfrog steps per transition would take 12.23 seconds.
Chain 1: Adjust your expectations accordingly!
Chain 1: 
Chain 1: 
Chain 1: Iteration:    1 / 2000 [  0%]  (Warmup)
Chain 1: Iteration:  200 / 2000 [ 10%]  (Warmup)
Chain 1: Iteration:  400 / 2000 [ 20%]  (Warmup)
Chain 1: Iteration:  600 / 2000 [ 30%]  (Warmup)
Chain 1: Iteration:  800 / 2000 [ 40%]  (Warmup)
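
Since no priors were specified, brms falls back on its defaults (flat priors on the population-level coefficients, weakly informative priors on the intercept and variance parameters). They can be inspected with the standard brms helpers; a minimal sketch:

# What the fitted model actually used
prior_summary(face_m1)

# What the defaults would be for this formula and data, before fitting
get_prior(normF1_20 ~ Log_dur_z + age*sex + fol_seg + (1|participant) + (1|word),
          data = just_face)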

Tidy_stan summary:

tidy_stan(face_m1, prob=0.89, type="fixed", digits=4)
longer object length is not a multiple of shorter object length

# Summary Statistics of Stan-Model

                           estimate std.error          HDI(89%)  ratio   rhat   mcse
 Intercept                   0.9964    0.0170 [ 0.9673  1.0237] 0.1770 1.0066 0.0007
 Log_dur_z                   0.0283    0.0027 [ 0.0242  0.0326] 1.2004 0.9996 0.0000
 Log_dur_z                   0.0283    0.0027 [ 0.0242  0.0326] 1.2004 0.9996 0.0000
 agechild                   -0.0380    0.0271 [-0.0841  0.0042] 0.1986 1.0011 0.0010
 sexF                        0.0169    0.0262 [-0.0252  0.0584] 0.1998 1.0052 0.0009
 fol_segfol_approximant     -0.0161    0.0118 [-0.0363  0.0017] 0.5599 1.0019 0.0003
 fol_segfol_nasal            0.0359    0.0125 [ 0.0182  0.0574] 0.3665 1.0032 0.0003
 fol_segfol_voiced_C        -0.0181    0.0110 [-0.0352  0.0004] 0.4276 1.0025 0.0003
 fol_segfol_voiced_clus     -0.0475    0.0183 [-0.0750 -0.0163] 0.5844 1.0006 0.0004
 fol_segfol_voiceless_C     -0.0026    0.0104 [-0.0192  0.0141] 0.3692 1.0023 0.0003
 fol_segfol_voiceless_clus  -0.0232    0.0162 [-0.0492  0.0014] 0.5681 1.0020 0.0003
 fol_segother                0.0112    0.0100 [-0.0060  0.0259] 0.4126 1.0024 0.0002
 agechild.sexF              -0.0294    0.0422 [-0.0908  0.0403] 0.2093 1.0023 0.0014

Plot the coefficients:

face_posterior1 <- as.matrix(face_m1)
#dimnames(face_posterior1)
#color_scheme_set("brightblue")
mcmc_intervals(face_posterior1,
           pars = c("b_Log_dur_z",
                    "b_agechild", 
                    "b_sexF",
                    "b_fol_segfol_nasal",
                    "b_fol_segfol_approximant",
                    "b_fol_segfol_voiced_C",
                    "b_fol_segfol_voiced_clus",
                    "b_fol_segfol_voiceless_C",
                    "b_fol_segfol_voiceless_clus",
                    "b_fol_segother",
                    "b_agechild:sexF"),
           prob=0.89) + ggplot2::theme_minimal() + ggplot2::geom_vline(xintercept=0)

mcmc_areas(face_posterior1,
           pars = c("b_Log_dur_z",
                    "b_agechild", 
                    "b_sexF",
                    "b_fol_segfol_nasal",
                    "b_fol_segfol_approximant",
                    "b_fol_segfol_voiced_C",
                    "b_fol_segfol_voiced_clus",
                    "b_fol_segfol_voiceless_C",
                    "b_fol_segfol_voiceless_clus",
                    "b_fol_segother",
                    "b_agechild:sexF"),
           prob=0.89) + ggplot2::theme_minimal() + ggplot2::geom_vline(xintercept=0)

This tells us that:

  • There is a small but credibly positive effect of duration on F1: an increase in duration (a longer vowel) predicts a higher F1, i.e. a more open onset to FACE (see the quick check after this list)

  • The posteriors on the coefficients of age, sex and their interaction are very spread out, so we shouldn't read too much into them. The tendencies are: children are predicted to have a lower F1 than adolescents; adolescent girls are predicted to have a higher F1 than adolescent boys; and there is a negative age-sex interaction, such that the sex difference seen in adolescents (girls having higher F1) is reversed for children, i.e. female children tend to have a lower F1 than male children.

  • A following nasal predicts a higher F1; following approximant, voiced consonant, or voiced consonant cluster predicts a lower F1
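
A quick way to back up these directional statements is to compute, from the posterior draws, the proportion of samples on one side of zero (a sketch using the face_posterior1 matrix defined above; brms::hypothesis() would give similar one-sided checks):

# Posterior probability that each effect is in the stated direction
mean(face_posterior1[, "b_Log_dur_z"] > 0)          # duration raises F1
mean(face_posterior1[, "b_agechild"] < 0)           # children lower than adolescents
mean(face_posterior1[, "b_fol_segfol_nasal"] > 0)   # following nasal raises F1
mean(face_posterior1[, "b_agechild:sexF"] < 0)      # negative age-sex interaction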

3.1 F1 boxplot

Define palette:

sexes <- c("#999999", "#FFFFFF")
pd <- position_dodge(0.9)

Plot:

o1 <- ggplot(data = just_face, aes(x = age, y = normF1_20, fill = sex)) + 
  geom_boxplot(width = 0.3, position = pd) +
  theme_minimal() + 
  scale_fill_manual(values = sexes) +
   theme(panel.grid.major = element_blank(),
        panel.grid.minor = element_blank(),
        legend.title = element_text(size = rel(1.5)),
        legend.text = element_text(size = rel(1.5)),
        axis.title = element_text(size = rel(2)), 
        axis.text = element_text(size = rel(2)), 
        plot.title = element_text(size = rel(2.5)), 
        plot.margin=unit(c(0.7,0.7,0.7,0.7),"cm")) +
  labs(x = "Age", y = "Normalized F1 at 20%", 
       title = "FACE: normalized F1 at onset (20%)")
o1

4 FACE Trajectory

Check for outliers:

ggplot(face3, aes(x = norm_TL, fill=vowel)) + geom_histogram(bins = 100)

All good.

Build the model:

face_m2 <- brm(norm_TL ~ Log_dur_z + vowel*age*sex + fol_seg + (1+vowel|participant) + (1|word), data = face3)
Compiling the C++ model
recompiling to avoid crashing R session
Start sampling

SAMPLING FOR MODEL 'd93beac45d3bb02918b6ac08195de742' NOW (CHAIN 1).
Chain 1: 
Chain 1: Gradient evaluation took 0.001349 seconds
Chain 1: 1000 transitions using 10 leapfrog steps per transition would take 13.49 seconds.
Chain 1: Adjust your expectations accordingly!
Chain 1: 
Chain 1: 
Chain 1: Iteration:    1 / 2000 [  0%]  (Warmup)
Chain 1: Iteration:  200 / 2000 [ 10%]  (Warmup)
Chain 1: Iteration:  400 / 2000 [ 20%]  (Warmup)
Chain 1: Iteration:  600 / 2000 [ 30%]  (Warmup)
Chain 1: Iteration:  800 / 2000 [ 40%]  (Warmup)
Chain 1: Iteration: 1000 / 2000 [ 50%]  (Warmup)
Chain 1: Iteration: 1001 / 2000 [ 50%]  (Sampling)
Chain 1: Iteration: 1200 / 2000 [ 60%]  (Sampling)
Chain 1: Iteration: 1400 / 2000 [ 70%]  (Sampling)
Chain 1: Iteration: 1600 / 2000 [ 80%]  (Sampling)
Chain 1: Iteration: 1800 / 2000 [ 90%]  (Sampling)
Chain 1: Iteration: 2000 / 2000 [100%]  (Sampling)
Chain 1: 
Chain 1:  Elapsed Time: 66.6653 seconds (Warm-up)
Chain 1:                19.1572 seconds (Sampling)
Chain 1:                85.8225 seconds (Total)
Chain 1: 

SAMPLING FOR MODEL 'd93beac45d3bb02918b6ac08195de742' NOW (CHAIN 2).
Chain 2: 
Chain 2: Gradient evaluation took 0.000701 seconds
Chain 2: 1000 transitions using 10 leapfrog steps per transition would take 7.01 seconds.
Chain 2: Adjust your expectations accordingly!
Chain 2: 
Chain 2: 
Chain 2: Iteration:    1 / 2000 [  0%]  (Warmup)
Chain 2: Iteration:  200 / 2000 [ 10%]  (Warmup)
Chain 2: Iteration:  400 / 2000 [ 20%]  (Warmup)
Chain 2: Iteration:  600 / 2000 [ 30%]  (Warmup)
Chain 2: Iteration:  800 / 2000 [ 40%]  (Warmup)
Chain 2: Iteration: 1000 / 2000 [ 50%]  (Warmup)
Chain 2: Iteration: 1001 / 2000 [ 50%]  (Sampling)
Chain 2: Iteration: 1200 / 2000 [ 60%]  (Sampling)
Chain 2: Iteration: 1400 / 2000 [ 70%]  (Sampling)
Chain 2: Iteration: 1600 / 2000 [ 80%]  (Sampling)
Chain 2: Iteration: 1800 / 2000 [ 90%]  (Sampling)
Chain 2: Iteration: 2000 / 2000 [100%]  (Sampling)
Chain 2: 
Chain 2:  Elapsed Time: 76.9509 seconds (Warm-up)
Chain 2:                19.3641 seconds (Sampling)
Chain 2:                96.3149 seconds (Total)
Chain 2: 

SAMPLING FOR MODEL 'd93beac45d3bb02918b6ac08195de742' NOW (CHAIN 3).
Chain 3: 
Chain 3: Gradient evaluation took 0.000995 seconds
Chain 3: 1000 transitions using 10 leapfrog steps per transition would take 9.95 seconds.
Chain 3: Adjust your expectations accordingly!
Chain 3: 
Chain 3: 
Chain 3: Iteration:    1 / 2000 [  0%]  (Warmup)
Chain 3: Iteration:  200 / 2000 [ 10%]  (Warmup)
Chain 3: Iteration:  400 / 2000 [ 20%]  (Warmup)
Chain 3: Iteration:  600 / 2000 [ 30%]  (Warmup)
Chain 3: Iteration:  800 / 2000 [ 40%]  (Warmup)
Chain 3: Iteration: 1000 / 2000 [ 50%]  (Warmup)
Chain 3: Iteration: 1001 / 2000 [ 50%]  (Sampling)
Chain 3: Iteration: 1200 / 2000 [ 60%]  (Sampling)
Chain 3: Iteration: 1400 / 2000 [ 70%]  (Sampling)
Chain 3: Iteration: 1600 / 2000 [ 80%]  (Sampling)
Chain 3: Iteration: 1800 / 2000 [ 90%]  (Sampling)
Chain 3: Iteration: 2000 / 2000 [100%]  (Sampling)
Chain 3: 
Chain 3:  Elapsed Time: 61.878 seconds (Warm-up)
Chain 3:                19.6549 seconds (Sampling)
Chain 3:                81.5328 seconds (Total)
Chain 3: 

SAMPLING FOR MODEL 'd93beac45d3bb02918b6ac08195de742' NOW (CHAIN 4).
Chain 4: 
Chain 4: Gradient evaluation took 0.000993 seconds
Chain 4: 1000 transitions using 10 leapfrog steps per transition would take 9.93 seconds.
Chain 4: Adjust your expectations accordingly!
Chain 4: 
Chain 4: 
Chain 4: Iteration:    1 / 2000 [  0%]  (Warmup)
Chain 4: Iteration:  200 / 2000 [ 10%]  (Warmup)
Chain 4: Iteration:  400 / 2000 [ 20%]  (Warmup)
Chain 4: Iteration:  600 / 2000 [ 30%]  (Warmup)
Chain 4: Iteration:  800 / 2000 [ 40%]  (Warmup)
Chain 4: Iteration: 1000 / 2000 [ 50%]  (Warmup)
Chain 4: Iteration: 1001 / 2000 [ 50%]  (Sampling)
Chain 4: Iteration: 1200 / 2000 [ 60%]  (Sampling)
Chain 4: Iteration: 1400 / 2000 [ 70%]  (Sampling)
Chain 4: Iteration: 1600 / 2000 [ 80%]  (Sampling)
Chain 4: Iteration: 1800 / 2000 [ 90%]  (Sampling)
Chain 4: Iteration: 2000 / 2000 [100%]  (Sampling)
Chain 4: 
Chain 4:  Elapsed Time: 70.995 seconds (Warm-up)
Chain 4:                19.1653 seconds (Sampling)
Chain 4:                90.1603 seconds (Total)
Chain 4: 
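
Before reading the tidy_stan output below, a quick convergence and fit check doesn't hurt; summary() reports Rhat and effective sample sizes, and pp_check() overlays posterior predictive draws on the observed norm_TL distribution (standard brms helpers; just a sketch):

summary(face_m2)   # check that Rhat values are close to 1
pp_check(face_m2)  # posterior predictive density overlay for norm_TL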

Check the summary and posteriors:

tidy_stan(face_m2, prob=0.89, type="fixed", digits=4)
longer object length is not a multiple of shorter object length

# Summary Statistics of Stan-Model

                           estimate std.error          HDI(89%)  ratio   rhat   mcse
 Intercept                   0.3089    0.0208 [ 0.2779  0.3438] 0.2548 1.0007 0.0006
 Log_dur_z                   0.0403    0.0038 [ 0.0342  0.0461] 1.4599 0.9997 0.0000
 Log_dur_z                   0.0403    0.0038 [ 0.0342  0.0461] 1.4599 0.9997 0.0000
 vowelfleece                -0.0130    0.0219 [-0.0472  0.0241] 0.6536 1.0001 0.0004
 agechild                    0.0866    0.0285 [ 0.0406  0.1300] 0.2392 1.0005 0.0009
 sexF                        0.0760    0.0256 [ 0.0338  0.1209] 0.2532 1.0003 0.0009
 fol_segfol_approximant      0.0347    0.0193 [ 0.0024  0.0630] 0.6202 1.0004 0.0004
 fol_segfol_nasal            0.0264    0.0184 [-0.0041  0.0541] 0.3974 1.0016 0.0005
 fol_segfol_voiced_C        -0.0027    0.0165 [-0.0284  0.0242] 0.4067 1.0013 0.0004
 fol_segfol_voiced_clus     -0.0052    0.0269 [-0.0466  0.0401] 0.7173 0.9999 0.0005
 fol_segfol_voiceless_C      0.0162    0.0159 [-0.0077  0.0413] 0.3740 1.0017 0.0004
 fol_segfol_voiceless_clus  -0.0007    0.0239 [-0.0376  0.0386] 0.6038 1.0015 0.0005
 fol_segother               -0.0039    0.0150 [-0.0280  0.0200] 0.4146 1.0016 0.0004
 vowelfleece.agechild       -0.0340    0.0311 [-0.0855  0.0185] 0.6289 0.9999 0.0007
 vowelfleece.sexF            0.0032    0.0340 [-0.0560  0.0548] 0.6908 0.9999 0.0007
 agechild.sexF              -0.1071    0.0429 [-0.1820 -0.0414] 0.2480 1.0014 0.0014
 vowelfleece.agechild.sexF   0.0134    0.0482 [-0.0671  0.0913] 0.6633 1.0008 0.0010
face_posterior2 <- as.matrix(face_m2)
#dimnames(face_posterior1)
#color_scheme_set("brightblue")
mcmc_intervals(face_posterior2,
           pars = c("b_Log_dur_z",
                    "b_vowelfleece",
                    "b_agechild", 
                    "b_sexF",
                    "b_fol_segfol_nasal",
                    "b_fol_segfol_approximant",
                    "b_fol_segfol_voiced_C",
                    "b_fol_segfol_voiced_clus",
                    "b_fol_segfol_voiceless_C",
                    "b_fol_segfol_voiceless_clus",
                    "b_fol_segother",
                    "b_agechild:sexF"),
           prob=0.89) + ggplot2::theme_minimal() + ggplot2::geom_vline(xintercept=0)

mcmc_areas(face_posterior2,
           pars = c("b_Log_dur_z",
                    "b_vowelfleece",
                    "b_agechild", 
                    "b_sexF",
                    "b_fol_segfol_nasal",
                    "b_fol_segfol_approximant",
                    "b_fol_segfol_voiced_C",
                    "b_fol_segfol_voiced_clus",
                    "b_fol_segfol_voiceless_C",
                    "b_fol_segfol_voiceless_clus",
                    "b_fol_segother",
                    "b_agechild:sexF"),
           prob=0.89) + ggplot2::theme_minimal() + ggplot2::geom_vline(xintercept=0)

What this tells us is that:

  • An increase in duration predicts a greater Trajectory Length

  • FLEECE tends to have a smaller Trajectory Length than FACE. However, 0 is inside the credible interval – so you can’t be 89% confident that there is any effect of vowel (FLEECE vs. FACE) on Trajectory Length at all. If you look at the next graph, you can see that 0 even falls inside the 50% credible interval. So we don’t have grounds to conclude that FACE is any more diphthongal than FLEECE.

  • Being a child as opposed to adolescent, and being adolescent female as opposed to adolescent male, both predict greater Trajectory Length – i.e. adolescent males have the most monophthongal FACE

  • A following nasal or approximant predicts a greater Trajectory Length; otherwise the various following segments have median coefficients pretty close to 0

  • There is a negative age-sex interaction. Female children are more monophthongal than male children.

mcmc_intervals(face_posterior2,
           pars = c("b_Log_dur_z",
                    "b_vowelfleece",
                    "b_agechild", 
                    "b_sexF",
                    "b_fol_segfol_nasal",
                    "b_fol_segfol_approximant",
                    "b_fol_segfol_voiced_C",
                    "b_fol_segfol_voiced_clus",
                    "b_fol_segfol_voiceless_C",
                    "b_fol_segfol_voiceless_clus",
                    "b_fol_segother",
                    "b_agechild:sexF"),
           prob=0.50) + ggplot2::theme_minimal() + ggplot2::geom_vline(xintercept=0)
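
The age:sex interaction is easier to read as predicted values than as a coefficient. brms can plot the model's population-level predictions for each age-by-sex cell; conditional_effects() in current brms, marginal_effects() in older versions (a sketch):

# Predicted norm_TL for each age x sex combination
plot(conditional_effects(face_m2, effects = "age:sex"))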

Just for fun/out of interest, we can also look at the random effects, since we included vowel in the random-effects structure (a by-participant slope for vowel).

tidy_stan(face_m2, prob=0.89, type="random", digits=4)
longer object length is not a multiple of shorter object length

# Summary Statistics of Stan-Model

## Random effect (Intercept: participant)

                       estimate std.error          HDI(89%)  ratio   rhat  mcse
 participant.Ali         0.0384    0.0233 [ 0.0004  0.0755] 0.5210 0.9999 6e-04
 participant.Amanda      0.0044    0.0269 [-0.0382  0.0505] 0.3676 1.0007 6e-04
 participant.CB         -0.0788    0.0233 [-0.1160 -0.0453] 0.4637 0.9999 6e-04
 participant.Chantelle  -0.0383    0.0260 [-0.0797  0.0057] 0.3212 0.9999 7e-04
 participant.ChrisB     -0.0088    0.0261 [-0.0496  0.0329] 0.3195 1.0012 7e-04
 participant.Daniel     -0.1183    0.0204 [-0.1509 -0.0862] 0.3725 1.0013 5e-04
 participant.Denzel     -0.0796    0.0234 [-0.1151 -0.0390] 0.4944 0.9997 5e-04
 participant.F1          0.0088    0.0308 [-0.0414  0.0545] 0.6026 1.0007 7e-04
 participant.F10        -0.0178    0.0265 [-0.0612  0.0242] 0.4577 1.0011 6e-04
 participant.F3          0.0078    0.0273 [-0.0348  0.0520] 0.5108 1.0008 5e-04
 participant.F4          0.0147    0.0316 [-0.0333  0.0646] 0.6820 1.0007 6e-04
 participant.F7         -0.0065    0.0302 [-0.0516  0.0433] 0.6297 1.0009 6e-04
 participant.F8         -0.0447    0.0295 [-0.0912 -0.0015] 0.5583 1.0002 6e-04
 participant.F9          0.0376    0.0278 [-0.0077  0.0822] 0.5360 1.0005 6e-04
 participant.GW         -0.0352    0.0223 [-0.0721  0.0010] 0.4780 1.0014 5e-04
 participant.Ibrahim     0.0583    0.0223 [ 0.0242  0.0953] 0.4375 0.9998 6e-04
 participant.Jessica     0.0482    0.0281 [ 0.0033  0.0915] 0.3810 1.0002 7e-04
 participant.Joe        -0.0191    0.0193 [-0.0501  0.0138] 0.3560 1.0012 6e-04
 participant.Kai         0.1212    0.0191 [ 0.0919  0.1513] 0.3047 1.0001 4e-04
 participant.Khadir     -0.0127    0.0222 [-0.0486  0.0230] 0.4578 1.0003 6e-04
 participant.Lola       -0.0304    0.0254 [-0.0715  0.0090] 0.3097 1.0007 6e-04
 participant.Lucy       -0.0355    0.0281 [-0.0838  0.0058] 0.3937 1.0013 7e-04
 participant.M1          0.0009    0.0284 [-0.0426  0.0483] 0.3828 1.0004 7e-04
 participant.M3          0.0291    0.0304 [-0.0168  0.0799] 0.4729 1.0013 8e-04
 participant.M4         -0.0335    0.0266 [-0.0739  0.0113] 0.3224 1.0008 7e-04
 participant.M5          0.0712    0.0290 [ 0.0225  0.1131] 0.4053 1.0018 7e-04
 participant.M6         -0.0295    0.0288 [-0.0785  0.0142] 0.3799 1.0010 7e-04
 participant.M7         -0.0110    0.0288 [-0.0575  0.0369] 0.4117 1.0010 7e-04
 participant.M8         -0.0324    0.0304 [-0.0825  0.0119] 0.4635 1.0009 7e-04
 participant.Matisse    -0.0096    0.0221 [-0.0445  0.0253] 0.4333 1.0006 5e-04
 participant.Moses       0.0744    0.0253 [ 0.0326  0.1149] 0.5845 0.9994 6e-04
 participant.Omar       -0.0464    0.0235 [-0.0842 -0.0106] 0.4798 1.0001 6e-04
 participant.Sami        0.0564    0.0220 [ 0.0201  0.0931] 0.3944 1.0001 5e-04
 participant.SD         -0.0427    0.0228 [-0.0803 -0.0068] 0.4413 1.0004 6e-04
 participant.Shantel     0.0546    0.0279 [ 0.0100  0.0989] 0.3575 1.0000 6e-04
 participant.Tariq       0.0345    0.0230 [-0.0028  0.0698] 0.5074 1.0006 6e-04
 participant.Tony        0.0725    0.0201 [ 0.0446  0.1062] 0.3536 1.0006 4e-04
 participant.ZR         -0.0183    0.0233 [-0.0584  0.0209] 0.5624 1.0007 3e-04

## Random effect vowelfleece

                       estimate std.error          HDI(89%)  ratio   rhat  mcse
 participant.Ali         0.0016    0.0288 [-0.0584  0.0520] 1.4833 0.9998 5e-04
 participant.Amanda      0.0046    0.0274 [-0.0442  0.0591] 1.2276 0.9997 5e-04
 participant.CB         -0.0157    0.0329 [-0.0747  0.0390] 1.2827 0.9996 5e-04
 participant.Chantelle  -0.0138    0.0321 [-0.0780  0.0400] 1.1375 1.0005 5e-04
 participant.ChrisB      0.0000    0.0300 [-0.0579  0.0569] 1.4811 0.9994 5e-04
 participant.Daniel     -0.0085    0.0329 [-0.0678  0.0464] 1.3362 0.9996 5e-04
 participant.Denzel      0.0005    0.0306 [-0.0501  0.0624] 1.2247 0.9993 5e-04
 participant.F1          0.0045    0.0264 [-0.0424  0.0614] 1.3322 0.9995 8e-04
 participant.F10         0.0383    0.0413 [-0.0081  0.1046] 0.3849 1.0020 6e-04
 participant.F3          0.0132    0.0288 [-0.0317  0.0706] 0.9016 1.0006 4e-04
 participant.F4         -0.0019    0.0283 [-0.0588  0.0506] 1.4690 0.9999 7e-04
 participant.F7         -0.0229    0.0350 [-0.0944  0.0217] 0.6209 1.0016 7e-04
 participant.F8         -0.0213    0.0316 [-0.0828  0.0289] 0.8129 1.0000 5e-04
 participant.F9         -0.0044    0.0277 [-0.0581  0.0405] 1.2488 0.9999 5e-04
 participant.GW          0.0053    0.0299 [-0.0437  0.0681] 1.1801 0.9992 5e-04
 participant.Ibrahim     0.0086    0.0287 [-0.0456  0.0680] 1.4282 0.9993 6e-04
 participant.Jessica    -0.0119    0.0344 [-0.0850  0.0394] 0.7604 1.0006 7e-04
 participant.Joe        -0.0201    0.0326 [-0.0876  0.0271] 0.8151 1.0005 6e-04
 participant.Kai        -0.0049    0.0389 [-0.0695  0.0622] 0.8964 1.0000 5e-04
 participant.Khadir      0.0016    0.0271 [-0.0467  0.0544] 1.6600 0.9991 5e-04
 participant.Lola        0.0082    0.0300 [-0.0379  0.0719] 0.9793 0.9994 5e-04
 participant.Lucy        0.0026    0.0291 [-0.0481  0.0606] 1.2503 0.9999 5e-04
 participant.M1         -0.0064    0.0276 [-0.0613  0.0388] 1.0188 1.0002 6e-04
 participant.M3          0.0267    0.0362 [-0.0196  0.0956] 0.6136 1.0013 9e-04
 participant.M4         -0.0362    0.0366 [-0.0994  0.0113] 0.4949 1.0007 7e-04
 participant.M5          0.0334    0.0364 [-0.0157  0.0924] 0.5885 1.0011 5e-04
 participant.M6         -0.0049    0.0273 [-0.0601  0.0416] 1.3978 0.9997 5e-04
 participant.M7         -0.0013    0.0267 [-0.0536  0.0470] 1.2443 1.0000 4e-04
 participant.M8         -0.0082    0.0264 [-0.0667  0.0328] 1.3601 0.9998 4e-04
 participant.Matisse    -0.0033    0.0287 [-0.0569  0.0496] 1.5553 0.9993 5e-04
 participant.Moses      -0.0057    0.0305 [-0.0673  0.0463] 0.9785 1.0000 5e-04
 participant.Omar       -0.0037    0.0289 [-0.0604  0.0491] 1.4533 0.9999 9e-04
 participant.Sami        0.0628    0.0562 [-0.0073  0.1462] 0.3229 1.0024 7e-04
 participant.SD         -0.0117    0.0288 [-0.0725  0.0326] 1.3851 1.0003 4e-04
 participant.Shantel     0.0074    0.0301 [-0.0452  0.0690] 1.4991 0.9994 6e-04
 participant.Tariq      -0.0088    0.0303 [-0.0709  0.0391] 0.9553 1.0003 4e-04
 participant.Tony        0.0093    0.0314 [-0.0454  0.0705] 1.6872 0.9994 5e-04
 participant.ZR         -0.0028    0.0281 [-0.0585  0.0494] 1.5947 0.9992 4e-04

## Random effect (Intercept: word...APRIL..)

                estimate std.error          HDI(89%) ratio   rhat  mcse
 word...APRIL..  -0.0143    0.0411 [-0.0823  0.0466] 1.746 0.9991 5e-04

## Random effect (Intercept: word...BAYING..)

                 estimate std.error          HDI(89%)  ratio   rhat  mcse
 word...BAYING..   0.0085    0.0394 [-0.0622  0.0692] 2.0252 0.9994 5e-04

## Random effect (Intercept: word...EAT..)

              estimate std.error          HDI(89%)  ratio   rhat  mcse
 word...EAT..  -0.0113    0.0395 [-0.0766  0.0547] 2.0509 0.9996 4e-04

## Random effect (Intercept: word...EIGHTEENTH..)

                     estimate std.error          HDI(89%)  ratio   rhat  mcse
 word...EIGHTEENTH..   0.0035    0.0407 [-0.0613  0.0724] 2.2032 0.9998 5e-04

## Random effect (Intercept: word...FACE..)

               estimate std.error          HDI(89%)  ratio   rhat  mcse
 word...FACE..  -0.0033    0.0382 [-0.0681  0.0581] 1.8443 0.9996 4e-04

## Random effect (Intercept: word...SAYING..)

                 estimate std.error          HDI(89%)  ratio   rhat  mcse
 word...SAYING..   0.0117    0.0409 [-0.0559  0.0789] 2.0331 1.0001 5e-04

## Random effect (Intercept: word)

                    estimate std.error          HDI(89%)  ratio   rhat  mcse
 word.A              -0.0017    0.0227 [-0.0375  0.0336] 1.6411 0.9996 2e-04
 word.ABBREVIATIONS   0.0015    0.0406 [-0.0627  0.0691] 2.1975 0.9995 5e-04
 word.ABLE            0.0057    0.0404 [-0.0538  0.0692] 1.8829 0.9995 4e-04
 word.ACE            -0.0028    0.0402 [-0.0691  0.0630] 2.0562 0.9991 5e-04
 word.ACHE           -0.0135    0.0414 [-0.0837  0.0530] 1.8063 0.9993 4e-04
 word.ACQUAINTANCES   0.0034    0.0417 [-0.0637  0.0703] 2.3059 0.9992 5e-04
 word.AEROPLANE       0.0070    0.0380 [-0.0517  0.0745] 1.7488 0.9993 5e-04
 word.AGE             0.0139    0.0314 [-0.0348  0.0669] 1.6444 1.0002 4e-04
 word.AGES           -0.0166    0.0390 [-0.0817  0.0461] 2.0171 0.9993 4e-04
 word.AIM            -0.0064    0.0399 [-0.0743  0.0538] 1.9732 0.9997 5e-04
 word.ALWAYS          0.0246    0.0381 [-0.0375  0.0848] 2.3589 0.9996 4e-04
 word.AMAZINGLY      -0.0092    0.0426 [-0.0746  0.0590] 2.1169 0.9993 5e-04
 word.ANYWAY         -0.0002    0.0352 [-0.0574  0.0522] 1.7083 0.9996 4e-04
 word.ANYWAYS        -0.0069    0.0419 [-0.0750  0.0567] 1.8167 0.9994 5e-04
 word.APPRECIATE     -0.0101    0.0427 [-0.0811  0.0548] 1.9937 0.9995 5e-04
 word.APRIL          -0.0109    0.0356 [-0.0674  0.0439] 2.0834 0.9996 4e-04
 word.ASIAN          -0.0077    0.0399 [-0.0734  0.0567] 1.9366 0.9998 5e-04
 word.ATE             0.0025    0.0385 [-0.0634  0.0634] 2.0306 0.9996 5e-04
 word.AWAY            0.0511    0.0229 [ 0.0111  0.0896] 1.6583 0.9995 3e-04
 word.BABIES         -0.0013    0.0410 [-0.0643  0.0710] 2.1615 0.9994 6e-04
 word.BABY            0.0253    0.0199 [-0.0081  0.0561] 1.4682 0.9994 2e-04
 word.BAIT            0.0056    0.0336 [-0.0454  0.0650] 1.9085 0.9998 4e-04
 word.BAJAN          -0.0085    0.0412 [-0.0757  0.0594] 2.2012 0.9994 5e-04
 word.BARBADOS       -0.0079    0.0421 [-0.0758  0.0608] 1.9775 0.9993 5e-04
 word.BASED           0.0075    0.0366 [-0.0517  0.0664] 1.5422 1.0000 5e-04
 word.BASICALLY      -0.0118    0.0199 [-0.0437  0.0198] 1.5509 0.9998 2e-04
 word.BE             -0.0160    0.0299 [-0.0647  0.0308] 2.1367 0.9998 4e-04
 word.BEACH          -0.0105    0.0396 [-0.0769  0.0503] 1.8085 0.9993 5e-04
 word.BEAT            0.0002    0.0404 [-0.0631  0.0677] 1.6131 0.9999 4e-04
 word.BEATING         0.0051    0.0423 [-0.0602  0.0747] 2.0911 0.9993 5e-04
 word.BECAME         -0.0167    0.0415 [-0.0866  0.0480] 1.4956 0.9995 5e-04
 word.BEE             0.0325    0.0343 [-0.0202  0.0880] 1.7644 0.9999 4e-04
 word.BEEF            0.0039    0.0390 [-0.0594  0.0670] 1.8395 0.9997 4e-04
 word.BEEPING        -0.0025    0.0399 [-0.0661  0.0617] 2.1110 0.9991 4e-04
 word.BEES           -0.0131    0.0406 [-0.0780  0.0530] 2.0650 0.9994 5e-04
 word.BEHAVIOR       -0.0016    0.0412 [-0.0688  0.0633] 2.0532 0.9995 5e-04
 word.BEHAVIOUR      -0.0161    0.0373 [-0.0775  0.0476] 1.7306 1.0000 4e-04
 word.BEING          -0.0018    0.0393 [-0.0707  0.0589] 1.9326 0.9991 5e-04
 word.BELIEVE        -0.0020    0.0392 [-0.0641  0.0640] 1.9497 0.9996 4e-04
 word.BIRTHDAY       -0.0030    0.0336 [-0.0564  0.0498] 2.4495 0.9992 4e-04
 word.BLAME          -0.0058    0.0392 [-0.0650  0.0594] 2.1019 0.9996 5e-04
 word.BLAZING        -0.0025    0.0397 [-0.0683  0.0600] 1.7523 0.9991 5e-04
 word.BRAIN           0.0045    0.0421 [-0.0666  0.0705] 1.9397 1.0000 5e-04
 word.BRAVE           0.0056    0.0396 [-0.0614  0.0724] 1.9655 0.9992 5e-04
 word.BREAK           0.0353    0.0394 [-0.0278  0.0962] 1.7930 0.9993 4e-04
 word.BREAKING       -0.0027    0.0411 [-0.0661  0.0679] 1.9260 1.0000 5e-04
 word.CAKE           -0.0324    0.0220 [-0.0682  0.0003] 1.7381 0.9996 3e-04
 word.CAME           -0.0414    0.0203 [-0.0733 -0.0066] 1.4978 0.9993 2e-04
 word.CAPABLE        -0.0094    0.0414 [-0.0775  0.0550] 1.8888 0.9992 4e-04
 word.CASE           -0.0168    0.0393 [-0.0749  0.0489] 2.3420 0.9993 4e-04
 word.CDS            -0.0073    0.0410 [-0.0688  0.0622] 2.0603 0.9993 5e-04
 word.CELEBRATE      -0.0087    0.0408 [-0.0751  0.0587] 1.9846 0.9999 5e-04
 word.CELEBRATING    -0.0045    0.0426 [-0.0676  0.0683] 1.7484 0.9993 6e-04
 word.CHANGE          0.1021    0.0241 [ 0.0643  0.1427] 1.3647 0.9997 3e-04
 word.CHANGED         0.0370    0.0346 [-0.0182  0.0921] 1.6085 0.9996 4e-04
 word.CHANGES         0.0021    0.0387 [-0.0598  0.0630] 1.7439 1.0000 4e-04
 word.CHANGING       -0.0068    0.0403 [-0.0659  0.0552] 2.0004 0.9993 5e-04
 word.CHASE          -0.0191    0.0374 [-0.0815  0.0395] 1.4418 0.9992 4e-04
 word.CHASED         -0.0106    0.0378 [-0.0729  0.0523] 1.9285 0.9993 5e-04
 word.CHASING        -0.0381    0.0361 [-0.0978  0.0169] 1.6894 0.9994 5e-04
 word.CHEESE          0.0417    0.0334 [-0.0077  0.0968] 1.5925 1.0002 4e-04
 word.CHELSEA        -0.0005    0.0411 [-0.0666  0.0664] 2.1055 0.9995 5e-04
 word.CHINESE        -0.0060    0.0429 [-0.0704  0.0606] 2.1085 0.9992 5e-04
 word.CLAIM           0.0048    0.0387 [-0.0605  0.0680] 1.7660 0.9996 5e-04
 word.CLAIMED        -0.0064    0.0403 [-0.0696  0.0608] 1.9775 0.9993 5e-04
 word.CLAIMING        0.0007    0.0382 [-0.0641  0.0652] 1.9177 0.9996 4e-04
 word.COMMUNICATE    -0.0107    0.0415 [-0.0768  0.0524] 2.0738 0.9996 5e-04
 word.COMMUNICATION  -0.0011    0.0425 [-0.0693  0.0615] 1.9871 0.9997 5e-04
 word.COMPLICATED     0.0015    0.0397 [-0.0618  0.0652] 2.0163 1.0001 5e-04
 word.CONVERSATION    0.0042    0.0334 [-0.0498  0.0571] 1.6706 0.9990 4e-04
 word.CONVERSATIONS   0.0063    0.0370 [-0.0533  0.0666] 1.9328 0.9995 6e-04
 word.CRAZY           0.0798    0.0319 [ 0.0265  0.1295] 1.1194 1.0001 4e-04
 word.CREATE         -0.0139    0.0399 [-0.0788  0.0515] 1.7103 0.9996 4e-04
 word.CREATED        -0.0140    0.0388 [-0.0763  0.0509] 2.2271 0.9994 5e-04
 word.CREATING       -0.0050    0.0405 [-0.0719  0.0592] 1.6881 0.9991 5e-04
 word.CREEPY          0.0057    0.0410 [-0.0646  0.0705] 1.8865 0.9996 5e-04
 word.CUPCAKE        -0.0288    0.0413 [-0.0945  0.0348] 1.6684 1.0003 4e-04
 word.D               0.0029    0.0395 [-0.0624  0.0644] 2.0521 0.9994 5e-04
 word.DAISY          -0.0191    0.0374 [-0.0776  0.0405] 1.8870 1.0001 4e-04
 word.DANGEROUS       0.0109    0.0400 [-0.0505  0.0813] 1.9480 0.9992 5e-04
 word.DATE           -0.0014    0.0350 [-0.0536  0.0536] 1.6479 0.9993 4e-04
 word.DATES          -0.0028    0.0383 [-0.0630  0.0613] 1.9598 0.9994 5e-04
 word.DAY            -0.0473    0.0208 [-0.0796 -0.0143] 1.3507 1.0000 3e-04
 word.DAYLIGHT        0.0401    0.0383 [-0.0192  0.1086] 1.4567 0.9996 5e-04
 word.DAYS            0.0095    0.0228 [-0.0275  0.0453] 1.5151 0.9996 3e-04
 word.DICTATE        -0.0044    0.0412 [-0.0725  0.0635] 1.7248 0.9998 5e-04
 word.DICTATING       0.0065    0.0397 [-0.0548  0.0717] 2.0897 0.9997 5e-04
 word.DISABLED        0.0029    0.0396 [-0.0566  0.0668] 1.7592 1.0007 5e-04
 word.DISGRACEFUL    -0.0051    0.0408 [-0.0712  0.0585] 1.7310 0.9999 5e-04
 word.DONATE          0.0030    0.0407 [-0.0653  0.0654] 1.8394 0.9992 5e-04
 word.DONATED         0.0256    0.0393 [-0.0398  0.0862] 1.9043 0.9999 5e-04
 word.E              -0.0324    0.0366 [-0.0900  0.0268] 1.7922 0.9996 5e-04
 word.EACH           -0.0125    0.0398 [-0.0750  0.0535] 1.6072 1.0007 4e-04
 word.EASIER          0.0110    0.0420 [-0.0594  0.0770] 2.0402 0.9992 5e-04
 word.EASIEST        -0.0124    0.0403 [-0.0727  0.0566] 1.9444 0.9997 4e-04
 word.EASY           -0.0017    0.0355 [-0.0570  0.0603] 2.2648 0.9995 4e-04
 word.EAT             0.0199    0.0412 [-0.0468  0.0874] 1.8126 0.9993 5e-04
 word.EATING          0.0022    0.0390 [-0.0587  0.0681] 2.0368 0.9993 4e-04
 word.EDUCATION      -0.0118    0.0400 [-0.0787  0.0500] 2.1854 0.9992 5e-04
 word.EGYPT           0.0163    0.0403 [-0.0478  0.0775] 1.7691 0.9999 4e-04
 word.EIGHT           0.0397    0.0222 [ 0.0040  0.0747] 1.5057 0.9997 3e-04
 word.EIGHTEEN       -0.0191    0.0334 [-0.0743  0.0347] 1.8855 0.9993 4e-04
 word.EIGHTH         -0.0033    0.0413 [-0.0660  0.0634] 2.1324 0.9995 4e-04
 word.EIGHTY          0.0109    0.0363 [-0.0465  0.0759] 2.1443 0.9998 5e-04
 word.ELABORATE      -0.0072    0.0391 [-0.0726  0.0570] 1.7560 0.9993 4e-04
 word.ELABORATED      0.0037    0.0399 [-0.0617  0.0687] 2.3861 0.9993 5e-04
 word.EQUALLY        -0.0023    0.0400 [-0.0637  0.0653] 1.7377 0.9994 5e-04
 word.ESCALATES      -0.0149    0.0421 [-0.0846  0.0554] 1.7325 0.9995 5e-04
 word.ESCAPE         -0.0188    0.0401 [-0.0890  0.0408] 2.2612 0.9994 5e-04
 word.ESCAPED        -0.0230    0.0289 [-0.0695  0.0211] 1.6508 0.9995 3e-04
 word.ESTATE         -0.0101    0.0365 [-0.0639  0.0543] 1.8396 1.0000 4e-04
 word.EVEN            0.0215    0.0282 [-0.0227  0.0674] 1.7557 1.0001 3e-04
 word.EXPLAIN        -0.0136    0.0375 [-0.0773  0.0426] 2.0439 0.9993 4e-04
 word.EXPLAINING     -0.0069    0.0404 [-0.0734  0.0600] 1.7909 0.9996 5e-04
 word.FACE            0.0220    0.0293 [-0.0226  0.0676] 2.0858 0.9998 3e-04
 word.FACEBOOK       -0.0091    0.0351 [-0.0692  0.0495] 2.0377 0.9993 4e-04
 word.FACED          -0.0078    0.0425 [-0.0736  0.0602] 1.8172 0.9996 5e-04
 word.FACES          -0.0165    0.0372 [-0.0767  0.0418] 2.0789 0.9996 4e-04
 word.FAKE            0.0186    0.0354 [-0.0353  0.0779] 1.8650 0.9993 4e-04
 word.FAMOUS         -0.0305    0.0351 [-0.0906  0.0230] 2.1236 0.9994 4e-04
 word.FAVORITE       -0.0040    0.0415 [-0.0697  0.0609] 1.9640 0.9996 4e-04
 word.FAVOURITE      -0.0104    0.0414 [-0.0775  0.0539] 2.2323 0.9994 5e-04
 word.FEAST           0.0181    0.0372 [-0.0386  0.0788] 1.7809 0.9996 5e-04
 word.FEET            0.0448    0.0396 [-0.0179  0.1121] 1.4108 0.9996 5e-04
 word.FLAMES          0.0004    0.0382 [-0.0626  0.0626] 1.8630 0.9999 4e-04
 word.FLIRTATIOUS     0.0000    0.0393 [-0.0641  0.0611] 2.1635 0.9995 5e-04
 word.FRIDAY         -0.0057    0.0405 [-0.0713  0.0599] 1.8643 0.9998 5e-04
 word.GAME           -0.0237    0.0312 [-0.0733  0.0225] 1.9004 0.9993 3e-04
 word.GAMES          -0.0170    0.0401 [-0.0798  0.0478] 2.1207 0.9999 5e-04
 word.GATE            0.0488    0.0277 [ 0.0044  0.0913] 1.7805 0.9993 3e-04
 word.GAVE           -0.0220    0.0247 [-0.0630  0.0170] 1.5767 0.9997 3e-04
 word.GAY            -0.0232    0.0310 [-0.0725  0.0274] 1.6091 0.9991 4e-04
 word.GAYS           -0.0136    0.0406 [-0.0774  0.0541] 1.7671 0.9997 4e-04
 word.GEE            -0.0065    0.0403 [-0.0734  0.0552] 2.2468 0.9997 5e-04
 word.GEES           -0.0047    0.0423 [-0.0724  0.0663] 1.8200 0.9995 5e-04
 word.GENERATION      0.0072    0.0367 [-0.0526  0.0690] 2.2561 0.9995 4e-04
 word.GRADES         -0.0027    0.0406 [-0.0677  0.0643] 2.1342 0.9992 5e-04
 word.GRATEFUL        0.0008    0.0409 [-0.0612  0.0654] 2.1840 0.9992 5e-04
 word.GREAT           0.0179    0.0347 [-0.0392  0.0730] 1.7185 0.9994 4e-04
 word.GRENADA        -0.0062    0.0388 [-0.0679  0.0647] 2.1793 0.9993 6e-04
 word.GREY            0.0815    0.0336 [ 0.0269  0.1342] 1.1550 0.9995 4e-04
 word.H              -0.0065    0.0392 [-0.0666  0.0596] 2.0176 0.9994 5e-04
 [ reached 'max' / getOption("max.print") -- omitted 220 rows ]

## Random effect (Intercept: word.AIN)

            estimate std.error          HDI(89%)  ratio   rhat  mcse
 word.AIN.T  -0.0354    0.0381 [-0.0974  0.0237] 1.6333 1.0005 4e-04

## Random effect (Intercept: word.ASIAN)

              estimate std.error          HDI(89%)  ratio   rhat  mcse
 word.ASIAN.S  -0.0042    0.0394 [-0.0710  0.0581] 1.5929 1.0002 5e-04

## Random effect (Intercept: word.BABY)

             estimate std.error          HDI(89%)  ratio   rhat  mcse
 word.BABY.S  -0.0153    0.0376 [-0.0747  0.0533] 2.0026 0.9996 5e-04

## Random effect (Intercept: word.BEE)

            estimate std.error          HDI(89%)  ratio   rhat  mcse
 word.BEE.S  -0.0094    0.0402 [-0.0770  0.0515] 1.8234 0.9993 5e-04

## Random effect (Intercept: word.BIRTHDAY)

                 estimate std.error          HDI(89%)  ratio   rhat  mcse
 word.BIRTHDAY.S   0.0125    0.0396 [-0.0564  0.0713] 1.8907 0.9995 4e-04

## Random effect (Intercept: word.EGYPT)

              estimate std.error          HDI(89%)  ratio   rhat  mcse
 word.EGYPT.S   0.0041    0.0404 [-0.0596  0.0715] 2.2487 0.9997 5e-04

## Random effect (Intercept: word.GATE)

             estimate std.error          HDI(89%)  ratio   rhat  mcse
 word.GATE.S   0.0144    0.0414 [-0.0525  0.0787] 1.7065 0.9994 5e-04

## Random effect (Intercept: word.HE)

           estimate std.error          HDI(89%)  ratio   rhat  mcse
 word.HE.S    9e-04    0.0397 [-0.0640  0.0637] 1.6297 0.9994 4e-04

## Random effect (Intercept: word.INVESTIGATOR)

                     estimate std.error          HDI(89%)  ratio   rhat  mcse
 word.INVESTIGATOR.S  -0.0105    0.0395 [-0.0718  0.0578] 1.9097 0.9993 5e-04

## Random effect (Intercept: word.M)

            estimate std.error          HDI(89%)  ratio   rhat  mcse
 word.M.KAY  -0.0095    0.0406 [-0.0750  0.0542] 1.8924 0.9993 5e-04

## Random effect (Intercept: word.NAME)

             estimate std.error          HDI(89%)  ratio   rhat  mcse
 word.NAME.S  -0.0123    0.0379 [-0.0795  0.0479] 1.8993 0.9997 4e-04

## Random effect (Intercept: word.PAPER)

              estimate std.error          HDI(89%)  ratio   rhat  mcse
 word.PAPER.S  -0.0032    0.0396 [-0.0616  0.0624] 2.0418 0.9993 5e-04

## Random effect (Intercept: word.SHE)

            estimate std.error          HDI(89%)  ratio   rhat  mcse
 word.SHE.S   0.0111    0.0395 [-0.0500  0.0729] 1.9159 0.9999 5e-04

## Random effect (Intercept: word.THEY)

              estimate std.error          HDI(89%)  ratio   rhat  mcse
 word.THEY.D    0.0013    0.0378 [-0.0653  0.0609] 1.7357 0.9995 5e-04
 word.THEY.VE  -0.0088    0.0341 [-0.0634  0.0461] 1.8412 0.9994 5e-04

## Random effect (Intercept: word.TODAY)

              estimate std.error          HDI(89%)  ratio   rhat  mcse
 word.TODAY.S   0.0074    0.0417 [-0.0573  0.0765] 1.9459 0.9994 5e-04

## Random effect (Intercept: word.WE)

            estimate std.error          HDI(89%)  ratio   rhat  mcse
 word.WE.VE   0.0158    0.0416 [-0.0537  0.0805] 1.6836 0.9995 5e-04
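
The same by-participant adjustments can also be pulled straight from the fitted model with ranef(), which avoids the long per-word listing and the mangled group names above (standard brms accessor; a sketch):

# Participant-level deviations with 89% posterior intervals
ranef(face_m2, probs = c(0.055, 0.945))$participant
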
dimnames(face_posterior2)
$iterations
NULL

$parameters
  [1] "b_Intercept"                            
  [2] "b_Log_dur_z"                            
  [3] "b_vowelfleece"                          
  [4] "b_agechild"                             
  [5] "b_sexF"                                 
  [6] "b_fol_segfol_approximant"               
  [7] "b_fol_segfol_nasal"                     
  [8] "b_fol_segfol_voiced_C"                  
  [9] "b_fol_segfol_voiced_clus"               
 [10] "b_fol_segfol_voiceless_C"               
 [11] "b_fol_segfol_voiceless_clus"            
 [12] "b_fol_segother"                         
 [13] "b_vowelfleece:agechild"                 
 [14] "b_vowelfleece:sexF"                     
 [15] "b_agechild:sexF"                        
 [16] "b_vowelfleece:agechild:sexF"            
 [17] "sd_participant__Intercept"              
 [18] "sd_participant__vowelfleece"            
 [19] "sd_word__Intercept"                     
 [20] "cor_participant__Intercept__vowelfleece"
 [21] "sigma"                                  
 [22] "r_participant[Ali,Intercept]"           
 [23] "r_participant[Amanda,Intercept]"        
 [24] "r_participant[CB,Intercept]"            
 [25] "r_participant[Chantelle,Intercept]"     
 [26] "r_participant[ChrisB,Intercept]"        
 [27] "r_participant[Daniel,Intercept]"        
 [28] "r_participant[Denzel,Intercept]"        
 [29] "r_participant[F1,Intercept]"            
 [30] "r_participant[F10,Intercept]"           
 [31] "r_participant[F3,Intercept]"            
 [32] "r_participant[F4,Intercept]"            
 [33] "r_participant[F7,Intercept]"            
 [34] "r_participant[F8,Intercept]"            
 [35] "r_participant[F9,Intercept]"            
 [36] "r_participant[GW,Intercept]"            
 [37] "r_participant[Ibrahim,Intercept]"       
 [38] "r_participant[Jessica,Intercept]"       
 [39] "r_participant[Joe,Intercept]"           
 [40] "r_participant[Kai,Intercept]"           
 [41] "r_participant[Khadir,Intercept]"        
 [42] "r_participant[Lola,Intercept]"          
 [43] "r_participant[Lucy,Intercept]"          
 [44] "r_participant[M1,Intercept]"            
 [45] "r_participant[M3,Intercept]"            
 [46] "r_participant[M4,Intercept]"            
 [47] "r_participant[M5,Intercept]"            
 [48] "r_participant[M6,Intercept]"            
 [49] "r_participant[M7,Intercept]"            
 [50] "r_participant[M8,Intercept]"            
 [51] "r_participant[Matisse,Intercept]"       
 [52] "r_participant[Moses,Intercept]"         
 [53] "r_participant[Omar,Intercept]"          
 [54] "r_participant[Sami,Intercept]"          
 [55] "r_participant[SD,Intercept]"            
 [56] "r_participant[Shantel,Intercept]"       
 [57] "r_participant[Tariq,Intercept]"         
 [58] "r_participant[Tony,Intercept]"          
 [59] "r_participant[ZR,Intercept]"            
 [60] "r_participant[Ali,vowelfleece]"         
 [61] "r_participant[Amanda,vowelfleece]"      
 [62] "r_participant[CB,vowelfleece]"          
 [63] "r_participant[Chantelle,vowelfleece]"   
 [64] "r_participant[ChrisB,vowelfleece]"      
 [65] "r_participant[Daniel,vowelfleece]"      
 [66] "r_participant[Denzel,vowelfleece]"      
 [67] "r_participant[F1,vowelfleece]"          
 [68] "r_participant[F10,vowelfleece]"         
 [69] "r_participant[F3,vowelfleece]"          
 [70] "r_participant[F4,vowelfleece]"          
 [71] "r_participant[F7,vowelfleece]"          
 [72] "r_participant[F8,vowelfleece]"          
 [73] "r_participant[F9,vowelfleece]"          
 [74] "r_participant[GW,vowelfleece]"          
 [75] "r_participant[Ibrahim,vowelfleece]"     
 [76] "r_participant[Jessica,vowelfleece]"     
 [77] "r_participant[Joe,vowelfleece]"         
 [78] "r_participant[Kai,vowelfleece]"         
 [79] "r_participant[Khadir,vowelfleece]"      
 [80] "r_participant[Lola,vowelfleece]"        
 [81] "r_participant[Lucy,vowelfleece]"        
 [82] "r_participant[M1,vowelfleece]"          
 [83] "r_participant[M3,vowelfleece]"          
 [84] "r_participant[M4,vowelfleece]"          
 [85] "r_participant[M5,vowelfleece]"          
 [86] "r_participant[M6,vowelfleece]"          
 [87] "r_participant[M7,vowelfleece]"          
 [88] "r_participant[M8,vowelfleece]"          
 [89] "r_participant[Matisse,vowelfleece]"     
 [90] "r_participant[Moses,vowelfleece]"       
 [91] "r_participant[Omar,vowelfleece]"        
 [92] "r_participant[Sami,vowelfleece]"        
 [93] "r_participant[SD,vowelfleece]"          
 [94] "r_participant[Shantel,vowelfleece]"     
 [95] "r_participant[Tariq,vowelfleece]"       
 [96] "r_participant[Tony,vowelfleece]"        
 [97] "r_participant[ZR,vowelfleece]"          
 [98] "r_word[((APRIL)),Intercept]"            
 [99] "r_word[((BAYING)),Intercept]"           
[100] "r_word[((EAT)),Intercept]"              
[101] "r_word[((EIGHTEENTH)),Intercept]"       
[102] "r_word[((FACE)),Intercept]"             
[103] "r_word[((SAYING)),Intercept]"           
[104] "r_word[A,Intercept]"                    
[105] "r_word[ABBREVIATIONS,Intercept]"        
[106] "r_word[ABLE,Intercept]"                 
[107] "r_word[ACE,Intercept]"                  
[108] "r_word[ACHE,Intercept]"                 
[109] "r_word[ACQUAINTANCES,Intercept]"        
[110] "r_word[AEROPLANE,Intercept]"            
[111] "r_word[AGE,Intercept]"                  
[112] "r_word[AGES,Intercept]"                 
[113] "r_word[AIM,Intercept]"                  
[114] "r_word[AIN'T,Intercept]"                
[115] "r_word[ALWAYS,Intercept]"               
[116] "r_word[AMAZINGLY,Intercept]"            
[117] "r_word[ANYWAY,Intercept]"               
[118] "r_word[ANYWAYS,Intercept]"              
[119] "r_word[APPRECIATE,Intercept]"           
[120] "r_word[APRIL,Intercept]"                
[121] "r_word[ASIAN,Intercept]"                
[122] "r_word[ASIAN'S,Intercept]"              
[123] "r_word[ATE,Intercept]"                  
[124] "r_word[AWAY,Intercept]"                 
[125] "r_word[BABIES,Intercept]"               
[126] "r_word[BABY,Intercept]"                 
[127] "r_word[BABY'S,Intercept]"               
[128] "r_word[BAIT,Intercept]"                 
[129] "r_word[BAJAN,Intercept]"                
[130] "r_word[BARBADOS,Intercept]"             
[131] "r_word[BASED,Intercept]"                
[132] "r_word[BASICALLY,Intercept]"            
[133] "r_word[BE,Intercept]"                   
[134] "r_word[BEACH,Intercept]"                
[135] "r_word[BEAT,Intercept]"                 
[136] "r_word[BEATING,Intercept]"              
[137] "r_word[BECAME,Intercept]"               
[138] "r_word[BEE,Intercept]"                  
[139] "r_word[BEE'S,Intercept]"                
[140] "r_word[BEEF,Intercept]"                 
[141] "r_word[BEEPING,Intercept]"              
[142] "r_word[BEES,Intercept]"                 
[143] "r_word[BEHAVIOR,Intercept]"             
[144] "r_word[BEHAVIOUR,Intercept]"            
[145] "r_word[BEING,Intercept]"                
[146] "r_word[BELIEVE,Intercept]"              
[147] "r_word[BIRTHDAY,Intercept]"             
[148] "r_word[BIRTHDAY'S,Intercept]"           
[149] "r_word[BLAME,Intercept]"                
[150] "r_word[BLAZING,Intercept]"              
[151] "r_word[BRAIN,Intercept]"                
[152] "r_word[BRAVE,Intercept]"                
[153] "r_word[BREAK,Intercept]"                
[154] "r_word[BREAKING,Intercept]"             
[155] "r_word[CAKE,Intercept]"                 
[156] "r_word[CAME,Intercept]"                 
[157] "r_word[CAPABLE,Intercept]"              
[158] "r_word[CASE,Intercept]"                 
[159] "r_word[CDS,Intercept]"                  
[160] "r_word[CELEBRATE,Intercept]"            
[161] "r_word[CELEBRATING,Intercept]"          
[162] "r_word[CHANGE,Intercept]"               
[163] "r_word[CHANGED,Intercept]"              
[164] "r_word[CHANGES,Intercept]"              
[165] "r_word[CHANGING,Intercept]"             
[166] "r_word[CHASE,Intercept]"                
[167] "r_word[CHASED,Intercept]"               
[168] "r_word[CHASING,Intercept]"              
[169] "r_word[CHEESE,Intercept]"               
[170] "r_word[CHELSEA,Intercept]"              
[171] "r_word[CHINESE,Intercept]"              
[172] "r_word[CLAIM,Intercept]"                
[173] "r_word[CLAIMED,Intercept]"              
[174] "r_word[CLAIMING,Intercept]"             
[175] "r_word[COMMUNICATE,Intercept]"          
[176] "r_word[COMMUNICATION,Intercept]"        
[177] "r_word[COMPLICATED,Intercept]"          
[178] "r_word[CONVERSATION,Intercept]"         
[179] "r_word[CONVERSATIONS,Intercept]"        
[180] "r_word[CRAZY,Intercept]"                
[181] "r_word[CREATE,Intercept]"               
[182] "r_word[CREATED,Intercept]"              
[183] "r_word[CREATING,Intercept]"             
[184] "r_word[CREEPY,Intercept]"               
[185] "r_word[CUPCAKE,Intercept]"              
[186] "r_word[D,Intercept]"                    
[187] "r_word[DAISY,Intercept]"                
[188] "r_word[DANGEROUS,Intercept]"            
[189] "r_word[DATE,Intercept]"                 
[190] "r_word[DATES,Intercept]"                
[191] "r_word[DAY,Intercept]"                  
[192] "r_word[DAYLIGHT,Intercept]"             
[193] "r_word[DAYS,Intercept]"                 
[194] "r_word[DICTATE,Intercept]"              
[195] "r_word[DICTATING,Intercept]"            
[196] "r_word[DISABLED,Intercept]"             
[197] "r_word[DISGRACEFUL,Intercept]"          
[198] "r_word[DONATE,Intercept]"               
[199] "r_word[DONATED,Intercept]"              
[200] "r_word[E,Intercept]"                    
[201] "r_word[EACH,Intercept]"                 
[202] "r_word[EASIER,Intercept]"               
[203] "r_word[EASIEST,Intercept]"              
[204] "r_word[EASY,Intercept]"                 
[205] "r_word[EAT,Intercept]"                  
[206] "r_word[EATING,Intercept]"               
[207] "r_word[EDUCATION,Intercept]"            
[208] "r_word[EGYPT,Intercept]"                
[209] "r_word[EGYPT'S,Intercept]"              
[210] "r_word[EIGHT,Intercept]"                
[211] "r_word[EIGHTEEN,Intercept]"             
[212] "r_word[EIGHTH,Intercept]"               
[213] "r_word[EIGHTY,Intercept]"               
[214] "r_word[ELABORATE,Intercept]"            
[215] "r_word[ELABORATED,Intercept]"           
[216] "r_word[EQUALLY,Intercept]"              
[217] "r_word[ESCALATES,Intercept]"            
[218] "r_word[ESCAPE,Intercept]"               
[219] "r_word[ESCAPED,Intercept]"              
[220] "r_word[ESTATE,Intercept]"               
[221] "r_word[EVEN,Intercept]"                 
[222] "r_word[EXPLAIN,Intercept]"              
[223] "r_word[EXPLAINING,Intercept]"           
[224] "r_word[FACE,Intercept]"                 
[225] "r_word[FACEBOOK,Intercept]"             
[226] "r_word[FACED,Intercept]"                
[227] "r_word[FACES,Intercept]"                
[228] "r_word[FAKE,Intercept]"                 
[229] "r_word[FAMOUS,Intercept]"               
[230] "r_word[FAVORITE,Intercept]"             
[231] "r_word[FAVOURITE,Intercept]"            
[232] "r_word[FEAST,Intercept]"                
[233] "r_word[FEET,Intercept]"                 
[234] "r_word[FLAMES,Intercept]"               
[235] "r_word[FLIRTATIOUS,Intercept]"          
[236] "r_word[FRIDAY,Intercept]"               
[237] "r_word[GAME,Intercept]"                 
[238] "r_word[GAMES,Intercept]"                
[239] "r_word[GATE,Intercept]"                 
[240] "r_word[GATE'S,Intercept]"               
[241] "r_word[GAVE,Intercept]"                 
[242] "r_word[GAY,Intercept]"                  
[243] "r_word[GAYS,Intercept]"                 
[244] "r_word[GEE,Intercept]"                  
[245] "r_word[GEES,Intercept]"                 
[246] "r_word[GENERATION,Intercept]"           
[247] "r_word[GRADES,Intercept]"               
[248] "r_word[GRATEFUL,Intercept]"             
[249] "r_word[GREAT,Intercept]"                
[250] "r_word[GRENADA,Intercept]"              
[251] "r_word[GREY,Intercept]"                 
[252] "r_word[H,Intercept]"                    
[253] "r_word[HAIRSPRAY,Intercept]"            
[254] "r_word[HATE,Intercept]"                 
[255] "r_word[HATED,Intercept]"                
[256] "r_word[HAY,Intercept]"                  
[257] "r_word[HAYS,Intercept]"                 
[258] "r_word[HE,Intercept]"                   
[259] "r_word[HE'S,Intercept]"                 
[260] "r_word[HEY,Intercept]"                  
[261] "r_word[HEYA,Intercept]"                 
[262] "r_word[HOLIDAY,Intercept]"              
[263] "r_word[HOLIDAYS,Intercept]"             
[264] "r_word[INDICATE,Intercept]"             
[265] "r_word[INSPIRATION,Intercept]"          
[266] "r_word[INSPIRATIONAL,Intercept]"        
[267] "r_word[INTEGRATED,Intercept]"           
[268] "r_word[INVADING,Intercept]"             
[269] "r_word[INVESTIGATOR'S,Intercept]"       
[270] "r_word[ISOLATION,Intercept]"            
[271] "r_word[ISRAELI,Intercept]"              
[272] "r_word[J,Intercept]"                    
[273] "r_word[JAIL,Intercept]"                 
[274] "r_word[JAKES,Intercept]"                
[275] "r_word[JAMAICAN,Intercept]"             
[276] "r_word[JAMIE,Intercept]"                
[277] "r_word[K,Intercept]"                    
[278] "r_word[KAY,Intercept]"                  
[279] "r_word[KEEP,Intercept]"                 
[280] "r_word[KEY,Intercept]"                  
[281] "r_word[LABELLED,Intercept]"             
[282] "r_word[LABELS,Intercept]"               
[283] "r_word[LADIES,Intercept]"               
[284] "r_word[LADY,Intercept]"                 
[285] "r_word[LANE,Intercept]"                 
[286] "r_word[LATE,Intercept]"                 
[287] "r_word[LATER,Intercept]"                
[288] "r_word[LAYING,Intercept]"               
[289] "r_word[LEAD,Intercept]"                 
[290] "r_word[LEAVE,Intercept]"                
[291] "r_word[LEAVES,Intercept]"               
[292] "r_word[LEAVING,Intercept]"              
[293] "r_word[LEGAL,Intercept]"                
[294] "r_word[LEGALLY,Intercept]"              
[295] "r_word[M'KAY,Intercept]"                
[296] "r_word[MADE,Intercept]"                 
[297] "r_word[MAIN,Intercept]"                 
[298] "r_word[MAINLY,Intercept]"               
[299] "r_word[MAINSTREAM,Intercept]"           
[300] "r_word[MAISONETTE,Intercept]"           
[301] "r_word[MAJOR,Intercept]"                
[302] "r_word[MAJORLY,Intercept]"              
[303] "r_word[MAKE,Intercept]"                 
[304] "r_word[MAKEOVER,Intercept]"             
[305] "r_word[MAKES,Intercept]"                
[306] "r_word[MAKING,Intercept]"               
[307] "r_word[MATES,Intercept]"                
[308] "r_word[MAY,Intercept]"                  
[309] "r_word[MAYBE,Intercept]"                
[310] "r_word[ME,Intercept]"                   
[311] "r_word[MEDIA,Intercept]"                
[312] "r_word[MEDICATION,Intercept]"           
[313] "r_word[MEET,Intercept]"                 
[314] "r_word[MISTAKE,Intercept]"              
[315] "r_word[MISTAKES,Intercept]"             
[316] "r_word[MKAY,Intercept]"                 
[317] "r_word[MONDAY,Intercept]"               
[318] "r_word[NAKED,Intercept]"                
[319] "r_word[NAME,Intercept]"                 
[320] "r_word[NAME'S,Intercept]"               
[321] "r_word[NAMES,Intercept]"                
[322] "r_word[NATURE,Intercept]"               
[323] "r_word[NAVY,Intercept]"                 
[324] "r_word[NEED,Intercept]"                 
[325] "r_word[NEIGHBORING,Intercept]"          
[326] "r_word[NEIGHBOUR,Intercept]"            
[327] "r_word[NEIGHBOURS,Intercept]"           
[328] "r_word[NEWSPAPERS,Intercept]"           
[329] "r_word[NICKNAME,Intercept]"             
[330] "r_word[NOMINATED,Intercept]"            
[331] "r_word[NOWADAYS,Intercept]"             
[332] "r_word[OCCASIONS,Intercept]"            
[333] "r_word[OKAY,Intercept]"                 
[334] "r_word[ORIGINATED,Intercept]"           
[335] "r_word[ORIGINATORS,Intercept]"          
[336] "r_word[OVERRATED,Intercept]"            
[337] "r_word[PAGE,Intercept]"                 
[338] "r_word[PAGES,Intercept]"                
[339] "r_word[PAID,Intercept]"                 
[340] "r_word[PAIN,Intercept]"                 
[341] "r_word[PAINT,Intercept]"                
[342] "r_word[PAINTED,Intercept]"              
[343] "r_word[PAINTING,Intercept]"             
[344] "r_word[PAPER,Intercept]"                
[345] "r_word[PAPER'S,Intercept]"              
[346] "r_word[PAVEMENT,Intercept]"             
[347] "r_word[PAY,Intercept]"                  
[348] "r_word[PAYING,Intercept]"               
[349] "r_word[PAYS,Intercept]"                 
[350] "r_word[PEACE,Intercept]"                
[351] "r_word[PEEKING,Intercept]"              
[352] "r_word[PEOPLE,Intercept]"               
[353] "r_word[PIECE,Intercept]"                
[354] "r_word[PIECES,Intercept]"               
[355] "r_word[PLACE,Intercept]"                
[356] "r_word[PLACES,Intercept]"               
[357] "r_word[PLATE,Intercept]"                
[358] "r_word[PLAY,Intercept]"                 
[359] "r_word[PLAYED,Intercept]"               
[360] "r_word[PLAYER,Intercept]"               
[361] "r_word[PLAYING,Intercept]"              
[362] "r_word[PLAYS,Intercept]"                
[363] "r_word[PLAYSTATION,Intercept]"          
[364] "r_word[PLAYTIME,Intercept]"             
[365] "r_word[PLEASE,Intercept]"               
[366] "r_word[POLICE,Intercept]"               
[367] "r_word[PORTRAY,Intercept]"              
[368] "r_word[PRAY,Intercept]"                 
[369] "r_word[PRAYED,Intercept]"               
[370] "r_word[PRAYING,Intercept]"              
[371] "r_word[PREVAIL,Intercept]"              
[372] "r_word[RACE,Intercept]"                 
[373] "r_word[RACIAL,Intercept]"               
[374] "r_word[RACING,Intercept]"               
[375] "r_word[RACISM,Intercept]"               
[376] "r_word[RACIST,Intercept]"               
[377] "r_word[RAIN,Intercept]"                 
[378] "r_word[RAINBOW,Intercept]"              
[379] "r_word[RAINY,Intercept]"                
[380] "r_word[RAISE,Intercept]"                
[381] "r_word[RAISED,Intercept]"               
[382] "r_word[RAISING,Intercept]"              
[383] "r_word[RAPE,Intercept]"                 
[384] "r_word[READ,Intercept]"                 
[385] "r_word[RELATED,Intercept]"              
[386] "r_word[RELATIONSHIP,Intercept]"         
[387] "r_word[RELAY,Intercept]"                
[388] "r_word[RETALIATING,Intercept]"          
[389] "r_word[ROLLERBLADES,Intercept]"         
[390] "r_word[SAFE,Intercept]"                 
[391] "r_word[SAFER,Intercept]"                
[392] "r_word[SAFETY,Intercept]"               
[393] "r_word[SAINSBURYS,Intercept]"           
[394] "r_word[SAKE,Intercept]"                 
[395] "r_word[SAME,Intercept]"                 
[396] "r_word[SAVE,Intercept]"                 
[397] "r_word[SAVED,Intercept]"                
[398] "r_word[SAY,Intercept]"                  
[399] "r_word[SAYING,Intercept]"               
[400] "r_word[SCARFACE,Intercept]"             
[401] "r_word[SEATS,Intercept]"                
[402] "r_word[SEE,Intercept]"                  
[403] "r_word[SEEING,Intercept]"               
[404] "r_word[SEES,Intercept]"                 
[405] "r_word[SEPARATED,Intercept]"            
[406] "r_word[SHAKING,Intercept]"              
[407] "r_word[SHAPE,Intercept]"                
[408] "r_word[SHE,Intercept]"                  
[409] "r_word[SHE'S,Intercept]"                
[410] "r_word[SHEEP,Intercept]"                
[411] "r_word[SHEEPS,Intercept]"               
[412] "r_word[SHEETS,Intercept]"               
[413] "r_word[SITUATION,Intercept]"            
[414] "r_word[SITUATIONS,Intercept]"           
[415] "r_word[SLEEP,Intercept]"                
[416] "r_word[SLEEPING,Intercept]"             
[417] "r_word[SNAKE,Intercept]"                
[418] "r_word[SNEEZE,Intercept]"               
[419] "r_word[SPACE,Intercept]"                
[420] "r_word[SPEAK,Intercept]"                
[421] "r_word[SPEAKING,Intercept]"             
[422] "r_word[SPEAKS,Intercept]"               
[423] "r_word[SPRAYING,Intercept]"             
[424] "r_word[STABLE,Intercept]"               
[425] "r_word[STAGE,Intercept]"                
[426] "r_word[STAGED,Intercept]"               
[427] "r_word[STAGES,Intercept]"               
[428] "r_word[STATEMENT,Intercept]"            
[429] "r_word[STATION,Intercept]"              
[430] "r_word[STAY,Intercept]"                 
[431] "r_word[STAYED,Intercept]"               
[432] "r_word[STAYING,Intercept]"              
[433] "r_word[STEAK,Intercept]"                
[434] "r_word[STRAIGHT,Intercept]"             
[435] "r_word[STRAYED,Intercept]"              
[436] "r_word[STREET,Intercept]"               
[437] "r_word[SUNDAYS,Intercept]"              
[438] "r_word[T,Intercept]"                    
[439] "r_word[TABLE,Intercept]"                
[440] "r_word[TAISER,Intercept]"               
[441] "r_word[TAISERED,Intercept]"             
[442] "r_word[TAKE,Intercept]"                 
[443] "r_word[TAKEAWAY,Intercept]"             
[444] "r_word[TAKEN,Intercept]"                
[445] "r_word[TAKES,Intercept]"                
[446] "r_word[TAKING,Intercept]"               
[447] "r_word[TEA,Intercept]"                  
[448] "r_word[TEACHER,Intercept]"              
[449] "r_word[TEACHERS,Intercept]"             
[450] "r_word[TEENAGERS,Intercept]"            
[451] "r_word[THESE,Intercept]"                
[452] "r_word[THEY,Intercept]"                 
[453] "r_word[THEY'D,Intercept]"               
[454] "r_word[THEY'VE,Intercept]"              
[455] "r_word[THREE,Intercept]"                
[456] "r_word[TODAY,Intercept]"                
[457] "r_word[TODAY'S,Intercept]"              
[458] "r_word[TRAINED,Intercept]"              
[459] "r_word[TRAINERS,Intercept]"             
[460] "r_word[TRAINING,Intercept]"             
[461] "r_word[TRAINS,Intercept]"               
[462] "r_word[TREE,Intercept]"                 
[463] "r_word[TREES,Intercept]"                
[464] "r_word[TUESDAY,Intercept]"              
[465] "r_word[V,Intercept]"                    
[466] "r_word[VIBRATING,Intercept]"            
[467] "r_word[WAIST,Intercept]"                
[468] "r_word[WAISTLINE,Intercept]"            
[469] "r_word[WAIT,Intercept]"                 
[470] "r_word[WAITED,Intercept]"               
[471] "r_word[WAITING,Intercept]"              
[472] "r_word[WAKE,Intercept]"                 
[473] "r_word[WAKES,Intercept]"                
[474] "r_word[WASTE,Intercept]"                
[475] "r_word[WASTING,Intercept]"              
[476] "r_word[WAY,Intercept]"                  
[477] "r_word[WAYS,Intercept]"                 
[478] "r_word[WE,Intercept]"                   
[479] "r_word[WE'VE,Intercept]"                
[480] "r_word[WEDNESDAY,Intercept]"            
[481] "r_word[WORKPLACE,Intercept]"            
[482] "r_word[YESTERDAY,Intercept]"            
[483] "lp__"                                   

---
title: "UKLVC analysis: FACE"
author: "Rosie Oxbury"
date: "September 2019"
output:
  html_notebook:
    toc: true
    toc_float: true
    number_sections: true
    toc_depth: 3
---
Load packages:
```{r}
library(tidyverse)
library(brms)
library(sjstats)
library(bayesplot)
```

Load data:
```{r}
data <- read_csv("data/tidied_data.csv")

str(data)

data <- data %>% mutate(
  participant = factor(participant),
  vowel = factor(vowel),
  word = factor(word),
  task = factor(task),
  fol.phonOLOG.seg = factor(fol.phonOLOG.seg),
  fol.PHONETIC.seg = factor(fol.PHONETIC.seg),
  age = factor(age),
  sex = factor(sex),
  rol.var = factor(rol.var),
  face.l = factor(face.l),
  price.l = factor(price.l),
  fol_seg = factor(fol_seg)
)
```

Subset the key vowels:
```{r}
face_dat <- data %>% filter(vowel == "face" | vowel == "fleece")
```

# EDA
```{r}
ggplot(face_dat, aes(x = duration, fill = age)) + geom_histogram(bins = 100)
```
Keep only tokens with duration under 0.75 s.
```{r}
face2 <- face_dat %>% filter(duration < 0.75)
```
Try plotting again
```{r}
ggplot(face2, aes(x = duration, fill = age)) + geom_histogram(bins = 100)
```

Still some outliers, but it's better.

# Data transformations
Log-transform duration
```{r}
face3 <- mutate(face2,
               LogDur = log10(duration))
```

... and standardize it
```{r}
face3 <- mutate(face3,
                # as.numeric() drops the matrix attributes that scale() attaches
                Log_dur_z = as.numeric(scale(LogDur)))
```

Relevel the sex factor so that M is the reference level:
```{r}
face3$sex <- relevel(face3$sex, ref = "M")
```

Recode following segment so that 'pause' is the reference level:
```{r}
face3$fol_seg <- relevel(face3$fol_seg, ref = "fol_pause")
```

# FACE F1
Select just FACE:
```{r}
just_face <- face3 %>% filter(vowel=="face")
```

Check distribution
```{r}
ggplot(just_face, aes(x = normF1_20)) + geom_histogram(bins = 100)
```

Check how F1 changes with duration
```{r}
ggplot(just_face, aes(x = Log_dur_z, y = normF1_20, color = age, shape = sex)) + geom_point() +
  geom_smooth()
```
Run a model. I'm not specifying priors because I don't feel confident choosing them, so brms falls back on its defaults (flat priors on the population-level effects). A sketch of how explicit priors could look follows this chunk.
```{r}
face_m1 <- brm(normF1_20 ~ Log_dur_z + age*sex + fol_seg + (1|participant) + (1|word), data = just_face)
```
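
If we later wanted to set priors explicitly rather than relying on the defaults, something along the lines of the sketch below could work; the scales here are illustrative guesses, not values checked against these data, and the chunk is not evaluated.
```{r eval=FALSE}
# Inspect the priors brms would use by default for this model
get_prior(normF1_20 ~ Log_dur_z + age*sex + fol_seg + (1|participant) + (1|word),
          data = just_face)

# Illustrative weakly informative priors (scales are guesses, not tuned to these data)
face_priors <- c(
  prior(normal(0, 1), class = "b"),           # population-level effects
  prior(normal(1, 1), class = "Intercept"),   # normalized F1 sits roughly around 1
  prior(exponential(1), class = "sd"),        # by-participant and by-word SDs
  prior(exponential(1), class = "sigma")      # residual SD
)

face_m1_wi <- brm(normF1_20 ~ Log_dur_z + age*sex + fol_seg +
                    (1|participant) + (1|word),
                  data = just_face, prior = face_priors)
```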
Tidy_stan summary:
```{r}
tidy_stan(face_m1, prob=0.89, type="fixed", digits=4)
```

Plot the coefficients:
```{r fig.width=6, fig.height=8}
face_posterior1 <- as.matrix(face_m1)

#dimnames(face_posterior1)

#color_scheme_set("brightblue")
mcmc_intervals(face_posterior1,
           pars = c("b_Log_dur_z",
                    "b_agechild", 
                    "b_sexF",
                    "b_fol_segfol_nasal",
                    "b_fol_segfol_approximant",
                    "b_fol_segfol_voiced_C",
                    "b_fol_segfol_voiced_clus",
                    "b_fol_segfol_voiceless_C",
                    "b_fol_segfol_voiceless_clus",
                    "b_fol_segother",
                    "b_agechild:sexF"),
           prob=0.89) + ggplot2::theme_minimal() + ggplot2::geom_vline(xintercept=0)

mcmc_areas(face_posterior1,
           pars = c("b_Log_dur_z",
                    "b_agechild", 
                    "b_sexF",
                    "b_fol_segfol_nasal",
                    "b_fol_segfol_approximant",
                    "b_fol_segfol_voiced_C",
                    "b_fol_segfol_voiced_clus",
                    "b_fol_segfol_voiceless_C",
                    "b_fol_segfol_voiceless_clus",
                    "b_fol_segother",
                    "b_agechild:sexF"),
           prob=0.89) + ggplot2::theme_minimal() + ggplot2::geom_vline(xintercept=0)
```

This tells us that:

- There is a small but clearly positive effect of duration on F1: an increase in duration (a longer vowel) predicts a higher F1, i.e. a more open onset to FACE.

- The posteriors on the coefficients for age, sex and their interaction are very spread out, so we should not infer too much from them (see the sketch after this list for one way to quantify this). The point estimates suggest that children are predicted to have a *lower* F1 than adolescents; that adolescent girls are predicted to have a *higher* F1 than adolescent boys; and that there is a negative age-sex interaction, such that the direction of the sex difference seen in the adolescents (girls having higher F1) is reversed for the children, i.e. female children tend to have a lower F1 than male children.

- A following nasal predicts a higher F1; a following approximant, voiced consonant, or voiced consonant cluster predicts a lower F1.
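
One way to put a number on how spread out these posteriors are is to compute, from the posterior draws we already extracted, the proportion of draws on each side of zero; a quick sketch (not evaluated here):
```{r eval=FALSE}
# Share of posterior draws on each side of zero ("probability of direction")
mean(face_posterior1[, "b_Log_dur_z"] > 0)       # duration effect positive?
mean(face_posterior1[, "b_agechild"] < 0)        # children lower F1 than adolescents?
mean(face_posterior1[, "b_sexF"] > 0)            # adolescent girls higher F1 than boys?
mean(face_posterior1[, "b_agechild:sexF"] < 0)   # negative age-sex interaction?
```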

## F1 boxplot

Define palette:
```{r}
sexes <- c("#999999", "#FFFFFF")

pd <- position_dodge(0.9)
```

Plot:
```{r fig.width=8, fig.height=8}
o1 <- ggplot(data = just_face, aes(x = age, y = normF1_20, fill = sex)) + 
  geom_boxplot(width = 0.3, position = pd) +
  theme_minimal() + 
  scale_fill_manual(values = sexes) +
   theme(panel.grid.major = element_blank(),
        panel.grid.minor = element_blank(),
        legend.title = element_text(size = rel(1.5)),
        legend.text = element_text(size = rel(1.5)),
        axis.title = element_text(size = rel(2)), 
        axis.text = element_text(size = rel(2)), 
        plot.title = element_text(size = rel(2.5)), 
        plot.margin=unit(c(0.7,0.7,0.7,0.7),"cm")) +
  labs(x = "Age", y = "Normalized F1 at 20%", 
       title = "FACE: normalized F1 at onset (20%)")

o1
```


# FACE Trajectory

Check for outliers:
```{r}
ggplot(face3, aes(x = norm_TL, fill=vowel)) + geom_histogram(bins = 100)
```

All good.

Build the model:
```{r}
face_m2 <- brm(norm_TL ~ Log_dur_z + vowel*age*sex + fol_seg + (1+vowel|participant) + (1|word), data = face3)
```
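
Before interpreting the coefficients, it is worth a quick check of convergence and fit; a minimal sketch (the exact output depends on the fitted chains, so the chunk is not evaluated here):
```{r eval=FALSE}
summary(face_m2)   # check that Rhat values are close to 1 and effective sample sizes look reasonable
pp_check(face_m2)  # posterior predictive check: simulated norm_TL distributions vs. observed
```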

Check the summary and posteriors:
```{r fig.width=6, fig.height=8}
tidy_stan(face_m2, prob=0.89, type="fixed", digits=4)

face_posterior2 <- as.matrix(face_m2)

#dimnames(face_posterior1)

#color_scheme_set("brightblue")
mcmc_intervals(face_posterior2,
           pars = c("b_Log_dur_z",
                    "b_vowelfleece",
                    "b_agechild", 
                    "b_sexF",
                    "b_fol_segfol_nasal",
                    "b_fol_segfol_approximant",
                    "b_fol_segfol_voiced_C",
                    "b_fol_segfol_voiced_clus",
                    "b_fol_segfol_voiceless_C",
                    "b_fol_segfol_voiceless_clus",
                    "b_fol_segother",
                    "b_agechild:sexF"),
           prob=0.89) + ggplot2::theme_minimal() + ggplot2::geom_vline(xintercept=0)

mcmc_areas(face_posterior2,
           pars = c("b_Log_dur_z",
                    "b_vowelfleece",
                    "b_agechild", 
                    "b_sexF",
                    "b_fol_segfol_nasal",
                    "b_fol_segfol_approximant",
                    "b_fol_segfol_voiced_C",
                    "b_fol_segfol_voiced_clus",
                    "b_fol_segfol_voiceless_C",
                    "b_fol_segfol_voiceless_clus",
                    "b_fol_segother",
                    "b_agechild:sexF"),
           prob=0.89) + ggplot2::theme_minimal() + ggplot2::geom_vline(xintercept=0)
```

What this tells us is that:

- An increase in duration predicts a greater Trajectory Length

- FLEECE tends to have a smaller Trajectory Length than FACE. However, 0 is inside the 89% credible interval, so the data are also consistent with there being no difference between the two vowels (see the sketch after this list). In the 50% interval plot below, 0 even falls inside the 50% credible interval, so we have no grounds to conclude that FACE is any more diphthongal than FLEECE.

- Being a child as opposed to adolescent, and being adolescent female as opposed to adolescent male, both predict greater Trajectory Length -- i.e. adolescent males have the most monophthongal FACE

- A following nasal or approximant predicts a greater Trajectory Length; otherwise the various following segments have median coefficients pretty close to 0

- There is a negative age-sex interaction. Female children are more monophthongal than male children.
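
For the vowel effect specifically, brms's `hypothesis()` can summarise how much of the posterior lies below zero; a sketch, not run against this particular fit:
```{r eval=FALSE}
# Posterior evidence that FLEECE has a shorter trajectory than FACE
hypothesis(face_m2, "vowelfleece < 0")
```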

```{r fig.width=6, fig.height=8}
mcmc_intervals(face_posterior2,
           pars = c("b_Log_dur_z",
                    "b_vowelfleece",
                    "b_agechild", 
                    "b_sexF",
                    "b_fol_segfol_nasal",
                    "b_fol_segfol_approximant",
                    "b_fol_segfol_voiced_C",
                    "b_fol_segfol_voiced_clus",
                    "b_fol_segfol_voiceless_C",
                    "b_fol_segfol_voiceless_clus",
                    "b_fol_segother",
                    "b_agechild:sexF"),
           prob=0.50) + ggplot2::theme_minimal() + ggplot2::geom_vline(xintercept=0)
```



Just out of interest, we can also look at the random effects, since we included by-participant random slopes for 'vowel' in the random-effects structure.

```{r fig.width=6, fig.height=10}
tidy_stan(face_m2, prob=0.89, type="random", digits=4)

dimnames(face_posterior2)

#color_scheme_set("brightblue")
mcmc_areas(face_posterior2,
           pars = c("r_participant[Ali,vowelfleece]",
                    "r_participant[Amanda,vowelfleece]",
                    "r_participant[CB,vowelfleece]",
                    "r_participant[Chantelle,vowelfleece]",
                    "r_participant[ChrisB,vowelfleece]",
                    "r_participant[Daniel,vowelfleece]",
                    "r_participant[Denzel,vowelfleece]",
                    "r_participant[F1,vowelfleece]",
                    "r_participant[F10,vowelfleece]",
                    "r_participant[F3,vowelfleece]",
                    "r_participant[F4,vowelfleece]",
                    "r_participant[F7,vowelfleece]",
                    "r_participant[F8,vowelfleece]",
                    "r_participant[F9,vowelfleece]",
                    "r_participant[GW,vowelfleece]",
                    "r_participant[Ibrahim,vowelfleece]",
                    "r_participant[Jessica,vowelfleece]",
                    "r_participant[Joe,vowelfleece]",
                    "r_participant[Kai,vowelfleece]",
                    "r_participant[Khadir,vowelfleece]",
                    "r_participant[Lola,vowelfleece]",
                    "r_participant[Lucy,vowelfleece]",
                    "r_participant[M1,vowelfleece]",
                    "r_participant[M3,vowelfleece]",
                    "r_participant[M4,vowelfleece]",
                    "r_participant[M5,vowelfleece]",
                    "r_participant[M6,vowelfleece]",
                    "r_participant[M7,vowelfleece]",
                    "r_participant[M8,vowelfleece]",
                    "r_participant[Matisse,vowelfleece]",
                    "r_participant[Moses,vowelfleece]",
                    "r_participant[Omar,vowelfleece]",
                    "r_participant[Sami,vowelfleece]",
                    "r_participant[SD,vowelfleece]",
                    "r_participant[Shantel,vowelfleece]",
                    "r_participant[Tariq,vowelfleece]",
                    "r_participant[Tony,vowelfleece]",
                    "r_participant[ZR,vowelfleece]"),
           prob=0.89) + ggplot2::theme_minimal() + ggplot2::geom_vline(xintercept=0)
```

The graph above shows how the Trajectory Lengths of FACE and FLEECE compare for individual speakers. For example, Ali shows essentially no difference between FACE and FLEECE. Child M4 has a more monophthongal FLEECE than FACE, while M3 shows the opposite pattern: he has more movement in FLEECE than in FACE.

The random slopes by speaker should be taken with a pinch of salt because we only took about 5 tokens of FLEECE per speaker!
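
As an aside, the long hand-typed `pars` vector above could probably be replaced with bayesplot's `regex_pars` argument; a minimal sketch, untested here:
```{r eval=FALSE}
mcmc_areas(face_posterior2,
           regex_pars = "^r_participant\\[.*,vowelfleece\\]",
           prob = 0.89) +
  ggplot2::theme_minimal() + ggplot2::geom_vline(xintercept = 0)
```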


## FACE Trajectory Length boxplot

Define colours for vowels FACE and FLEECE:
```{r}
vowel_cols <- c("#000000", "#E69F00")
```


```{r fig.width=10, fig.height=6}
f <- ggplot(face3, aes(x = sex, y = norm_TL, fill = vowel)) + facet_grid(cols = vars(age)) +
  geom_boxplot(width = 0.3, position = position_dodge(0.9)) + 
  theme_minimal() + 
  scale_fill_manual(values = vowel_cols) + 
   theme(legend.title = element_text(size = rel(2)),
        strip.text = element_text(size = rel(2)),
        legend.text = element_text(size = rel(2)),
        axis.title = element_text(size = rel(2)), 
        axis.text = element_text(size = rel(2)), 
        plot.title = element_text(size = rel(2.5))) +
  labs(x = "Age", y = "Trajectory Length", title = "FACE: Trajectory Length") +
  ylim(0, 2)
f
```
