The goal of this analysis is to select a set of stimuli that covers the whole range. These stimuli can later serve to triangulate between robot faces and biological faces.
The background is covered in NewStats - Polynomial Regression. The Koopmans data set comes as average response per robot face (Stimulus), and we have estimated a population-level polynomial model. For every stimulus we calculate the absolute error between the posterior center estimate and avg_like; then, per range bin, the stimulus with the lowest error is selected, which yields ten stimuli spanning the whole range.
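As a minimal illustration of how the range bins are formed (assuming the tidyverse is loaded, as elsewhere in this document): huMech scores are rounded to percent and cut into bins of width .1 by integer division.
library(tidyverse)
# Illustration only: huMech values taken from the result table below
tibble(huMech = c(0.0375, 0.1875, 0.5250, 0.9275)) %>%
  mutate(Range = round(huMech * 100) %/% 10)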
load("Uncanny.Rda")
attach(Uncanny)
## The following objects are masked _by_ .GlobalEnv:
##
## M_poly_2, M_poly_3, P_univ_uncanny
## The following object is masked from package:uncanny:
##
## trough
predict(M_poly_3) %>%
left_join(RK_2) %>%
mutate(Range = round(huMech * 100) %/% 10) %>%
mutate(error = abs(avg_like - center)) %>%
group_by(Range) %>%
mutate(Best = (error == min(error))) %>%
ungroup() %>%
filter(Best) %>%
select(Range, Stimulus, huMech, error) %>%
arrange(Range)
## Joining, by = "Obs"
| Range | Stimulus | huMech | error |
|---|---|---|---|
| 0 | 3 | 0.0375 | 1.127113 |
| 1 | 15 | 0.1875 | 1.102520 |
| 2 | 22 | 0.2750 | 1.151304 |
| 3 | 28 | 0.3500 | 1.136541 |
| 4 | 32 | 0.4000 | 1.175056 |
| 5 | 42 | 0.5250 | 1.150055 |
| 6 | 50 | 0.6250 | 1.261243 |
| 7 | 59 | 0.7375 | 1.235725 |
| 8 | 68 | 0.8500 | 1.118666 |
| 9 | 74.2 | 0.9275 | 1.130742 |
detach(Uncanny)
The numbers under Stimulus match the file names of the robot faces. These stimuli should be mixed with the biological faces.
Rádlová, S., Landová, E., & Frynta, D. (2018). Judging others by your own standards: Attractiveness of primate faces as seen by human respondents. Frontiers in Psychology, 9, 2439. https://doi.org/10.3389/fpsyg.2018.02439
Original plot
For the following analysis, the original data were reconstructed from the figure above and stored as UV_Prim. The data set has the same structure as RK_2 in NewStats/Polynomial regression.
Research Question: Does the Uncanny Valley effect also occur with primate faces?
load("UV_Prim.Rda")
UV_Prim
| Obs | hum_like | response | hum_like0 | hum_like1 | hum_like2 | hum_like3 |
|---|---|---|---|---|---|---|
| 9 | 0.2156427 | 0.5929875 | 1 | 0.2156427 | 0.0465018 | 0.0100278 |
| 21 | 0.3902556 | 0.6742513 | 1 | 0.3902556 | 0.1522995 | 0.0594357 |
| 37 | 0.1954562 | 0.3573225 | 1 | 0.1954562 | 0.0382031 | 0.0074670 |
| 43 | 0.2206893 | 0.0417482 | 1 | 0.2206893 | 0.0487038 | 0.0107484 |
| 44 | 0.4276006 | 0.1108224 | 1 | 0.4276006 | 0.1828423 | 0.0781835 |
| 46 | 0.8242647 | 0.7108200 | 1 | 0.8242647 | 0.6794123 | 0.5600156 |
| 47 | 0.9665793 | 0.7920838 | 1 | 0.9665793 | 0.9342755 | 0.9030514 |
| 48 | 0.8595910 | 0.8692844 | 1 | 0.8595910 | 0.7388967 | 0.6351490 |
UV_Prim %>%
ggplot(aes(x = hum_like,
y = response)) +
geom_point() +
geom_smooth()
## `geom_smooth()` using method = 'loess' and formula 'y ~ x'
Here, four polynomial models are estimated, from the grand-mean model (degree 0) up to the cubic model (degree 3).
M_poly_0 <-
UV_Prim %>%
stan_glm(response ~ 1, data = .)
M_poly_1 <-
UV_Prim %>%
stan_glm(response ~ 1 + hum_like, data = .)
M_poly_2 <-
UV_Prim %>%
stan_glm(response ~ poly(hum_like, 2), data = .)
M_poly_3 <-
UV_Prim %>%
stan_glm(response ~ poly(hum_like, 3), data = .)
Multi-model posterior
P_poly <-
bind_rows(posterior(M_poly_0), posterior(M_poly_1),
posterior(M_poly_2), posterior(M_poly_3))
clu(P_poly)
| model | parameter | fixef | center | lower | upper |
|---|---|---|---|---|---|
| M_poly_0 | Intercept | Intercept | 0.5749413 | 0.5063869 | 0.6383882 |
| M_poly_0 | sigma_resid | NA | 0.2336724 | 0.1941467 | 0.2883181 |
| M_poly_1 | Intercept | Intercept | 0.4456212 | 0.3166194 | 0.5866361 |
| M_poly_1 | hum_like | hum_like | 0.3679498 | 0.0375300 | 0.6850237 |
| M_poly_1 | sigma_resid | NA | 0.2239070 | 0.1853987 | 0.2795419 |
| M_poly_2 | Intercept | Intercept | 0.5747332 | 0.5089760 | 0.6363923 |
| M_poly_2 | poly(hum_like, 2)1 | poly(hum_like, 2)1 | 0.5176882 | 0.0829976 | 0.9560847 |
| M_poly_2 | poly(hum_like, 2)2 | poly(hum_like, 2)2 | 0.2005744 | -0.2485008 | 0.6470145 |
| M_poly_2 | sigma_resid | NA | 0.2240834 | 0.1842811 | 0.2787247 |
| M_poly_3 | Intercept | Intercept | 0.5742051 | 0.5072807 | 0.6384097 |
| M_poly_3 | poly(hum_like, 3)1 | poly(hum_like, 3)1 | 0.5158553 | 0.0627742 | 0.9756548 |
| M_poly_3 | poly(hum_like, 3)2 | poly(hum_like, 3)2 | 0.2012883 | -0.2533328 | 0.6418155 |
| M_poly_3 | poly(hum_like, 3)3 | poly(hum_like, 3)3 | -0.0730887 | -0.5330623 | 0.3824011 |
| M_poly_3 | sigma_resid | NA | 0.2266849 | 0.1864653 | 0.2810992 |
Multi-model posterior predictions (fitted responses):
PP_poly <-
bind_rows(post_pred(M_poly_0), post_pred(M_poly_1),
post_pred(M_poly_2), post_pred(M_poly_3))
PP_poly %>%
predict() %>%
left_join(UV_Prim, by = "Obs") %>%
arrange(model, hum_like) %>%
ggplot(aes(x = hum_like,
y = center)) +
facet_wrap(~model) +
geom_point(aes(y = response, col = "observed")) +
geom_smooth(aes(col = "fitted responses"))
## `geom_smooth()` using method = 'loess' and formula 'y ~ x'
Model selection (Testing theories)
Loo_poly <-
list(loo(M_poly_0),
loo(M_poly_1),
loo(M_poly_2),
loo(M_poly_3))
Loo_poly %>%
compare_IC()
| Model | IC | Estimate | SE | diff_IC |
|---|---|---|---|---|
| M_poly_1 | looic | -4.504289 | 9.855807 | 0.0000000 |
| M_poly_2 | looic | -3.692093 | 9.482880 | 0.8121966 |
| M_poly_3 | looic | -1.591048 | 9.599951 | 2.9132419 |
| M_poly_0 | looic | -1.120659 | 9.218283 | 3.3836300 |
save(UV_Prim, M_poly_0, M_poly_1,
M_poly_2, M_poly_3,
P_poly, PP_poly, Loo_poly,
file = "UV_Prim.Rda")
#detach(Uncanny)
Stimuli <-
readxl::read_excel("Data/BA21/Stimuli.xlsx") %>%
mutate(Stimulus = str_c("S", as.character(Stimulus))) %>%
mutate(Set = factor(Set, 1:4, labels = c("Primate", "Human", "Tri23", "Robot"))) %>%
mutate(humLike = rowMeans(select(., starts_with("H_like")), na.rm = T)) %>%
mutate(valence = rowMeans(select(., starts_with("E_valence")), na.rm = T))
Stimuli
| Stimulus | Set | H_likeness_1 | H_likeness_2 | H_likeness_3 | H_likeness_4 | E_1 | E_2 | E_3 | E_4 | E_valence_1 | E_valence_2 | E_valence_3 | E_valence_4 | AncestoralCloseness | Ethnicity | humLike | valence |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
| S1 | Primate | 30 | 50 | 20 | 40 | 4 | 4 | 2 | 4 | -65 | 20 | -80 | -40 | 5 | 0 | 35.00 | -41.25 |
| S2 | Primate | 35 | 50 | 20 | 30 | 1 | 0 | 0 | 0 | 30 | 0 | 0 | 0 | 5 | 0 | 33.75 | 7.50 |
| S3 | Primate | 20 | 50 | 40 | 20 | 1 | 0 | 0 | 0 | 20 | 0 | 0 | 0 | 5 | 0 | 32.50 | 5.00 |
| S4 | Robot | 5 | NA | NA | NA | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 10 | 0 | 5.00 | 0.00 |
| S5 | Primate | 40 | 60 | 60 | 30 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 10 | 4 | 0 | 47.50 | 2.50 |
| S6 | Robot | 23 | NA | NA | NA | 1 | 1 | 0 | 1 | 10 | 50 | 5 | 0 | 10 | 0 | 23.00 | 16.25 |
| S7 | Primate | 55 | 55 | 60 | 40 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 4 | 0 | 52.50 | 0.00 |
| S8 | Robot | 8 | NA | NA | NA | 0 | 4 | 0 | 4 | 0 | 60 | 5 | 0 | 10 | 0 | 8.00 | 16.25 |
| S9 | Primate | 35 | 50 | 60 | 40 | 1 | 4 | 1 | 1 | 25 | -10 | 10 | 20 | 5 | 0 | 46.25 | 11.25 |
| S10 | Primate | 59 | 60 | 65 | 50 | 3 | 6 | 0 | 1 | -10 | -20 | 0 | 20 | 4 | 0 | 58.50 | -2.50 |
| S11 | Human | 100 | 100 | 100 | 100 | 4 | 4 | 4 | 4 | 88 | 20 | -20 | 60 | 0 | 1 | 100.00 | 37.00 |
| S12 | Primate | 24 | 45 | 25 | 25 | 4 | 4 | 2 | 4 | 70 | -20 | -80 | -70 | 7 | 0 | 29.75 | -25.00 |
| S13 | Primate | 45 | 55 | 70 | 60 | 1 | 0 | 0 | 1 | 15 | 0 | 0 | 10 | 4 | 0 | 57.50 | 6.25 |
| S14 | Primate | 67 | 55 | 65 | 50 | 1 | 0 | 1 | 1 | 10 | 0 | 40 | 10 | 4 | 0 | 59.25 | 15.00 |
| S15 | Primate | 20 | 50 | 50 | 40 | 0 | 3 | 3 | 3 | 0 | -30 | -10 | -5 | 5 | 0 | 40.00 | -11.25 |
| S16 | Primate | 25 | 50 | 20 | 30 | 0 | 3 | 0 | 1 | 5 | -10 | 0 | 10 | 5 | 0 | 31.25 | 1.25 |
| S17 | Primate | 16 | 50 | 20 | 20 | 1 | 3 | 0 | 6 | 15 | -10 | 0 | 10 | 5 | 0 | 26.50 | 3.75 |
| S18 | Primate | 20 | 45 | 50 | 50 | 0 | 3 | 0 | 4 | 0 | -20 | 0 | -10 | 6 | 0 | 41.25 | -7.50 |
| S19 | Primate | 22 | 45 | 50 | 30 | 0 | 0 | 0 | 4 | -10 | 0 | 0 | -10 | 6 | 0 | 36.75 | -5.00 |
| S20 | Human | 100 | 100 | 100 | 100 | 3 | 3 | 3 | 6 | 80 | -60 | -98 | -60 | 0 | 1 | 100.00 | -34.50 |
| S21 | Primate | 33 | 50 | 50 | 60 | 0 | 3 | 0 | 1 | 0 | -60 | 0 | 5 | 6 | 0 | 48.25 | -13.75 |
| S22 | Primate | 40 | 50 | 60 | 30 | 0 | 3 | 0 | 1 | 0 | -30 | 0 | 10 | 6 | 0 | 45.00 | -5.00 |
| S23 | Primate | 60 | 50 | 60 | 30 | 1 | 0 | 0 | 1 | 25 | 0 | 0 | 20 | 4 | 0 | 50.00 | 11.25 |
| S24 | Primate | 55 | 50 | 60 | 40 | 5 | 2 | 5 | 5 | -85 | -80 | -90 | -70 | 4 | 0 | 51.25 | -81.25 |
| S25 | Primate | 97 | 96 | 93 | 95 | 1 | 1 | 1 | 1 | 37 | 60 | 20 | 25 | 1 | 0 | 95.25 | 35.50 |
| S26 | Robot | 26 | NA | NA | NA | 0 | 3 | 0 | 2 | 0 | -40 | 0 | -10 | 10 | 0 | 26.00 | -12.50 |
| S27 | Primate | 18 | 50 | 50 | 50 | 3 | 3 | 0 | 3 | -40 | -40 | 0 | -10 | 5 | 0 | 42.00 | -22.50 |
| S28 | Primate | 67 | 55 | 60 | 40 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 20 | 4 | 0 | 55.50 | 5.00 |
| S29 | Primate | 85 | 75 | 75 | 70 | 4 | 0 | 0 | 1 | -15 | 0 | 0 | 5 | 3 | 0 | 76.25 | -2.50 |
| S30 | Primate | 5 | 30 | 20 | 5 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 | 0 | 15.00 | 0.00 |
| S31 | Primate | 1 | 10 | 15 | 10 | 0 | 3 | 0 | 3 | 0 | -50 | 0 | -10 | 8 | 0 | 9.00 | -15.00 |
| S32 | Primate | 20 | 45 | 33 | 35 | 4 | 4 | 4 | 4 | 65 | 40 | -25 | 30 | 7 | 0 | 33.25 | 27.50 |
| S33 | Primate | 88 | 80 | 85 | 90 | 1 | 4 | 0 | 1 | 30 | -20 | 0 | 10 | 3 | 0 | 85.75 | 5.00 |
| S34 | Primate | 78 | 80 | 75 | 60 | 1 | 5 | 0 | 1 | 15 | -20 | 0 | 10 | 3 | 0 | 73.25 | 1.25 |
| S35 | Primate | 75 | 85 | 75 | 70 | 0 | 3 | 0 | 0 | -10 | -10 | 0 | 5 | 3 | 0 | 76.25 | -3.75 |
| S36 | Primate | 85 | 85 | 75 | 70 | 1 | 1 | 1 | 1 | 50 | 20 | 90 | 30 | 3 | 0 | 78.75 | 47.50 |
| S37 | Primate | 70 | 85 | 80 | 70 | 6 | 6 | 1 | 1 | -30 | -20 | 90 | 20 | 3 | 0 | 76.25 | 15.00 |
| S38 | Robot | 37 | NA | NA | NA | 4 | 4 | 4 | 4 | 30 | 0 | -20 | -15 | 10 | 0 | 37.00 | -1.25 |
| S39 | Human | 100 | 100 | 100 | 100 | 6 | 5 | 5 | 5 | 60 | -50 | -70 | -40 | 0 | 1 | 100.00 | -25.00 |
| S40 | Primate | 18 | 50 | 45 | 50 | 4 | 4 | 4 | 4 | 10 | -20 | -20 | 5 | 5 | 0 | 40.75 | -6.25 |
| S41 | Primate | 33 | 50 | 30 | 30 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 | 0 | 35.75 | 0.00 |
| S42 | Primate | 94 | 90 | 90 | 85 | 1 | 1 | 1 | 4 | 33 | 50 | 30 | 5 | 2 | 0 | 89.75 | 29.50 |
| S43 | Primate | 45 | 50 | 60 | 40 | 0 | 3 | 0 | 0 | -10 | -50 | 0 | 0 | 4 | 0 | 48.75 | -15.00 |
| S44 | Primate | 44 | 50 | 55 | 55 | 1 | 1 | 0 | 1 | 33 | 20 | 0 | 25 | 4 | 0 | 51.00 | 19.50 |
| S45 | Human | 100 | 100 | 100 | 100 | 0 | 4 | 0 | 0 | 0 | -10 | 0 | 0 | 0 | 1 | 100.00 | -2.50 |
| S46 | Robot | 45 | NA | NA | NA | 1 | 6 | 6 | 6 | 20 | -50 | -15 | -30 | 10 | 0 | 45.00 | -18.75 |
| S47 | Primate | 1 | 10 | 10 | 5 | 0 | 5 | 0 | 0 | 0 | -10 | 0 | 0 | 9 | 0 | 6.50 | -2.50 |
| S48 | Primate | 40 | 50 | 35 | 40 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 5 | 7 | 0 | 41.25 | 1.25 |
| S49 | Tri23 | 100 | 100 | 98 | 97 | 1 | 1 | 1 | 1 | 60 | 70 | 50 | 10 | 0 | 0 | 98.75 | 47.50 |
| S50 | Robot | 49 | NA | NA | NA | 2 | 2 | 3 | 3 | -30 | -70 | -30 | -40 | 10 | 0 | 49.00 | -42.50 |
| S51 | Robot | 80 | NA | NA | NA | 0 | 0 | 0 | 1 | -5 | 0 | 0 | 0 | 10 | 0 | 80.00 | -1.25 |
| S52 | Primate | 84 | 90 | 87 | 80 | 3 | 2 | 3 | 0 | -15 | -30 | -10 | 0 | 3 | 0 | 85.25 | -13.75 |
| S53 | Primate | 83 | 90 | 89 | 80 | 0 | 1 | 1 | 1 | 13 | 60 | 40 | 5 | 2 | 0 | 85.50 | 29.50 |
| S54 | Primate | 2 | 10 | 15 | 5 | 5 | 0 | 0 | 0 | -26 | 0 | 0 | 0 | 9 | 0 | 8.00 | -6.50 |
| S55 | Primate | 18 | 40 | 40 | 30 | 2 | 2 | 0 | 3 | -23 | -50 | 0 | -10 | 7 | 0 | 32.00 | -20.75 |
| S56 | Human | 100 | 100 | 100 | 100 | 1 | 1 | 1 | 1 | 20 | 40 | 20 | 10 | 0 | 2 | 100.00 | 22.50 |
| S57 | Primate | 40 | 50 | 55 | 50 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 5 | 4 | 0 | 48.75 | 1.25 |
| S58 | Robot | 57 | NA | NA | NA | 0 | 5 | 0 | 0 | -20 | -50 | 0 | 0 | 10 | 0 | 57.00 | -17.50 |
| S59 | Robot | 76 | NA | NA | NA | 1 | 1 | 1 | 1 | 45 | 65 | 60 | 20 | 10 | 0 | 76.00 | 47.50 |
| S60 | Human | 100 | 100 | 100 | 100 | 0 | 0 | 5 | 0 | 0 | 0 | -5 | 0 | 0 | 3 | 100.00 | -1.25 |
| S61 | Primate | 1 | 20 | 15 | 3 | 2 | 2 | 0 | 0 | -20 | -10 | 0 | 0 | 8 | 0 | 9.75 | -7.50 |
| S62 | Primate | 85 | 80 | 92 | 90 | 5 | 3 | 3 | 6 | -60 | -70 | -60 | -30 | 2 | 0 | 86.75 | -55.00 |
| S63 | Primate | 91 | 85 | 88 | 90 | 0 | 3 | 0 | 0 | 0 | -20 | 0 | 0 | 2 | 0 | 88.50 | -5.00 |
| S64 | Primate | 95 | 90 | 85 | 80 | 0 | 6 | 0 | 4 | -15 | -20 | 0 | -20 | 2 | 0 | 87.50 | -13.75 |
| S65 | Primate | 97 | 95 | 93 | 90 | 3 | 3 | 3 | 3 | -24 | -20 | -10 | -20 | 1 | 0 | 93.75 | -18.50 |
| S66 | Robot | 69 | NA | NA | NA | 1 | 4 | 1 | 1 | 20 | 25 | 20 | 20 | 10 | 0 | 69.00 | 21.25 |
| S67 | Primate | 70 | 75 | 75 | 70 | 0 | 3 | 0 | 0 | 0 | -10 | 0 | 0 | 3 | 0 | 72.50 | -2.50 |
| S68 | Primate | 18 | 20 | 30 | 25 | 5 | 3 | 0 | 0 | -22 | -40 | 0 | -5 | 8 | 0 | 23.25 | -16.75 |
| S69 | Primate | 28 | 45 | 35 | 30 | 4 | 4 | 4 | 4 | 55 | 20 | 10 | 25 | 7 | 0 | 34.50 | 27.50 |
| S70 | Primate | 20 | 40 | 35 | 30 | 5 | 5 | 5 | 5 | -60 | -30 | -40 | -40 | 7 | 0 | 31.25 | -42.50 |
| S71 | Primate | 90 | 90 | 88 | 85 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 0 | 88.25 | 0.00 |
| S72 | Primate | 82 | 75 | 65 | 70 | 0 | 0 | 0 | 0 | -10 | 0 | 0 | 0 | 3 | 0 | 73.00 | -2.50 |
| S73 | Primate | 2 | 20 | 15 | 20 | 5 | 5 | 5 | 5 | -40 | -50 | -60 | -60 | 8 | 0 | 14.25 | -52.50 |
| S74 | Human | 100 | 100 | 100 | 100 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 | 100.00 | 0.00 |
| S75 | Primate | 35 | 50 | 48 | 40 | 5 | 6 | 0 | 5 | -20 | -30 | -10 | -10 | 7 | 0 | 43.25 | -17.50 |
| S76 | Primate | 80 | 80 | 82 | 80 | 0 | 5 | 0 | 0 | 0 | -50 | 0 | 0 | 3 | 0 | 80.50 | -12.50 |
| S77 | Primate | 75 | 75 | 83 | 60 | 1 | 1 | 1 | 1 | 30 | 40 | 95 | 60 | 3 | 0 | 73.25 | 56.25 |
| S78 | Primate | 55 | 65 | 65 | 55 | 1 | 4 | 4 | 1 | 65 | 80 | 40 | 70 | 4 | 0 | 60.00 | 63.75 |
| S79 | Primate | 63 | 65 | 65 | 40 | 1 | 1 | 1 | 1 | 70 | 90 | 95 | 90 | 4 | 0 | 58.25 | 86.25 |
| S80 | Primate | 20 | 40 | 20 | 10 | 1 | 0 | 0 | 0 | 20 | 0 | 0 | 0 | 7 | 0 | 22.50 | 5.00 |
| S81 | Primate | 25 | 45 | 50 | 45 | 0 | 3 | 3 | 3 | -10 | -60 | -30 | -40 | 4 | 0 | 41.25 | -35.00 |
| S82 | Primate | 48 | 65 | 65 | 40 | 5 | 1 | 5 | 1 | 30 | 50 | -35 | 50 | 4 | 0 | 54.50 | 23.75 |
| S83 | Primate | 33 | 50 | 55 | 50 | 5 | 5 | 5 | 5 | -73 | -80 | -90 | -80 | 4 | 0 | 47.00 | -80.75 |
| S84 | Primate | 97 | 90 | 94 | 90 | 5 | 5 | 5 | 4 | 80 | -100 | -95 | -60 | 2 | 0 | 92.75 | -43.75 |
| S85 | Primate | 91 | 90 | 95 | 95 | 4 | 4 | 2 | 2 | -25 | -30 | -90 | -50 | 2 | 0 | 92.75 | -48.75 |
| S86 | Primate | 75 | 70 | 83 | 85 | 5 | 5 | 5 | 5 | -40 | -70 | -70 | -20 | 3 | 0 | 78.25 | -50.00 |
| S87 | Primate | 70 | 75 | 90 | 85 | 5 | 4 | 5 | 6 | -70 | -20 | -80 | -40 | 2 | 0 | 80.00 | -52.50 |
| S88 | Primate | 95 | 93 | 92 | 90 | 2 | 4 | 2 | 6 | -68 | -10 | -85 | -60 | 2 | 0 | 92.50 | -55.75 |
| S89 | Primate | 42 | 50 | 75 | 50 | 0 | 4 | 2 | 1 | 0 | 0 | -50 | 15 | 3 | 0 | 54.25 | -8.75 |
| S90 | Human | 100 | 100 | 100 | 100 | 0 | 4 | 0 | 0 | 0 | 20 | 0 | 0 | 0 | 3 | 100.00 | 5.00 |
| S91 | Primate | 68 | 70 | 73 | 75 | 0 | 4 | 4 | 4 | 0 | -30 | 0 | -20 | 3 | 0 | 71.50 | -12.50 |
| S92 | Primate | 93 | 85 | 88 | 85 | 3 | 4 | 4 | 4 | -30 | -30 | -50 | -20 | 2 | 0 | 87.75 | -32.50 |
| S93 | Human | 100 | 100 | 100 | 100 | 5 | 6 | 6 | 6 | -85 | -60 | -90 | -70 | 0 | 1 | 100.00 | -76.25 |
| S94 | Primate | 90 | 80 | 87 | 85 | 5 | 6 | 5 | 6 | -70 | -70 | -70 | -70 | 2 | 0 | 85.50 | -70.00 |
| S95 | Primate | 95 | 95 | 95 | 90 | 3 | 3 | 3 | 3 | -30 | -40 | -10 | -40 | 1 | 0 | 93.75 | -30.00 |
| S96 | Primate | 35 | 40 | 30 | 25 | 3 | 3 | 3 | 3 | -34 | -30 | -40 | -40 | 7 | 0 | 32.50 | -36.00 |
| S97 | Human | 100 | 100 | 100 | 100 | 1 | 1 | 1 | 1 | 70 | 80 | 70 | 75 | 0 | 1 | 100.00 | 73.75 |
| S98 | Human | 100 | 100 | 100 | 100 | 4 | 4 | 2 | 4 | 85 | 0 | -95 | -70 | 0 | 1 | 100.00 | -20.00 |
| S99 | Primate | 23 | 50 | 30 | 30 | 5 | 5 | 5 | 5 | -70 | -60 | -60 | -40 | 7 | 0 | 33.25 | -57.50 |
| S100 | Primate | 71 | 70 | 80 | 65 | 1 | 1 | 1 | 1 | 40 | 40 | 90 | 50 | 3 | 0 | 71.50 | 55.00 |
Stimuli %>%
select(starts_with("H_like"), AncestoralCloseness) %>%
corrr::correlate() %>%
corrr::network_plot()
##
## Correlation method: 'pearson'
## Missing treated using: 'pairwise.complete.obs'
D_raw <-
readxl::read_excel("Data/BA21/Thesis +Uncanny+Valley+Effect_April+29,+2021_12.19.xlsx") %>%
filter(StartDate != "Start Date")
BA21 <- D_raw %>%
select(Part = ResponseId, matches("^S\\d+")) %>%
pivot_longer(-Part, names_to = "Trial", values_to = "response") %>%
filter(!is.na(response)) %>%
separate(Trial, into = c("Stimulus", "Item", "attempt")) %>%
left_join(Stimuli %>% select(Stimulus, Set, valence, humLike), by = "Stimulus") %>%
mutate(Scale = if_else(str_detect(Item, "^2"),
"Eeriness",
"Display"),
response = mascutils::rescale_unit(as.numeric(response)),
response = mascutils::rescale_centered(response, scale = .999),
humLike = mascutils::rescale_unit(humLike),
#Stimulus = str_extract(Trial, "^S\\d{2,3}_\\d"),
#Item = str_extract(Trial, ),
#humLike = as.numeric(str_extract(Stimulus, "\\d{1,3}")),
humLike_2 = humLike^2,
humLike_3 = humLike^3) %>%
bayr::as_tbl_obs()
BA21
qplot(BA21$response)
min(BA21$response)
max(BA21$response)
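The two rescaling steps deserve a short explanation. As a sketch (my reading of the mascutils helpers, not their actual source): rescale_unit() maps the raw responses linearly onto [0, 1], and rescale_centered(scale = .999) then shrinks them slightly towards .5, so the boundaries 0 and 1 are never hit exactly. That is what later makes the Beta likelihood of M_poly_5 admissible.
# Sketch of the rescaling, under the assumptions stated above
x <- c(0, 25, 50, 75, 100)                   # hypothetical raw slider responses
x_unit <- (x - min(x)) / (max(x) - min(x))   # linear mapping onto [0, 1]
x_squeezed <- (x_unit - 0.5) * 0.999 + 0.5   # squeezed towards .5, strictly inside (0, 1)
x_squeezed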
BA21_prim <- BA21 %>%
filter(Set == "Primate")
BA21 %>%
group_by(Part) %>%
summarize(N = n())
BA21 %>%
group_by(Stimulus) %>%
summarize(N = n())
BA21_agg <-
BA21_prim %>%
filter(Scale == "Eeriness") %>%
group_by(Stimulus, Set, valence, humLike) %>%
summarize(mean_resp = mean(response, na.rm = T)) %>%
ungroup() %>%
as_tbl_obs()
BA21_agg
For now, we work with responses averaged per stimulus (across participants), meaning every face gets one eeriness score.
BA21_agg %>%
ggplot(aes(x = humLike,
y = mean_resp)) +
geom_point() +
geom_smooth() +
facet_wrap(~Set)
## `geom_smooth()` using method = 'loess' and formula 'y ~ x'
Four polynomial models are estimated, from the cubic down to the grand mean. Valence of the facial expression is included as a control predictor.
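A side note on poly(): it builds orthogonal polynomial contrasts, so its coefficients are not directly comparable to coefficients on the raw powers (humLike_2, humLike_3) used in the multi-level models further below. A small, self-contained sketch of the difference:
# Orthogonal contrasts versus raw powers (illustration with arbitrary values)
x <- seq(0, 1, length.out = 5)
poly(x, 3, raw = TRUE)   # plain powers: x, x^2, x^3
poly(x, 3)               # orthogonal contrasts: same fitted curve, different coefficients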
M_poly_3 <- stan_glm(mean_resp ~ valence + poly(humLike, 3),
data = BA21_agg)
M_poly_2 <- stan_glm(mean_resp ~ valence + poly(humLike, 2),
data = BA21_agg)
M_poly_1 <- stan_glm(mean_resp ~ valence + poly(humLike, 1),
data = BA21_agg)
M_poly_0 <- stan_glm(mean_resp ~ 1 + valence,
data = BA21_agg)
clu(M_poly_3)
| parameter | fixef | center | lower | upper |
|---|---|---|---|---|
| Intercept | Intercept | 0.5742051 | 0.5072807 | 0.6384097 |
| poly(hum_like, 3)1 | poly(hum_like, 3)1 | 0.5158553 | 0.0627742 | 0.9756548 |
| poly(hum_like, 3)2 | poly(hum_like, 3)2 | 0.2012883 | -0.2533328 | 0.6418155 |
| poly(hum_like, 3)3 | poly(hum_like, 3)3 | -0.0730887 | -0.5330623 | 0.3824011 |
| sigma_resid | NA | 0.2266849 | 0.1864653 | 0.2810992 |
PP_poly_3 <- post_pred(M_poly_3)
PP_poly_3 %>%
predict() %>%
left_join(BA21_agg, by = "Obs") %>%
ggplot(aes(x = humLike)) +
geom_point(aes(y = mean_resp, color = "observed")) +
geom_smooth(aes(y = mean_resp, color = "LOESS"), se = F) +
geom_smooth(aes(y = center, color = "cubic"), se = F)
## `geom_smooth()` using method = 'loess' and formula 'y ~ x'
## `geom_smooth()` using method = 'loess' and formula 'y ~ x'
We approximate the relative predictive accuracy of all four population-level models, using the leave-one-out information criterion. (This is not exact leave-one-out cross-validation, but a close approximation computed from the posterior samples.)
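For reading the comparison: a loo object reports elpd_loo (expected log predictive density) and looic = -2 * elpd_loo, so lower looic means better expected predictive accuracy. One model's estimates can be inspected directly, for example:
# elpd_loo, p_loo and looic with their standard errors
loo(M_poly_1)$estimates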
Loo_poly <-
list(loo(M_poly_0),
loo(M_poly_1),
loo(M_poly_2),
loo(M_poly_3))
compare_IC(Loo_poly)
| Model | IC | Estimate | SE | diff_IC |
|---|---|---|---|---|
| M_poly_1 | looic | -4.504289 | 9.855807 | 0.0000000 |
| M_poly_2 | looic | -3.692093 | 9.482880 | 0.8121966 |
| M_poly_3 | looic | -1.591048 | 9.599951 | 2.9132419 |
| M_poly_0 | looic | -1.120659 | 9.218283 | 3.3836300 |
On population level, the cubic model is not the preferred one. That is no reason to be disappointed: as we will see, individual differences are clouding the population-level picture.
Finally, we use the cubic model to create a test statistic on the MCMC samples. A sampled curve counts as an Uncanny Valley when it has both a shoulder (a local maximum) and a trough (a local minimum); the proportion of posterior samples that satisfy this criterion tells us how credible the presence of an Uncanny Valley curve is.
P_wide <-
posterior(M_poly_3) %>%
as_tibble() %>%
filter(type == "fixef") %>%
select(chain, iter, fixef, value) %>%
pivot_wider(id_cols = c("chain", "iter"),
names_from = fixef,
values_from = value) %>%
mutate(shoulder = uncanny::shoulder(.[3:6]),
trough = uncanny::trough(.[3:6]),
is_uncanny = !is.na(shoulder) & !is.na(trough))
cat("The probability of the population level being a UV curve is: ", mean(P_wide$is_uncanny))
## The probability of the population level being a UV curve is: 0.71475
M_poly_4 <-
stan_glmer(response ~ 1 + valence + humLike + humLike_2 + humLike_3 +
(1 + valence + humLike + humLike_2 + humLike_3|Part),
data = BA21)
PP_poly_4 <- post_pred(M_poly_4, thin = 5)
fixef_ml(M_poly_4)
## Joining, by = c("model", "fixef")
| fixef | center | lower | upper | SD_Part |
|---|---|---|---|---|
| Intercept | 0.5830876 | 0.5588493 | 0.6071084 | 0.0265600 |
| valence | 0.0011037 | 0.0009161 | 0.0013039 | 0.0002174 |
| humLike | 0.4744013 | 0.3075432 | 0.6419703 | 0.0178591 |
| humLike_2 | -1.1059097 | -1.4796795 | -0.7347198 | 0.0238931 |
| humLike_3 | 0.7134679 | 0.4704401 | 0.9512299 | 0.0236899 |
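What do shoulder() and trough() locate? Conceptually (this is a sketch, not the uncanny package's actual implementation), they are the stationary points of the cubic: the shoulder is the local maximum, the trough the local minimum. With the population-level center estimates from the table above (valence set aside, as it does not change the shape of the curve):
# Stationary points of y = b0 + b1*x + b2*x^2 + b3*x^3 are the roots of the
# derivative b1 + 2*b2*x + 3*b3*x^2. With b3 > 0, the smaller root is the
# local maximum (shoulder), the larger one the local minimum (trough).
stationary_points <- function(b1, b2, b3) {
  disc <- (2 * b2)^2 - 4 * (3 * b3) * b1
  if (disc < 0) return(c(shoulder = NA, trough = NA))  # no local extrema, hence no UV shape
  x <- (-2 * b2 + c(-1, 1) * sqrt(disc)) / (2 * 3 * b3)
  c(shoulder = min(x), trough = max(x))
}
stationary_points(b1 = 0.474, b2 = -1.106, b3 = 0.713)  # center estimates of M_poly_4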
PP_poly_4 %>%
predict() %>%
left_join(BA21, by = "Obs") %>%
ggplot(aes(x = humLike)) +
geom_smooth(aes(y = center, color = "cubic model",
group = Part), se = F) +
labs(color = "Source")
## `geom_smooth()` using method = 'loess' and formula 'y ~ x'
PP_poly_4 %>%
predict() %>%
left_join(BA21, by = "Obs") %>%
ggplot(aes(x = humLike)) +
facet_wrap(~Part) +
#geom_point(aes(y = response, color = "observed")) +
#geom_smooth(aes(y = response, color = "LOESS"), se = F) +
geom_smooth(aes(y = center, color = "cubic",
group = Part), se = F)
## `geom_smooth()` using method = 'loess' and formula 'y ~ x'
The same participant-level cubic model is now re-estimated as a Beta regression (using brms), which respects that the rescaled responses are bounded between 0 and 1, with an additional participant-level model for the dispersion (phi).
M_poly_5 <-
brm(bf(response ~ valence + humLike + humLike_2 + humLike_3 + (1 + valence + humLike +
humLike_2 + humLike_3|Part),
phi ~ 1 + (1 | Part)),
family = Beta(),
data = BA21,
inits = "0")
PP_poly_5 <- post_pred(M_poly_5, thin = 5)
fixef_ml(M_poly_5)
## Joining, by = c("model", "fixef")
| fixef | center | lower | upper | SD_Part | SD_Part (phi) |
|---|---|---|---|---|---|
| Intercept | 0.244664 | 0.1307294 | 0.3507426 | 0.1724776 | NA |
| NA | 1.237866 | 1.0444732 | 1.4085492 | NA | 0.553705 |
| valence | 0.003522 | 0.0027052 | 0.0043748 | 0.0012250 | NA |
| humLike | 2.175117 | 1.4781516 | 2.8778466 | 0.1163176 | NA |
| humLike_2 | -5.069604 | -6.6074480 | -3.5802527 | 0.1533726 | NA |
| humLike_3 | 3.300843 | 2.3730858 | 4.2767061 | 0.1391181 | NA |
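Note that the Beta model's coefficients live on the logit (linked) scale, which is why they are much larger in magnitude than those of the Gaussian models. A minimal sketch of mapping the population-level expectation back to the 0..1 response scale, using the center estimates from the table above:
# Population-level linear predictor of the Beta model (valence at 0 by default)
eta <- function(humLike, valence = 0) {
  0.245 + 0.0035 * valence + 2.175 * humLike - 5.070 * humLike^2 + 3.301 * humLike^3
}
plogis(eta(c(0, .25, .5, .75, 1)))   # expected eeriness across the humLike range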
PP_poly_5 %>%
predict() %>%
left_join(BA21, by = "Obs") %>%
ggplot(aes(x = humLike)) +
geom_smooth(aes(y = center, color = "cubic model",
group = Part), se = F) +
labs(color = "Source")
## `geom_smooth()` using method = 'loess' and formula 'y ~ x'
PP_poly_5 %>%
predict() %>%
left_join(BA21, by = "Obs") %>%
ggplot(aes(x = humLike)) +
facet_wrap(~Part) +
#geom_point(aes(y = response, color = "observed")) +
#geom_smooth(aes(y = response, color = "LOESS"), se = F) +
geom_smooth(aes(y = center, color = "cubic",
group = Part), se = F)
## `geom_smooth()` using method = 'loess' and formula 'y ~ x'
P_univ_uncanny <-
posterior(M_poly_5) %>%
#as_tibble() %>%
dplyr::filter(is.na(nonlin), fixef != "valence") %>%
re_scores() %>%
select(iter, Part = re_entity, fixef, value) %>%
tidyr::spread(key = "fixef", value = "value") %>%
select(iter, Part,
humLike_0 = Intercept,
humLike_1 = humLike, humLike_2, humLike_3) %>%
mutate(
trough = trough(select(., humLike_0:humLike_3)),
shoulder = shoulder(select(., humLike_0:humLike_3)),
has_trough = !is.na(trough),
has_shoulder = !is.na(shoulder),
shoulder_left = trough > shoulder,
is_uncanny = has_trough & has_shoulder & shoulder_left
)
Individual positions of shoulder and trough
P_univ_uncanny %>%
ggplot(aes(x = Part)) +
geom_violin(aes(y = trough, color = "trough")) +
geom_violin(aes(y = shoulder, color = "shoulder")) +
# theme(axis.text.x = element_text(angle = 90)) +
coord_flip()
P_univ_uncanny %>%
group_by(Part) %>%
summarize(prob_uncanny = mean(is_uncanny, na.rm = T)) %>%
# ungroup() %>%
# mutate(Part_ord = rank(prob_uncanny)) %>%
mutate(label = str_c(100 * round(prob_uncanny, 4), "%")) %>%
ggplot(aes(x = Part, y = prob_uncanny)) +
geom_col() +
geom_label(aes(label = label), size = 2) +
theme(axis.text.x = element_text(angle = 90))
save(BA21, BA21_agg, PP_poly_4, M_poly_4, P_univ_uncanny, M_poly_3,
PP_poly_3, M_poly_2, M_poly_1, M_poly_0, Loo_poly, M_poly_5, PP_poly_5,
file = "BA21.Rda")