Main File for S2 main CAM study

Author

Julius Fenn, Louisa Estadieu

Notes

prepare data

set up data.frame questionnaires

setwd("outputs")
# > pre study
suppressMessages(read_file('preCAM.txt') %>%
                   # ... split it into lines ...
                   str_split('\n') %>% first() %>%
                   # ... filter empty rows ...
                   discard(function(x) x == '') %>%
                   discard(function(x) x == '\r') %>%
                   # ... parse JSON into a data.frame
                   map_dfr(fromJSON, flatten=TRUE)) -> dat_preCAM

# > post first CAM
suppressMessages(read_file('postCAM.txt') %>%
                   # ... split it into lines ...
                   str_split('\n') %>% first() %>%
                   # ... filter empty rows ...
                   discard(function(x) x == '') %>%
                   discard(function(x) x == '\r') %>%
                   # ... parse JSON into a data.frame
                   map_dfr(fromJSON, flatten=TRUE)) -> dat_postCAM

# > post second CAM
suppressMessages(read_file('secondPostCAM.txt') %>%
                   # ... split it into lines ...
                   str_split('\n') %>% first() %>%
                   # ... filter empty rows ...
                   discard(function(x) x == '') %>%
                   discard(function(x) x == '\r') %>%
                   # ... parse JSON into a data.frame
                   map_dfr(fromJSON, flatten=TRUE)) -> dat_secondPostCAM
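The same read/split/parse pipeline is repeated three times above; a small helper could wrap it (a sketch, assuming the same readr/stringr/purrr/jsonlite functions are attached as in the code above):

read_jatos_txt <- function(file) {
  # read one JATOS result file and return a flattened data.frame (sketch)
  read_file(file) %>%
    str_split('\n') %>% first() %>%
    discard(function(x) x == '') %>%
    discard(function(x) x == '\r') %>%
    map_dfr(fromJSON, flatten = TRUE)
}
# e.g. dat_preCAM <- suppressMessages(read_jatos_txt('preCAM.txt'))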



########################################
# create counter variable for all three data sets
########################################
### pre study
dat_preCAM$ID <- NA

tmp_IDcounter <- 0
for(i in 1:nrow(dat_preCAM)){
  if(!is.na(dat_preCAM$sender[i]) && dat_preCAM$sender[i] == "Greetings"){
    # tmp <- dat_preCAM$prolific_pid[i]
    tmp_IDcounter = tmp_IDcounter + 1
  }
  dat_preCAM$ID[i] <- tmp_IDcounter
}



### post study
dat_postCAM$ID <- NA

tmp_IDcounter <- 0
for(i in 1:nrow(dat_postCAM)){
  if(!is.na(dat_postCAM$sender[i]) && dat_postCAM$sender[i] == "CAMfeedbackGeneral"){
    # tmp <- dat_postCAM$prolific_pid[i]
    tmp_IDcounter = tmp_IDcounter + 1
  }
  dat_postCAM$ID[i] <- tmp_IDcounter
}

### second post study
#> fix error in "sender variable"
for(i in 1:nrow(dat_secondPostCAM)){
  if(is.na(dat_secondPostCAM$sender[i])){
    if(!is.na(dat_secondPostCAM$sender[i+1])){
      dat_secondPostCAM$sender[i] <- "adaptiveAnswer"
    }
  }
}



dat_secondPostCAM$ID <- NA
tmp_IDcounter <- 0
for(i in 1:nrow(dat_secondPostCAM)){
  if(!is.na(dat_secondPostCAM$sender[i]) && dat_secondPostCAM$sender[i] == "adaptiveAnswer"){
    # tmp <- dat_secondPostCAM$prolific_pid[i]
    tmp_IDcounter = tmp_IDcounter + 1
  }
  dat_secondPostCAM$ID[i] <- tmp_IDcounter
}
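The three counter loops above can equivalently be written without an explicit loop; a minimal sketch for the pre data (the same pattern works with "CAMfeedbackGeneral" and "adaptiveAnswer" as markers):

# vectorized equivalent of the counter loop (sketch, yields the same ID values)
dat_preCAM$ID <- cumsum(!is.na(dat_preCAM$sender) & dat_preCAM$sender == "Greetings")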


########################################
# keep only complete data sets
########################################
### pre-study
# sort(table(dat_preCAM$ID))
sum(table(dat_preCAM$ID) != max(table(dat_preCAM$ID)))
[1] 0
sum(table(dat_preCAM$ID) == max(table(dat_preCAM$ID)))
[1] 227
dat_preCAM <- dat_preCAM[dat_preCAM$ID %in% names(table(dat_preCAM$ID))[table(dat_preCAM$ID) == max(table(dat_preCAM$ID))],]

### post-study
# sort(table(dat_postCAM$ID))
sum(table(dat_postCAM$ID) != max(table(dat_postCAM$ID)))
[1] 2
sum(table(dat_postCAM$ID) == max(table(dat_postCAM$ID)))
[1] 226
# dat_postCAM <- dat_postCAM[dat_postCAM$ID %in% names(table(dat_postCAM$ID))[table(dat_postCAM$ID) == max(table(dat_postCAM$ID))],]
dat_postCAM <- dat_postCAM[dat_postCAM$ID %in% names(table(dat_postCAM$ID))[table(dat_postCAM$ID) >= 4],]

### post-study second
# sort(table(dat_secondPostCAM$ID))
sum(table(dat_secondPostCAM$ID) != max(table(dat_secondPostCAM$ID)))
[1] 2
sum(table(dat_secondPostCAM$ID) == max(table(dat_secondPostCAM$ID)))
[1] 226
# dat_secondPostCAM <- dat_secondPostCAM[dat_secondPostCAM$ID %in% names(table(dat_secondPostCAM$ID))[table(dat_secondPostCAM$ID) == max(table(dat_secondPostCAM$ID))],]
dat_secondPostCAM <- dat_secondPostCAM[dat_secondPostCAM$ID %in% names(table(dat_secondPostCAM$ID))[table(dat_secondPostCAM$ID) >= 11],]
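As a quick check of the filtering above, the number of participants retained per data set (a minimal sketch):

# participants remaining after keeping only complete data sets (sketch)
c(pre = length(unique(dat_preCAM$ID)),
  post = length(unique(dat_postCAM$ID)),
  secondPost = length(unique(dat_secondPostCAM$ID)))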



########################################
# json (from JATOS) to 2D data.frame
########################################
################################ pre-study
tmp_notNumeric <- str_subset(string = colnames(dat_preCAM), pattern = "^meta|^R")
tmp_notNumeric <- str_subset(string = tmp_notNumeric, pattern = "labjs|location", negate = TRUE)

vec_ques <- c("PROLIFIC_PID",
                "choosen_Robot", 
              "dummy_informedconsent", 
              "commCheck", tmp_notNumeric)

vec_notNumeric = c("PROLIFIC_PID",
                "choosen_Robot", tmp_notNumeric)

questionnaire_preCAM <- questionnairetype(dataset = dat_preCAM, 
                                        listvars = vec_ques, 
                                        notNumeric = vec_notNumeric, verbose = FALSE)


dim(questionnaire_preCAM)
[1] 227  18
################################ post-study
vec_ques <- c("PROLIFIC_PID",
              "feedCAM_repres", "feedCAM_technicalprobs", "feedCAM_technicalprobsText",
              "feedCAM_already", "feedCAM_alreadyText")

vec_notNumeric = c("PROLIFIC_PID", 
                   "feedCAM_technicalprobsText", "feedCAM_alreadyText")

questionnaire_postCAM <- questionnairetype(dataset = dat_postCAM, 
                                        listvars = vec_ques, 
                                        notNumeric = vec_notNumeric, verbose = FALSE)

################################ post-study second
tmp_numeric <- str_subset(string = colnames(dat_secondPostCAM), pattern = "^GAToRS|^Almere|^LiWang")


vec_ques <- c("PROLIFIC_PID", 
              "ans1",
                tmp_numeric,
                "feedback_critic")

vec_notNumeric = c("PROLIFIC_PID",
                   "ans1",
                   "feedback_critic")

questionnaire_secondPostCAM <- questionnairetype(dataset = dat_secondPostCAM, 
                                        listvars = vec_ques, 
                                        notNumeric = vec_notNumeric, verbose = FALSE)


dim(questionnaire_secondPostCAM)
[1] 227  39
################################ merge all data sets
questionnaire <-  left_join(x = questionnaire_preCAM, y = questionnaire_postCAM, by='PROLIFIC_PID') %>%
                left_join(., questionnaire_secondPostCAM, by='PROLIFIC_PID') 

questionnaire$ID.x <- NULL
questionnaire$ID.y <- NULL

dim(questionnaire)
[1] 227  60
## save as .xlsx file
xlsx::write.xlsx2(x = questionnaire, file = "questionnaire.xlsx")
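xlsx::write.xlsx2() requires a Java installation; if that is a constraint, writexl can be used instead (a sketch, assuming the writexl package is installed):

# Java-free alternative export (sketch)
# writexl::write_xlsx(x = questionnaire, path = "questionnaire.xlsx")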

set up CAM data

pre

Load CAM data

setwd("outputs")
suppressMessages(read_file("CAMdata.txt") %>%
  # ... split it into lines ...
  str_split('\n') %>% first() %>%
    discard(function(x) x == '') %>%
    discard(function(x) x == '\r') %>%
  # ... filter empty rows ...
  discard(function(x) x == '')) -> dat_CAM_pre

raw_CAM_pre <- list()
for(i in 1:length(dat_CAM_pre)){
  raw_CAM_pre[[i]] <- jsonlite::fromJSON(txt = dat_CAM_pre[[i]])
}
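The parse loop can also be written more compactly with purrr (a sketch; the same applies to the post-study parse loop further below):

# loop-free parsing of the JSON lines (sketch, equivalent to the loop above)
raw_CAM_pre <- purrr::map(dat_CAM_pre, jsonlite::fromJSON)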

Create CAM files, draw CAMs and compute network indicators

########################################
# create CAM single files (nodes, connectors, merged)
########################################
CAMfiles_pre <- create_CAMfiles(datCAM = raw_CAM_pre, reDeleted = TRUE)
Nodes and connectors, which were deleted by participants were removed. 
 # deleted nodes:  352 
 # deleted connectors:  96
########################################
# draw CAMs
########################################
CAMdrawn_pre <- draw_CAM(dat_merged = CAMfiles_pre[[3]],
                     dat_nodes = CAMfiles_pre[[1]],ids_CAMs = "all",
                     plot_CAM = FALSE,
                     useCoordinates = TRUE,
                     relvertexsize = 3,
                     reledgesize = 1)
processing 227 CAMs... 
[1] "== participantCAM in drawnCAM"
########################################
# compute network indicators
########################################
tmp_microIndicator <- c("Rettungsroboter", "sozialer Assistenzroboter", "Vorteile", "Nachteile")
networkIndicators_pre <- compute_indicatorsCAM(drawn_CAM = CAMdrawn_pre, 
                                           micro_degree = tmp_microIndicator, 
                                           micro_valence = tmp_microIndicator, 
                                           micro_centr_clo = tmp_microIndicator, 
                                           micro_transitivity = tmp_microIndicator, 
                                           largestClique = FALSE)


########################################
# wordlists
########################################
CAMwordlist_pre <- create_wordlist(
  dat_nodes =  CAMfiles_pre[[1]],
  dat_merged =  CAMfiles_pre[[3]],
  useSummarized = TRUE,
  order = "frequency",
  splitByValence = FALSE,
  comments = TRUE,
  raterSubsetWords = NULL,
  rater = FALSE
)
[1] "create_wordlist - use raw words"
[1] 0
[1] 3113
[1] "temporarily suffixes are added, because not all words have been summarized"
processing 227 CAMs... 
[1] "== participantCAM in drawnCAM"
DT::datatable(CAMwordlist_pre, options = list(pageLength = 5)) 

save CAMs as .json files, and as .png (igraph)

save_CAMs_as_pictures = FALSE

if(save_CAMs_as_pictures){
  setwd("outputs")

setwd("savedCAMs_pre")
setwd("png")
### remove all files if there are any
if(length(list.files()) >= 1){
  file.remove(list.files())
  cat('\n!
      all former .png files have been deleted')
}

### if no participant ID was provided, replace it with the randomly generated CAM ID

if(all(CAMfiles_pre[[3]]$participantCAM.x == "noID")){
  CAMfiles_pre[[3]]$participantCAM.x <- CAMfiles_pre[[3]]$CAM.x
}

### save as .json files, and as .png (igraph)
ids_CAMs <- unique(CAMfiles_pre[[3]]$participantCAM.x); length(ids_CAMs)


for(i in 1:length(ids_CAMs)){
  save_graphic(filename = paste0("CAM_", i, "_t1")) #  paste0(ids_CAMs[i]))
  CAM_igraph <- CAMdrawn_pre[[c(1:length(CAMdrawn_pre))[
    names(CAMdrawn_pre) == paste0(unique(CAMfiles_pre[[3]]$participantCAM.x)[i])]]]
  plot(CAM_igraph, edge.arrow.size = .7,
       layout=layout_nicely, vertex.frame.color="black", asp = .5, margin = -0.1,
       vertex.size = 10, vertex.label.cex = .9)
  dev.off()
}

setwd("../json")
### remove all files if there are any
if(length(list.files()) >= 1){
  file.remove(list.files())
  cat('\n!
      all former .json files have been deleted')
}
for(i in 1:length(raw_CAM_pre)){
  if(!is_empty(raw_CAM_pre[[i]]$nodes)){
    if(nrow(raw_CAM_pre[[i]]$nodes) > 5){
      write(toJSON(raw_CAM_pre[[i]], encoding = "UTF-8"),
            paste0(raw_CAM_pre[[i]]$idCAM, ".json"))
    }
  }
}
}

post

Load CAM data

setwd("outputs")
suppressMessages(read_file("secondCAMdata.txt") %>%
  # ... split it into lines ...
  str_split('\n') %>% first() %>%
    discard(function(x) x == '') %>%
    discard(function(x) x == '\r') %>%
  # ... filter empty rows ...
  discard(function(x) x == '')) -> dat_CAM_post

raw_CAM_post <- list()
for(i in 1:length(dat_CAM_post)){
  raw_CAM_post[[i]] <- jsonlite::fromJSON(txt = dat_CAM_post[[i]])
}

Create CAM files, draw CAMs and compute network indicators

########################################
# create CAM single files (nodes, connectors, merged)
########################################
CAMfiles_post <- create_CAMfiles(datCAM = raw_CAM_post, reDeleted = TRUE)
Nodes and connectors, which were deleted by participants were removed. 
 # deleted nodes:  148 
 # deleted connectors:  80
########################################
# draw CAMs
########################################
CAMdrawn_post <- draw_CAM(dat_merged = CAMfiles_post[[3]],
                     dat_nodes = CAMfiles_post[[1]],ids_CAMs = "all",
                     plot_CAM = FALSE,
                     useCoordinates = TRUE,
                     relvertexsize = 3,
                     reledgesize = 1)
processing 227 CAMs... 
[1] "== participantCAM in drawnCAM"
########################################
# compute network indicators
########################################
tmp_microIndicator <- c("Rettungsroboter", "sozialer Assistenzroboter", "Vorteile", "Nachteile")
networkIndicators_post <- compute_indicatorsCAM(drawn_CAM = CAMdrawn_post, 
                                           micro_degree = tmp_microIndicator, 
                                           micro_valence = tmp_microIndicator, 
                                           micro_centr_clo = tmp_microIndicator, 
                                           micro_transitivity = tmp_microIndicator, 
                                           largestClique = FALSE)


########################################
# wordlists
########################################
CAMwordlist_post <- create_wordlist(
  dat_nodes =  CAMfiles_post[[1]],
  dat_merged =  CAMfiles_post[[3]],
  order = "frequency",
  splitByValence = FALSE,
  comments = TRUE,
  raterSubsetWords = NULL,
  rater = FALSE
)
[1] "create_wordlist - use raw words"
[1] 0
[1] 3571
[1] "temporarily suffixes are added, because not all words have been summarized"
processing 227 CAMs... 
[1] "== participantCAM in drawnCAM"
DT::datatable(CAMwordlist_post, options = list(pageLength = 5)) 

save CAMs as .json files, and as .png (igraph)

save_CAMs_as_pictures = FALSE

if(save_CAMs_as_pictures){
  setwd("outputs")

setwd("savedCAMs_post")
setwd("png")
### remove all files if there are any
if(length(list.files()) >= 1){
  file.remove(list.files())
  cat('\n!
      all former .png files have been deleted')
}

### if no participant ID was provided, replace it with the randomly generated CAM ID

if(all(CAMfiles_post[[3]]$participantCAM.x == "noID")){
  CAMfiles_post[[3]]$participantCAM.x <- CAMfiles_post[[3]]$CAM.x
}

### save as .json files, and as .png (igraph)
ids_CAMs <- unique(CAMfiles_post[[3]]$participantCAM.x); length(ids_CAMs)


for(i in 1:length(ids_CAMs)){
  save_graphic(filename = paste0("CAM_", i, "_t2")) #  paste0(ids_CAMs[i], "_t2"))
  CAM_igraph <- CAMdrawn_post[[c(1:length(CAMdrawn_post))[
    names(CAMdrawn_post) == paste0(unique(CAMfiles_post[[3]]$participantCAM.x)[i])]]]
  plot(CAM_igraph, edge.arrow.size = .7,
       layout=layout_nicely, vertex.frame.color="black", asp = .5, margin = -0.1,
       vertex.size = 10, vertex.label.cex = .9)
  dev.off()
}

setwd("../json")
### remove all files if there are any
if(length(list.files()) >= 1){
  file.remove(list.files())
  cat('\n!
      all former .json files have been deleted')
}
for(i in 1:length(raw_CAM_post)){
  if(!is_empty(raw_CAM_post[[i]]$nodes)){
    if(nrow(raw_CAM_post[[i]]$nodes) > 5){
      write(toJSON(raw_CAM_post[[i]], encoding = "UTF-8"),
            paste0(raw_CAM_post[[i]]$idCAM, ".json"))
    }
  }
}
}

identify types of changes (delta CAM)

################
# set A, B, C, D types
################
# !!! i = 215
if (all(unique(CAMfiles_pre[[1]]$participantCAM) == unique(CAMfiles_post[[1]]$participantCAM))) {
  vec_type <- c()
  error <- 0
  verbose = FALSE
  
  ##
  list_newWords_text <- list()
  list_newWords_value <- list()
  list_ids <- list()
  h = 1
  for (i in 1:length(unique(CAMfiles_pre[[1]]$participantCAM))) {
    praeCAM <-
      CAMfiles_pre[[1]][CAMfiles_pre[[1]]$participantCAM == unique(CAMfiles_pre[[1]]$participantCAM)[i],]
    postCAM <-
      CAMfiles_post[[1]][CAMfiles_post[[1]]$participantCAM == unique(CAMfiles_post[[1]]$participantCAM)[i],]
    
    ## to test:
    # praeCAM$text %in% postCAM$text
    # postCAM$text %in% praeCAM$text
    # length(praeCAM$text)
    # length(postCAM$text)
    # praeCAM$text
    # postCAM$text
    
    ## Type A: concepts were only deleted
    if (all(postCAM$text %in% praeCAM$text) &
        length(postCAM$text) < length(praeCAM$text)) {
      vec_type[i] <- "A"
      if (verbose) {
        cat("\n i:", i, "type:", vec_type[i], "\n")
      }
      error = error + 1
    }
    
    ## Type B: concepts were only added
    if (all(praeCAM$text %in% postCAM$text) &
        length(postCAM$text) > length(praeCAM$text)) {
      vec_type[i] <- "B"
      if (verbose) {
        cat("\n i:", i, "type:", vec_type[i], "\n")
      }
      error = error + 1
      
      ## get words and values
      list_newWords_text[[h]] <-
        postCAM$text[!postCAM$text %in% praeCAM$text]
      list_newWords_value[[h]] <-
        postCAM$value[!postCAM$text %in% praeCAM$text]
      list_ids[[h]] <- rep(unique(CAMfiles_pre[[1]]$participantCAM)[i], times = length(list_newWords_value[[h]]))
      
      h = h + 1
    }
    
    ## Type C: identical set of concepts
    if (all(praeCAM$text %in% postCAM$text) &
        all(postCAM$text %in% praeCAM$text)) {
      vec_type[i] <- "C"
      if (verbose) {
        cat("\n i:", i, "type:", vec_type[i], "\n")
      }
      error = error + 1
    }
    
    ## Type D: concepts were both added and deleted
    # neither concept set is a subset of the other
    if (sum(praeCAM$text %in% postCAM$text) < length(praeCAM$text) &
        sum(postCAM$text %in% praeCAM$text) < length(postCAM$text)) {
      vec_type[i] <- "D"
      if (verbose) {
        cat("\n i:", i, "type:", vec_type[i], "\n")
      }
      error = error + 1
    }
    
    if (error > 1) {
      print("ERROR in (not exclusive logical condition)", i)
      stop("check your data and adjust this function")
    }
    error = 0
  }
}


table(vec_type)
vec_type
  A   B   C   D 
  2 142  38  44 
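For reporting, the counts can also be expressed as percentages (a minimal sketch):

# relative frequencies of the change types in percent (sketch)
round(prop.table(table(vec_type)) * 100, 1)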
barplot(table(unlist(list_newWords_value)))

# sort(table(unlist(list_newWords_text)))


dat_newWords <- data.frame(participantCAM = unlist(list_ids), 
                           text = unlist(list_newWords_text), 
                           value = unlist(list_newWords_value))
DT::datatable(dat_newWords, options = list(pageLength = 5)) 
tmp <- dat_newWords %>% 
  group_by(participantCAM) %>%
  dplyr::summarise(mean = mean(value), n = n())


tmp2 <- data.frame(
  typeRobot = questionnaire$choosen_Robot[questionnaire$PROLIFIC_PID %in% dat_newWords$participantCAM],
  participantCAM = tmp$participantCAM,
  mean = tmp$mean,
  n = tmp$n
)


DT::datatable(tmp2, options = list(pageLength = 5)) 
boxplot(tmp2$mean ~ tmp2$typeRobot)

boxplot(tmp2$n ~ tmp2$typeRobot)

analyze data

describe data set

feedback on the study

Question: "Haben Sie Feedback oder Kritik an der Online-Studie?" ("Do you have any feedback or criticism regarding the online study?")

DT::datatable(questionnaire[,c("PROLIFIC_PID", "feedback_critic")], options = list(pageLength = 5)) 

technical problems with CAMEL

DT::datatable(questionnaire[,c("PROLIFIC_PID", str_subset(string = colnames(questionnaire), pattern = "^feedCAM"))], options = list(pageLength = 5)) 
hist(questionnaire$feedCAM_repres)

summary(questionnaire$feedCAM_repres)
   Min. 1st Qu.  Median    Mean 3rd Qu.    Max. 
  2.000   6.000   6.000   5.991   7.000   7.000 

analyze data set

group comparisons

networkIndicators_pre$timepoint <- "pre"
networkIndicators_post$timepoint <- "post"

networkIndicators_post$timepoint <- factor(networkIndicators_post$timepoint, 
                                           levels = c("pre", "post"), 
                                           ordered = FALSE)

networkIndicators <- rbind(networkIndicators_pre, networkIndicators_post)

### add type robot
networkIndicators$typeRobot <- ifelse(test = !is.na(networkIndicators$valence_micro_Rettungsroboter), yes = "rescue robots", no = "socially assistive robots")
table(networkIndicators$typeRobot)

            rescue robots socially assistive robots 
                      242                       212 
### add ID
networkIndicators$ID <- c(1:(nrow(networkIndicators) / 2), 1:(nrow(networkIndicators) / 2))
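Because the repeated-measures analyses below rely on this ID pairing, a quick sanity check may be useful (a minimal sketch):

# sanity check (sketch): every ID should occur exactly twice (pre and post)
stopifnot(all(table(networkIndicators$ID) == 2))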

########################################
# show which robot was on average perceived more positively (overall data set)
########################################
summary(networkIndicators$mean_valence_macro[!is.na(networkIndicators$valence_micro_Rettungsroboter)])
   Min. 1st Qu.  Median    Mean 3rd Qu.    Max. 
-0.6154  0.0000  0.2426  0.2929  0.5000  1.5556 
summary(networkIndicators$mean_valence_macro[!is.na(networkIndicators$valence_micro_sozialerAssistenzroboter)])
    Min.  1st Qu.   Median     Mean  3rd Qu.     Max. 
-2.28571 -0.18182  0.00000  0.05642  0.28571  1.50000 
########################################
# post - pre difference of robot -> average valence
########################################

############
# overall
############
### overall
hist(networkIndicators_post$mean_valence_macro - networkIndicators_pre$mean_valence_macro)

summary(networkIndicators_post$mean_valence_macro - networkIndicators_pre$mean_valence_macro)
     Min.   1st Qu.    Median      Mean   3rd Qu.      Max. 
-0.418269 -0.052427  0.008929  0.051497  0.153846  1.205128 
ggwithinstats(
  data = networkIndicators,
  x = timepoint,
  y = mean_valence_macro
)

# table(networkIndicators$ID, networkIndicators$typeRobot)

############
# separated by robots - mean valence
############
# fit1 <- afex::aov_car(mean_valence_macro ~ timepoint*typeRobot + Error(ID / timepoint),
#                       data = networkIndicators)
# plot(CAMdrawn_pre[[53]])
# plot(CAMdrawn_post[[53]])
# plot(CAMdrawn_pre[[70]])
# plot(CAMdrawn_post[[70]])
# exclude IDs 53 and 70 (CAM pairs plotted above for inspection) from the ANOVA data
networkIndicators_anova <- networkIndicators[!networkIndicators$ID %in% c(53, 70),]

fit1 <- afex::aov_car(mean_valence_macro ~ timepoint*typeRobot + Error(ID / timepoint),
                      data = networkIndicators_anova)
Converting to factor: typeRobot
Contrasts set to contr.sum for the following variables: typeRobot
fit1a <- afex::aov_ez(id = "ID", dv = "mean_valence_macro",
                      data = networkIndicators_anova, between=c("typeRobot"), within=c("timepoint"))
Converting to factor: typeRobot
Contrasts set to contr.sum for the following variables: typeRobot
# partial eta squared
anova(fit1, es = "pes")
Anova Table (Type 3 tests)

Response: mean_valence_macro
                    num Df den Df     MSE       F      pes    Pr(>F)    
typeRobot                1    223 0.37219 15.7907 0.066128 9.555e-05 ***
timepoint                1    223 0.01935 16.0532 0.067153 8.395e-05 ***
typeRobot:timepoint      1    223 0.01935  1.2639 0.005636    0.2621    
---
Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
# generalized eta squared
fit1a # > same F tests and p values; reports generalized instead of partial eta squared
Anova Table (Type 3 tests)

Response: mean_valence_macro
               Effect     df  MSE         F   ges p.value
1           typeRobot 1, 223 0.37 15.79 ***  .063   <.001
2           timepoint 1, 223 0.02 16.05 ***  .004   <.001
3 typeRobot:timepoint 1, 223 0.02      1.26 <.001    .262
---
Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '+' 0.1 ' ' 1
dfvalcor <- data_summary(networkIndicators_anova, varname="mean_valence_macro",
                         groupnames=c("timepoint","typeRobot"))
Loading required package: plyr
Warning: package 'plyr' was built under R version 4.3.2
------------------------------------------------------------------------------
You have loaded plyr after dplyr - this is likely to cause problems.
If you need functions from both plyr and dplyr, please load plyr first, then dplyr:
library(plyr); library(dplyr)
------------------------------------------------------------------------------

Attaching package: 'plyr'
The following objects are masked from 'package:dplyr':

    arrange, count, desc, failwith, id, mutate, rename, summarise,
    summarize
The following object is masked from 'package:purrr':

    compact
p <- ggplot(dfvalcor, aes(x=timepoint, y=mean_valence_macro, fill=typeRobot)) +
  geom_bar(stat="identity", color="black",
           position=position_dodge()) +
  geom_errorbar(aes(ymin=mean_valence_macro-se, ymax=mean_valence_macro+se), width=.2,
                position=position_dodge(.9)) + ggplot_theme + ylab(label = "average emotional evaluation")
print(p)
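data_summary() is a helper defined elsewhere (it loads plyr on first use, hence the masking warning above). A dplyr-only sketch that presumably mirrors its output, i.e. group means plus standard errors with the mean column named after the variable, could look like this; it is an assumed equivalent, not the original helper:

# assumed dplyr-only equivalent of data_summary(): group means and standard errors (sketch)
data_summary_dplyr <- function(data, varname, groupnames) {
  out <- data %>%
    dplyr::group_by(dplyr::across(dplyr::all_of(groupnames))) %>%
    dplyr::summarise(
      mean_value = mean(.data[[varname]], na.rm = TRUE),
      se = stats::sd(.data[[varname]], na.rm = TRUE) / sqrt(dplyr::n()),
      .groups = "drop"
    )
  names(out)[names(out) == "mean_value"] <- varname
  as.data.frame(out)
}
# e.g. data_summary_dplyr(networkIndicators_anova, "mean_valence_macro", c("timepoint", "typeRobot"))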

############
# separated by robots - number of concepts
############
fit1 <- afex::aov_car(num_nodes_macro ~ timepoint*typeRobot + Error(ID / timepoint),
                      data = networkIndicators_anova)
Converting to factor: typeRobot
Contrasts set to contr.sum for the following variables: typeRobot
fit1a <- afex::aov_ez(id = "ID", dv = "num_nodes_macro",
                      data = networkIndicators_anova, between=c("typeRobot"), within=c("timepoint"))
Converting to factor: typeRobot
Contrasts set to contr.sum for the following variables: typeRobot
# partial eta squared
anova(fit1, es = "pes")
Anova Table (Type 3 tests)

Response: num_nodes_macro
                    num Df den Df     MSE        F     pes Pr(>F)    
typeRobot                1    223 21.0662   0.3014 0.00135 0.5836    
timepoint                1    223  2.5553 189.3689 0.45922 <2e-16 ***
typeRobot:timepoint      1    223  2.5553   0.3402 0.00152 0.5603    
---
Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
# generalized eta squared
fit1a # > same F tests and p values; reports generalized instead of partial eta squared
Anova Table (Type 3 tests)

Response: num_nodes_macro
               Effect     df   MSE          F   ges p.value
1           typeRobot 1, 223 21.07       0.30  .001    .584
2           timepoint 1, 223  2.56 189.37 ***  .084   <.001
3 typeRobot:timepoint 1, 223  2.56       0.34 <.001    .560
---
Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '+' 0.1 ' ' 1
dfvalcor <- data_summary(networkIndicators_anova, varname="num_nodes_macro",
                         groupnames=c("timepoint","typeRobot"))
p <- ggplot(dfvalcor, aes(x=timepoint, y=num_nodes_macro, fill=typeRobot)) +
  geom_bar(stat="identity", color="black",
           position=position_dodge()) +
  geom_errorbar(aes(ymin=num_nodes_macro-se, ymax=num_nodes_macro+se), width=.2,
                position=position_dodge(.9)) + ggplot_theme + ylab(label = "number of drawn concepts")
print(p)

########################################
# post - pre difference of robot -> average number of concepts
########################################
ggwithinstats(
  data = networkIndicators,
  x = timepoint,
  y = num_nodes_macro
)

open text answers (adaptive question)

Question: "Ihre angepasste Mind-Map hatte eine durchschnittliche emotionale Bewertung von XXX, diese war im Vergleich zu ihrer anfangs gezeichneten Mind-Map (durchschnittliche emotionale Bewertung von XXX) XXX. Bitte erklären Sie, warum Sie diese XXX wahrgenommen haben:" ("Your adapted mind map had an average emotional evaluation of XXX; compared to the mind map you drew initially (average emotional evaluation of XXX), this was XXX. Please explain why you perceived this XXX:")

questionnaire_secondPostCAM$meanDifferencesCAMs <- round(x = networkIndicators_post$mean_valence_macro - networkIndicators_pre$mean_valence_macro, digits = 2)
DT::datatable(questionnaire_secondPostCAM[,c("meanDifferencesCAMs", "ans1", "feedback_critic")], options = list(pageLength = 5)) 
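The meanDifferencesCAMs assignment above matches rows by position, i.e. it assumes that questionnaire_secondPostCAM and the two network-indicator objects are ordered identically by participant; a minimal consistency check (sketch):

# positional matching is assumed; at least check that the row counts agree (sketch)
stopifnot(nrow(questionnaire_secondPostCAM) == nrow(networkIndicators_post))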

check questions (will be removed)

###
c("63b87d2c9edd47ad702413a1", "5edc8e2eaa0f2295a0fcef85", "6529483ef2abe408d574860a") %in% questionnaire_preCAM$PROLIFIC_PID
[1] TRUE TRUE TRUE
###




tmpID <- questionnaire_preCAM$ID[questionnaire_preCAM$PROLIFIC_PID == "63b87d2c9edd47ad702413a1"]
plot(CAMdrawn_pre[[tmpID]])

plot(CAMdrawn_post[[tmpID]])

tmpID <- questionnaire_preCAM$ID[questionnaire_preCAM$PROLIFIC_PID == "5edc8e2eaa0f2295a0fcef85"]
plot(CAMdrawn_pre[[tmpID]])

plot(CAMdrawn_post[[tmpID]])

tmpID <- questionnaire_preCAM$ID[questionnaire_preCAM$PROLIFIC_PID == "6529483ef2abe408d574860a"]
plot(CAMdrawn_pre[[tmpID]])

plot(CAMdrawn_post[[tmpID]])

c("6523fe0741e7f3a5002ea3fd", "652d6b317dbb2b3e923226f3", "6554e5a51df35f3564d4a327", "6551ecae69c45ecbc9cd6964", "655a149f3cc84dd3699ea3c3") %in% questionnaire_preCAM$PROLIFIC_PID
[1] FALSE FALSE FALSE FALSE FALSE