# Working directory
setwd("C:/Users/vitto/Desktop/Ethicub 2/Study 1/Analisi dati")
# Libraries
library(dplyr)
##
## Attaching package: 'dplyr'
## The following objects are masked from 'package:stats':
##
## filter, lag
## The following objects are masked from 'package:base':
##
## intersect, setdiff, setequal, union
library(magrittr)
library(psych)
library(careless)
## Warning: package 'careless' was built under R version 4.4.2
library(stringr)
Each participant is in a different dataset, so I'm creating a list of all raw datasets.
Vector with all raw data file names:
fileNames <- list.files("./Raw_data") %>%
  {extract(., grepl("\\.csv$", .))} %>%
  {extract(., grepl("2025-06-16|2025-06-23|2025-06-24|2025-06-25", .))}
Empty data.frame with the number and names of columns taken from the first raw dataset:
template <- read.csv(paste0("./Raw_data/", fileNames[1]))
allRaw <- matrix(ncol = ncol(template), nrow = 0) %>%
  data.frame
names(allRaw) <- colnames(template)
Row-binding all raw datasets into one. Participants are distinguished by their Prolific ID.
for (i in seq_along(fileNames)){
  a <- read.csv(paste0("./Raw_data/", fileNames[i]))
  allRaw <- rbind(allRaw, a)
}
Removing rows that don't represent a slider response or a dilemma; those can be analysed in other ways.
allRaw <- allRaw[!is.na(allRaw$slider_1.response)|!is.na(allRaw$Dilemma),]
demogFiles <- list.files("./Demog_data")
demogTemplate <- read.csv(paste0("./Demog_data/", demogFiles[1]))
demog <- matrix(ncol = ncol(demogTemplate), nrow = 0) %>%
  data.frame
names(demog) <- colnames(demogTemplate)
for (i in seq_along(demogFiles)){
  demog <- rbind(demog, read.csv(paste0("./Demog_data/", demogFiles[i])))
}
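As an aside, both read-and-bind loops above could be collapsed into a single step; a minimal sketch, assuming dplyr's bind_rows and that all files within each folder share the same columns:
# Sketch: read every file and row-bind in one pass (assumes identical columns).
allRaw <- fileNames %>%
  lapply(function(f) read.csv(paste0("./Raw_data/", f))) %>%
  bind_rows
demog <- demogFiles %>%
  lapply(function(f) read.csv(paste0("./Demog_data/", f))) %>%
  bind_rows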
Now the dataset looks like this:
head(allRaw)
## Intro.started Intro.stopped B_intro.numClicks B_intro.timesOn
## 1 0 15.772 1 [15.814000000000004]
## 2 NA NA NA
## 3 NA NA NA
## 4 NA NA NA
## 5 NA NA NA
## 6 NA NA NA
## B_intro.timesOff ID.Prolific date expName
## 1 [15.814000000000004] 67693c16ec12422b88ef0bf2 2025-06-16_16h11.28.020 Study1
## 2 67693c16ec12422b88ef0bf2 2025-06-16_16h11.28.020 Study1
## 3 67693c16ec12422b88ef0bf2 2025-06-16_16h11.28.020 Study1
## 4 67693c16ec12422b88ef0bf2 2025-06-16_16h11.28.020 Study1
## 5 67693c16ec12422b88ef0bf2 2025-06-16_16h11.28.020 Study1
## 6 67693c16ec12422b88ef0bf2 2025-06-16_16h11.28.020 Study1
## psychopyVersion OS frameRate R_Dilemma.started R_Dilemma.stopped
## 1 2024.2.4 Win32 60.09615 NA NA
## 2 2024.2.4 Win32 60.09615 15.802 91.166
## 3 2024.2.4 Win32 60.09615 NA NA
## 4 2024.2.4 Win32 60.09615 NA NA
## 5 2024.2.4 Win32 60.09615 NA NA
## 6 2024.2.4 Win32 60.09615 NA NA
## B_dilemma.numClicks B_dilemma.timesOn B_dilemma.timesOff mouse.x
## 1 NA
## 2 1 [91.20100000000001] [91.20100000000001] [0.07421875]
## 3 NA
## 4 NA
## 5 NA
## 6 NA
## mouse.y mouse.leftButton mouse.midButton mouse.rightButton
## 1
## 2 [-0.3971354166666667] [1] [0] [0]
## 3
## 4
## 5
## 6
## mouse.time R_giustif.started R_giustif.stopped B_just.numClicks
## 1 NA NA NA
## 2 [75.349] 91.169 101.673 1
## 3 NA NA NA
## 4 NA NA NA
## 5 NA NA NA
## 6 NA NA NA
## B_just.timesOn B_just.timesOff R_WarmthCompetence_0.started
## 1 NA
## 2 [101.703] [101.703] 101.691
## 3 109.285
## 4 114.813
## 5 116.627
## 6 118.825
## R_WarmthCompetence_0.stopped slider_1.response slider_1.rt trials.thisRepN
## 1 NA NA NA NA
## 2 109.281 3 7.581 0
## 3 114.808 4 5.515 0
## 4 116.624 4 1.805 0
## 5 118.821 3 2.186 0
## 6 122.017 2 3.185 0
## trials.thisTrialN trials.thisN trials.thisIndex trials.ran Adj
## 1 NA NA NA NA
## 2 0 0 4 1 Compassionevole
## 3 1 1 8 1 Interattivo
## 4 2 2 3 1 Biologico
## 5 3 3 2 1 Sociale
## 6 4 4 7 1 Reattivo
## constr Cycle_through_dilemmas.thisRepN Cycle_through_dilemmas.thisTrialN
## 1 NA NA
## 2 W NA NA
## 3 C NA NA
## 4 W NA NA
## 5 W NA NA
## 6 C NA NA
## Cycle_through_dilemmas.thisN Cycle_through_dilemmas.thisIndex
## 1 NA NA
## 2 NA NA
## 3 NA NA
## 4 NA NA
## 5 NA NA
## 6 NA NA
## Cycle_through_dilemmas.ran Condition Opinion Dilemma_Code Dilemma
## 1 NA
## 2 NA
## 3 NA
## 4 NA
## 5 NA
## 6 NA
## Justification Intro_domande.started bottoneSpecialissimo.numClicks
## 1 NA NA
## 2 NA NA
## 3 NA NA
## 4 NA NA
## 5 NA NA
## 6 NA NA
## bottoneSpecialissimo.timesOn bottoneSpecialissimo.timesOff
## 1
## 2
## 3
## 4
## 5
## 6
## Intro_domande.stopped Accordance_With_Robot.started
## 1 NA NA
## 2 NA NA
## 3 NA NA
## 4 NA NA
## 5 NA NA
## 6 NA NA
## Accordance_With_Robot.stopped Accordance_With_Robot_slider.response
## 1 NA NA
## 2 NA NA
## 3 NA NA
## 4 NA NA
## 5 NA NA
## 6 NA NA
## Accordance_With_Robot_slider.rt Utility.started Utility.stopped
## 1 NA NA NA
## 2 NA NA NA
## 3 NA NA NA
## 4 NA NA NA
## 5 NA NA NA
## 6 NA NA NA
## slider.response slider.rt End.started End.stopped button.numClicks
## 1 NA NA NA NA NA
## 2 NA NA NA NA NA
## 3 NA NA NA NA NA
## 4 NA NA NA NA NA
## 5 NA NA NA NA NA
## 6 NA NA NA NA NA
## button.timesOn button.timesOff
## 1
## 2
## 3
## 4
## 5
## 6
Only the following columns are selected:
allRaw <- allRaw %>%
  select(
    ID.Prolific,
    date,
    R_Dilemma.started, # for attention check
    R_Dilemma.stopped, # for attention check
    slider_1.response,
    Adj,
    constr,
    Condition,
    Opinion,
    Dilemma_Code,
    Dilemma
  )
head(allRaw)
## ID.Prolific date R_Dilemma.started
## 1 67693c16ec12422b88ef0bf2 2025-06-16_16h11.28.020 NA
## 2 67693c16ec12422b88ef0bf2 2025-06-16_16h11.28.020 15.802
## 3 67693c16ec12422b88ef0bf2 2025-06-16_16h11.28.020 NA
## 4 67693c16ec12422b88ef0bf2 2025-06-16_16h11.28.020 NA
## 5 67693c16ec12422b88ef0bf2 2025-06-16_16h11.28.020 NA
## 6 67693c16ec12422b88ef0bf2 2025-06-16_16h11.28.020 NA
## R_Dilemma.stopped slider_1.response Adj constr Condition Opinion
## 1 NA NA
## 2 91.166 3 Compassionevole W
## 3 NA 4 Interattivo C
## 4 NA 4 Biologico W
## 5 NA 3 Sociale W
## 6 NA 2 Reattivo C
## Dilemma_Code Dilemma
## 1
## 2
## 3
## 4
## 5
## 6
2.2 Propagating single-line data
We need to propagate Condition, Opinion, Dilemma_Code, and Dilemma upwards.
condition <- NULL
opinion <- NULL
dilemma_code <- NULL
dilemma <- NULL
for (i in nrow(allRaw):1){
  if (allRaw[i,]$Condition == ""){
    allRaw[i,]$Condition <- condition
    allRaw[i,]$Opinion <- opinion
    allRaw[i,]$Dilemma_Code <- dilemma_code
    allRaw[i,]$Dilemma <- dilemma
  } else {
    condition <- allRaw[i,]$Condition
    opinion <- allRaw[i,]$Opinion
    dilemma_code <- allRaw[i,]$Dilemma_Code
    dilemma <- allRaw[i,]$Dilemma
  }
}
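The same propagation could be written more compactly; a sketch, assuming tidyr is available and that the four columns are empty together on exactly the rows the loop fills:
# Sketch: convert "" to NA, then fill the four columns upwards (needs tidyr).
library(tidyr)
allRaw <- allRaw %>%
  mutate(across(c(Condition, Opinion, Dilemma_Code, Dilemma), ~ na_if(., ""))) %>%
  fill(Condition, Opinion, Dilemma_Code, Dilemma, .direction = "up")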
Now I'll remove all the irrelevant rows containing only Condition, Opinion, Dilemma_Code, and Dilemma. They are the ones without a slider response.
rowsToRemove <- allRaw$slider_1.response %>% is.na %>% which
dat <- allRaw[-rowsToRemove,]
# Reversing responses because of my mistake in labeling answers (this applies only to the June 16 participants)
dat$slider_1.response[grepl("2025-06-16",dat$date)] <- 10 - dat$slider_1.response[grepl("2025-06-16",dat$date)]
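# Sanity check (a sketch; assumes the slider ranges from 1 to 9, so 10 - x stays in range)
stopifnot(all(dat$slider_1.response >= 1 & dat$slider_1.response <= 9))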
# Exporting clean data
dat %>% write.csv("cleaData_ethicub2.1.csv")
Some participants forgot to enter their Prolific ID. How many?
cat(
dat$ID.Prolific %>% is.na %>% sum(.)/(nrow(dat)/length(fileNames)),
" participants forgot to input their IDs \n\n
Participants that did input their IDs are:",
unique(dat$ID.Prolific)%>%na.omit%>%paste(collapse = " / "))
## 3 participants forgot to input their IDs
##
##
## Participants that did input their IDs are: 67693c16ec12422b88ef0bf2 / 5f39a2fd2d394917284dd66f / 603d0e0e5b0386b034e93bf5 / 5a5dc26facc75b00017a8374 / 5e60f63ee6385a000bcbffc9 / 5db43b6a2f45e7000bb7ab5c / 5f29510421c44f014deba2f7 / 5f7d002d141e4e1c0e84e98e / 5c0004fc4210510001725547 / 5f21ff288900cb4709dd49aa / 5a77529ff49c9a0001f2fdbd / 5a3e5fb0b77a5000014a755a / 608abc6251feb3ddc3b2e01d / 5ae3003bca00550001e4a4b7 / 6141cd534c8a98741eb01942 / 5e209e63f814783b253a93ed
notReturned <- demog$Participant.id[demog$Status != "RETURNED"]
noID <- notReturned[!notReturned %in% (dat$ID.Prolific %>% unique)]
cat("\n Participants that did not input their IDs are: \n \n",
noID %>%
paste(collapse = " / ")
)
##
## Participants that did not input their IDs are:
##
## 6141cd534c8a98741eb01942 / 5e9c3ff1cf719d151612648f / 5f29510421c44f014deba2f7
Removing the no-ID cases:
dat <- dat[!is.na(dat$ID.Prolific),]
Since we could not include an attention check, we will analyse each participant's standard deviation of answers.
for (i in unique(dat$ID.Prolific)){
  cat("Participant ", i, " has a standard deviation of ")
  dat$slider_1.response[dat$ID.Prolific == i] %>%
    sd %>%
    cat
  cat(" and his answers are: ")
  dat$slider_1.response[dat$ID.Prolific == i] %>%
    cat
  print("________________________")
}
## Participant 67693c16ec12422b88ef0bf2 has a standard deviation of 1.907885 and his answers are: 7 6 6 7 8 7 7 6 5 6 2 3 5 7 2 5 6 7 5 3 7 7 5 7 3 6 4 1 2 4 3 1 2 4 2 2 7 6 6 3 2 8 2 8 7 5 7 7 3 6 5 5 6 4 2 3 2 2 2 3 6 7 2 2 6 6 2 6 5 3 4 3 6 6 2 6 5 7 3 7 6 6 2 6 7 7 7 6 2 4 5 5 6 6 3 7 7 4 4 2 3 5 2 3 5 5 6 6 6 6 2 3 6 5 6 4 6 2 6 5 7 7 2 7 7 2 4 6 5 4 3 3 6 6 3 5 6 7 2 6 2 3 3 4 6 2 3 6 6 6 7 7 6 5 6 5 5 3 2 2 2 4 4 5 4 2 2 3 5 7 2 2 7 2 2 3 8 2 8 6 6 6 6 6 3 2 5 3 5 2 2 5[1] "________________________"
## Participant 5f39a2fd2d394917284dd66f has a standard deviation of 1.831044 and his answers are: 8 5 2 2 1 5 5 5 4 2 5 5 2 3 3 3 1 1 4 4 6 5 3 3 3 3 3 2 4 2 2 2 2 2 2 4 2 5 5 4 3 6 2 2 2 2 5 6 3 6 4 7 2 7 7 4 5 2 5 4 2 2 2 6 5 5 2 2 6 6 4 4 5 5 5 3 3 2 2 2 6 5 6 6 2 2 2 2 2 2 2 2 2 2 2 5 2 2 7 2 2 2 2 2 2 1 1 1 2 5 4 5 3 3 3 2 2 2 6 3 6 6 3 6 6 3 3 5 3 3 7 7 2 2 5 2 2 5 7 4 5 7 5 5 2 2 2 5 5 3 5 5 5 3 5 5 7 7 7 5 7 7 7 5 6 6 8 8 2 2 2 2 2 2 2 2 6 2 2 2 3 3 1 1 3 5 5 5 5 3 5 5[1] "________________________"
## Participant 603d0e0e5b0386b034e93bf5 has a standard deviation of 2.111941 and his answers are: 2 4 2 4 3 4 5 4 3 4 1 7 2 6 1 2 4 5 3 2 2 5 6 6 4 7 5 7 4 8 4 5 7 7 5 1 3 8 5 3 6 8 7 7 5 2 5 8 3 6 7 7 7 1 6 7 8 7 7 7 4 4 4 5 6 6 6 1 5 5 1 7 5 6 8 7 1 6 6 6 7 7 7 1 3 5 6 6 7 6 1 5 4 4 5 6 4 6 7 7 7 7 5 2 4 8 1 4 4 1 6 2 3 7 4 7 7 5 7 7 1 6 1 4 6 7 4 7 7 7 3 6 7 2 2 2 6 2 5 6 3 1 7 7 6 6 6 5 1 4 4 6 6 7 7 7 3 3 3 6 3 7 4 8 1 2 7 7 7 1 6 6 6 1 7 7 6 7 7 7 7 6 6 6 7 7 7 7 7 1 6 1[1] "________________________"
## Participant 5a5dc26facc75b00017a8374 has a standard deviation of 2.060276 and his answers are: 4 1 7 7 8 8 8 9 9 8 7 7 6 7 8 9 8 2 7 8 7 8 7 9 2 8 6 8 7 7 5 3 8 6 8 5 6 5 8 7 7 5 2 8 9 3 6 7 9 8 9 7 8 6 7 8 9 9 3 8 8 9 9 8 9 9 8 8 9 4 9 7 4 6 7 4 6 4 7 2 3 6 4 6 6 9 6 6 6 6 8 9 5 5 8 3 8 8 7 8 9 7 4 6 6 6 7 7 7 9 7 5 6 9 8 8 9 9 8 8 9 8 9 8 9 8 9 9 8 8 4 6 4 4 6 6 7 6 4 4 4 6 2 5 7 3 7 2 5 3 3 6 7 6 4 7 7 6 6 7 3 3 5 7 8 3 5 3 8 8 5 8 6 2 5 6 6 7 3 3 8 8 8 3 3 8 5 3 2 5 4 8[1] "________________________"
## Participant 5e60f63ee6385a000bcbffc9 has a standard deviation of 2.322964 and his answers are: 7 9 2 6 7 5 7 9 7 7 8 6 6 8 6 3 7 9 5 9 9 9 8 7 4 9 9 7 9 8 7 9 8 9 7 8 7 7 8 7 7 5 7 8 6 7 8 7 8 5 8 8 7 7 7 7 7 6 6 6 6 8 8 5 3 7 8 5 9 5 4 9 5 3 3 8 3 8 8 2 5 9 9 9 7 8 3 7 8 7 7 9 6 9 9 8 3 3 4 9 9 9 2 1 2 9 9 9 8 9 9 3 3 8 1 1 3 9 9 1 8 8 9 9 8 8 9 8 8 8 7 5 8 6 1 8 8 2 2 7 5 5 8 9 8 9 7 6 7 9 5 9 9 9 6 9 7 6 8 7 5 3 3 9 6 3 8 7 9 9 5 6 8 7 9 8 8 6 9 6 9 3 3 1 9 1 9 9 9 8 1 3[1] "________________________"
## Participant 5db43b6a2f45e7000bb7ab5c has a standard deviation of 1.399317 and his answers are: 1 2 2 1 1 1 1 5 2 4 3 1 1 2 1 1 1 1 1 1 1 3 1 1 1 2 1 1 4 1 1 1 1 1 1 1 4 1 1 1 1 1 2 1 1 4 1 1 1 4 1 1 3 1 1 1 1 3 1 1 2 4 1 1 1 3 1 2 1 1 2 3 1 4 4 1 1 4 1 1 1 1 3 2 6 1 1 1 1 1 3 2 1 5 3 1 1 1 1 2 1 5 1 1 3 1 1 3 2 1 1 1 1 3 4 1 4 1 1 1 1 1 2 1 4 1 1 1 3 1 1 1 1 5 1 1 1 1 2 1 4 5 2 4 2 1 3 3 1 1 1 1 3 1 3 2 1 5 1 1 1 2 1 1 1 1 2 1 1 1 3 1 1 4 1 1 1 1 3 3 1 7 7 1 5 5 1 1 1 8 1 1[1] "________________________"
## Participant 5f29510421c44f014deba2f7 has a standard deviation of 0.8406528 and his answers are: 6 5 6 6 4 5 6 6 6 6 6 5 6 6 4 5 5 6 5 4 5 4 4 6 7 5 5 6 6 5 6 5 4 5 5 4 5 5 5 5 4 5 5 6 4 5 4 5 4 6 4 5 3 3 4 4 5 4 4 5 6 5 4 5 5 5 4 5 4 5 4 5 5 5 6 6 5 5 6 4 5 6 5 4 6 5 6 5 3 5 5 5 6 5 6 5 7 4 7 6 6 6 6 7 6 5 6 6 7 4 5 6 6 6 6 5 5 5 4 6 5 5 5 6 6 6 5 6 5 5 5 4 4 6 6 6 6 6 5 5 5 6 6 5 4 6 6 6 6 5 5 4 4 5 5 6 5 7 5 6 5 6 6 6 6 6 6 5 7 7 7 6 6 5 6 6 6 6 5 6 4 5 5 5 5 5 5 5 5 6 5 5[1] "________________________"
## Participant 5f7d002d141e4e1c0e84e98e has a standard deviation of 2.887054 and his answers are: 6 1 5 2 4 3 9 1 1 9 9 9 6 6 8 4 7 9 7 9 6 9 6 5 9 9 7 9 7 7 9 6 6 1 8 8 5 7 7 9 7 7 5 7 1 8 6 6 9 6 9 5 6 7 6 6 6 1 5 9 7 3 9 3 1 5 9 9 5 7 4 5 8 2 9 8 1 2 7 2 2 5 8 2 5 6 7 7 5 6 9 9 8 9 8 1 9 9 6 6 7 9 9 6 6 7 7 1 5 5 5 5 4 5 4 3 5 3 1 3 9 2 9 9 1 1 8 8 9 2 1 1 1 1 2 1 1 9 5 9 9 9 2 9 1 1 1 7 1 8 1 8 8 7 1 8 2 8 1 1 7 9 9 9 9 1 2 1 2 2 7 7 7 1 8 6 1 1 1 9 5 5 6 5 5 7 7 7 6 1 3 8[1] "________________________"
## Participant 5c0004fc4210510001725547 has a standard deviation of 1.515544 and his answers are: 5 4 7 7 7 7 6 7 7 3 8 5 3 5 2 5 7 5 5 7 3 5 5 7 5 5 7 7 7 5 3 5 4 3 2 8 2 3 3 5 4 7 5 5 4 5 4 5 3 5 7 5 5 4 5 4 5 3 4 7 5 3 5 3 7 7 5 4 4 5 4 5 4 5 2 2 7 5 5 7 3 5 5 2 7 5 4 7 5 3 5 5 3 5 6 6 5 2 4 7 4 7 5 8 3 5 8 2 5 7 5 7 3 5 5 6 5 5 6 7 7 5 3 5 8 8 7 6 5 5 5 4 8 3 8 5 4 3 5 6 5 7 8 5 6 5 6 6 5 7 7 5 5 5 6 7 7 4 5 5 4 5 6 7 5 4 5 5 5 3 5 6 4 5 4 5 6 6 6 6 5 6 5 7 2 2 4 5 5 7 3 4[1] "________________________"
## Participant 5f21ff288900cb4709dd49aa has a standard deviation of 1.003797 and his answers are: 8 9 8 7 8 7 8 7 9 7 8 7 7 7 7 8 9 8 7 8 9 7 9 8 7 6 7 8 9 8 9 8 6 6 7 8 9 8 8 7 7 8 9 8 9 7 8 7 8 9 8 9 8 7 8 7 8 7 8 8 9 9 7 9 8 9 6 9 6 6 7 9 7 6 9 8 8 9 8 8 6 7 9 8 8 9 6 8 9 9 8 9 6 6 6 9 6 9 8 9 6 8 9 6 9 8 6 9 8 6 9 8 9 6 7 8 6 6 7 8 8 8 9 8 7 9 9 8 8 7 8 8 9 6 8 9 7 6 8 9 6 8 7 8 9 8 8 8 8 9 8 8 8 7 8 7 7 8 9 7 7 8 9 7 8 9 8 7 7 8 6 6 7 9 9 9 8 9 6 6 9 9 9 8 9 8 7 7 8 7 8 7[1] "________________________"
## Participant 5a77529ff49c9a0001f2fdbd has a standard deviation of 2.210008 and his answers are: 8 3 9 9 9 9 4 6 4 7 1 8 6 9 7 6 8 9 5 8 7 4 7 7 9 8 7 7 8 8 6 7 7 9 3 9 7 7 8 6 5 9 8 9 8 7 8 9 9 9 6 5 5 7 6 7 7 8 8 7 2 6 5 7 7 6 5 6 7 7 7 7 7 9 7 5 7 7 8 7 7 9 5 8 8 7 2 1 5 9 9 1 4 7 8 3 7 4 8 4 8 2 4 6 7 7 3 9 9 5 3 6 3 8 8 3 4 3 6 7 3 2 7 6 7 1 9 9 8 4 5 1 7 8 7 2 5 8 7 4 6 7 5 8 9 8 5 7 7 5 4 6 7 8 7 2 9 9 9 9 9 6 7 8 5 5 5 6 5 3 3 1 1 6 9 1 9 1 9 9 6 5 7 8 9 5 6 7 4 6 7 7[1] "________________________"
## Participant 5a3e5fb0b77a5000014a755a has a standard deviation of 2.444492 and his answers are: 3 5 9 8 9 9 9 7 9 8 9 9 1 3 1 7 2 1 1 3 3 2 1 1 2 4 3 1 4 5 3 1 1 6 1 1 1 5 1 1 5 4 5 5 5 6 4 5 6 6 6 7 7 7 7 7 5 4 8 8 6 9 8 5 2 7 9 9 8 5 8 7 9 9 7 8 9 7 7 9 8 9 9 9 7 6 8 7 8 9 9 9 9 9 5 8 3 2 2 6 6 6 4 3 3 2 2 2 3 2 2 2 2 2 2 5 4 4 3 2 6 6 7 7 8 8 8 8 7 6 6 6 6 4 5 5 5 5 5 4 4 5 5 6 2 2 2 2 4 3 2 2 2 5 5 2 7 7 7 7 6 6 7 5 5 6 7 6 6 4 7 5 5 5 4 4 5 6 5 4 2 3 3 4 3 6 6 6 6 6 4 5[1] "________________________"
## Participant 608abc6251feb3ddc3b2e01d has a standard deviation of 1.053217 and his answers are: 3 7 4 5 7 5 4 4 7 5 7 7 7 7 7 7 7 7 7 4 6 5 7 5 4 5 6 5 6 4 5 6 6 7 5 6 6 5 6 5 6 6 4 4 5 6 7 6 4 4 5 4 4 6 6 6 6 6 4 5 4 4 4 6 6 6 5 4 4 4 6 6 6 6 6 6 6 6 6 4 6 6 6 6 6 6 4 6 6 6 4 4 5 6 5 6 4 4 6 7 5 6 4 6 6 6 6 6 4 4 5 6 7 6 7 5 6 4 7 4 6 6 6 4 4 4 4 6 6 6 4 6 4 6 4 6 6 4 8 7 4 4 4 6 6 6 6 5 7 7 5 6 7 5 6 4 4 6 6 5 5 6 5 7 7 4 7 7 4 6 6 6 4 6 5 5 6 4 6 6 6 6 5 4 4 6 6 6 6 6 6 6[1] "________________________"
## Participant 5ae3003bca00550001e4a4b7 has a standard deviation of 1.600875 and his answers are: 7 7 5 7 7 5 7 7 8 8 6 8 5 5 8 5 8 5 8 8 7 5 6 7 8 8 8 8 7 8 8 6 5 4 7 4 7 8 8 8 5 4 4 5 8 5 6 5 4 4 7 4 5 3 7 7 5 7 7 5 8 5 8 8 5 8 8 5 4 5 4 5 8 5 5 3 3 8 3 8 7 5 3 7 4 8 4 5 5 5 8 3 8 4 4 8 8 6 5 7 7 7 5 5 7 7 5 5 3 3 6 7 5 4 7 3 3 8 8 8 8 8 6 8 3 8 5 8 8 5 5 5 5 5 6 8 8 5 8 8 3 5 6 8 6 6 7 4 5 5 7 5 5 6 6 5 4 4 3 6 6 6 5 3 5 3 7 5 7 5 6 5 5 4 7 7 7 5 7 7 4 6 5 8 6 5 7 5 5 6 3 6[1] "________________________"
## Participant 6141cd534c8a98741eb01942 has a standard deviation of 2.477032 and his answers are: 6 7 6 3 5 6 5 6 1 1 1 1 6 5 7 6 5 5 2 6 1 1 1 1 1 6 6 7 7 1 7 1 1 6 1 1 1 7 1 1 7 7 8 1 6 1 7 1 2 7 7 1 7 2 7 7 1 1 1 8 6 1 1 1 6 5 1 6 1 1 6 6 1 6 1 6 5 5 6 1 1 1 6 1 1 6 1 1 7 1 4 4 4 1 4 1 7 1 6 6 1 1 1 2 6 6 5 1 7 1 5 5 1 1 4 1 4 1 1 1 3 1 6 1 2 1 6 3 1 1 3 1 1 6 3 3 1 1 1 4 1 1 3 6 7 5 1 1 1 1 1 1 1 5 3 6 1 5 6 1 6 1 6 1 1 4 1 4 1 7 8 8 2 7 7 1 6 1 1 1 6 4 1 1 1 1 4 1 5 1 5 5[1] "________________________"
## Participant 5e209e63f814783b253a93ed has a standard deviation of 1.894438 and his answers are: 8 3 5 4 1 1 8 7 2 7 3 5 6 5 6 4 6 1 3 4 6 7 2 3 2 7 3 6 8 3 1 3 7 8 8 5 7 4 7 5 4 7 5 5 1 3 3 6 4 7 8 5 8 2 1 6 4 4 8 8 2 2 7 5 6 3 4 6 1 5 1 4 4 4 3 4 6 5 1 6 4 6 3 5 9 5 5 4 6 8 5 7 3 4 3 1 4 5 6 4 5 4 6 3 4 3 5 5 4 6 3 6 6 6 6 2 2 1 4 6 5 3 6 1 6 6 7 5 6 4 6 5 5 1 5 6 6 4 4 3 5 4 3 3 3 3 1 3 6 5 4 6 6 6 7 3 6 4 3 1 6 5 4 4 6 3 3 6 7 4 3 3 2 6 1 5 5 4 4 2 3 5 3 4 3 6 4 1 4 3 4 2[1] "________________________"
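The same per-participant summary can be obtained in one expression; a sketch using dplyr:
# Sketch: per-participant SD of slider responses as a tidy summary table.
dat %>%
  group_by(ID.Prolific) %>%
  summarise(sd_response = sd(slider_1.response))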
We also have access to the reading time, so we can calculate the reading speed for the first time each participant was presented with the stimulus.
timeData <-
  # creating a data frame with only ID, time, and dilemma, because most columns are not needed here
  cbind.data.frame(
    ID = dat$ID.Prolific,
    start = dat$R_Dilemma.started,
    finish = dat$R_Dilemma.stopped,
    dilemma = dat$Dilemma_Code) %>%
  na.omit %>%
  # selecting only the rows in which the dilemma was presented for the first time
  group_by(ID, dilemma) %>%
  slice_min(start, n = 1) %>%
  ungroup
Adding a read-delay column:
timeData <- timeData %>%
  mutate(
    readDelay = finish - start
  )
What is the minimal human time to read the average dilemma?
minTime <- str_count(dat$Dilemma, boundary("word")) %>%
  mean %>%
  # The literature suggests 3 words per second; let's take 5 to be very generous
  {./5}
avgTime <- str_count(dat$Dilemma, boundary("word")) %>%
  mean %>%
  # The literature suggests 3 words per second
  {./3}
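To make the thresholds concrete, we can print them (the values depend on the dilemma texts):
# Inspect the reading-time thresholds actually used, in seconds.
cat("minTime:", minTime, "s; avgTime:", avgTime, "s\n")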
Plotting individual reading times. The red line marks the speed of extremely fast readers; the green line marks the reading speed accepted in the literature.
for (i in unique(timeData$ID)){
  # bar plot of first-reading delays per dilemma for each participant
  barplot(timeData$readDelay[timeData$ID == i],
          names.arg = timeData$dilemma[timeData$ID == i],
          ylim = c(0, 100),
          main = i)
  abline(h = minTime, col = "red")
  abline(h = avgTime, col = "green")
}
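Rather than relying on the plots alone, suspiciously fast readers can also be flagged programmatically; a sketch, where the "more than half the trials below minTime" cutoff is my assumption, not a rule fixed by the study:
# Sketch: flag participants whose first readings are mostly below minTime.
timeData %>%
  group_by(ID) %>%
  summarise(tooFast = sum(readDelay < minTime), trials = n()) %>%
  filter(tooFast > trials / 2)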
Excluding the participants flagged by the checks above:
blackList <- c(
  "5f29510421c44f014deba2f7",
  "5db43b6a2f45e7000bb7ab5c")
dat <- dat[!dat$ID.Prolific %in% blackList, ]
dat$ID.Prolific %>% unique
## [1] "67693c16ec12422b88ef0bf2" "5f39a2fd2d394917284dd66f"
## [3] "603d0e0e5b0386b034e93bf5" "5a5dc26facc75b00017a8374"
## [5] "5e60f63ee6385a000bcbffc9" "5f7d002d141e4e1c0e84e98e"
## [7] "5c0004fc4210510001725547" "5f21ff288900cb4709dd49aa"
## [9] "5a77529ff49c9a0001f2fdbd" "5a3e5fb0b77a5000014a755a"
## [11] "608abc6251feb3ddc3b2e01d" "5ae3003bca00550001e4a4b7"
## [13] "6141cd534c8a98741eb01942" "5e209e63f814783b253a93ed"
# Plotting confidence intervals
error.bars(cbind(
  "C-C" = dat$slider_1.response[dat$constr == "C" & dat$Condition == "C"],
  "C-W" = dat$slider_1.response[dat$constr == "C" & dat$Condition == "W"],
  "W-W" = dat$slider_1.response[dat$constr == "W" & dat$Condition == "W"],
  "W-C" = dat$slider_1.response[dat$constr == "W" & dat$Condition == "C"]),
  eyes = FALSE,
  main = "Questionnaire scores, construct-condition")
par(xpd = FALSE)
abline(v = 2.5)
# t-test, one-tailed, paired by ID, dilemma, and the robot's opinion
temp_data <- dat %>%
  filter(constr == "C") %>%
  arrange(ID.Prolific, Dilemma_Code, Opinion)
t <- t.test(temp_data$slider_1.response[temp_data$Condition == "W"],
            temp_data$slider_1.response[temp_data$Condition == "C"],
            alternative = "less",
            paired = TRUE
)
print(t)
##
## Paired t-test
##
## data: temp_data$slider_1.response[temp_data$Condition == "W"] and temp_data$slider_1.response[temp_data$Condition == "C"]
## t = -5.6057, df = 671, p-value = 1.516e-08
## alternative hypothesis: true mean difference is less than 0
## 95 percent confidence interval:
## -Inf -0.2826776
## sample estimates:
## mean difference
## -0.4002976
t2d(t$statistic,n=nrow(temp_data))
## t
## -0.3058155
temp_data <- dat %>%
  filter(constr == "W") %>%
  arrange(ID.Prolific, Dilemma_Code, Opinion)
t <- t.test(temp_data$slider_1.response[temp_data$Condition == "W"],
            temp_data$slider_1.response[temp_data$Condition == "C"],
            alternative = "greater",
            paired = TRUE
)
print(t)
##
## Paired t-test
##
## data: temp_data$slider_1.response[temp_data$Condition == "W"] and temp_data$slider_1.response[temp_data$Condition == "C"]
## t = 8.5263, df = 671, p-value < 2.2e-16
## alternative hypothesis: true mean difference is greater than 0
## 95 percent confidence interval:
## 0.6843549 Inf
## sample estimates:
## mean difference
## 0.8482143
t2d(t$statistic,n=nrow(temp_data))
## t
## 0.4651487
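The paired tests rely on the W and C subsets lining up row-by-row after the arrange; a sketch of a sanity check for that assumption:
# Sketch: verify that the W and C halves are aligned on ID, dilemma, and opinion
# (assumes exactly one W row and one C row per ID x dilemma x opinion cell).
w_rows <- temp_data[temp_data$Condition == "W", ]
c_rows <- temp_data[temp_data$Condition == "C", ]
stopifnot(all(w_rows$ID.Prolific == c_rows$ID.Prolific),
          all(w_rows$Dilemma_Code == c_rows$Dilemma_Code),
          all(w_rows$Opinion == c_rows$Opinion))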
We can conclude that the warm and competent styles are perceived as different, and that this difference is successfully measured by the scale.
Between Design
set.seed(1)
IDs <- dat$ID.Prolific %>% unique
numberParticipants <- IDs %>% length
warmParticipants <- IDs %>%
  sample(numberParticipants / 2)
compParticipants <- IDs[!IDs %in% warmParticipants]
warmParticipants
## [1] "5a77529ff49c9a0001f2fdbd" "5a5dc26facc75b00017a8374"
## [3] "5c0004fc4210510001725547" "67693c16ec12422b88ef0bf2"
## [5] "5f39a2fd2d394917284dd66f" "5ae3003bca00550001e4a4b7"
## [7] "603d0e0e5b0386b034e93bf5"
compParticipants
## [1] "5e60f63ee6385a000bcbffc9" "5f7d002d141e4e1c0e84e98e"
## [3] "5f21ff288900cb4709dd49aa" "5a3e5fb0b77a5000014a755a"
## [5] "608abc6251feb3ddc3b2e01d" "6141cd534c8a98741eb01942"
## [7] "5e209e63f814783b253a93ed"
cbind("W"=dat$slider_1.response[dat$ID.Prolific==warmParticipants & dat$Condition=="W" & dat$constr=="W"],
"C"=dat$slider_1.response[dat$ID.Prolific==compParticipants & dat$Condition=="C" & dat$constr=="W"]) %>%
error.bars(eyes=F, main="Warmth", xlab="Condition", ylab="Questionnaire Score")
## Warning in cbind(W = dat$slider_1.response[dat$ID.Prolific == warmParticipants
## & : number of rows of result is not a multiple of vector length (arg 2)
cbind("W"=dat$slider_1.response[dat$ID.Prolific==warmParticipants & dat$Condition=="W" & dat$constr=="C"],
"C"=dat$slider_1.response[dat$ID.Prolific==compParticipants & dat$Condition=="C" & dat$constr=="C"]) %>%
error.bars(eyes=F,main="Competence", xlab="Condition", ylab="Questionnaire Score")
## Warning in cbind(W = dat$slider_1.response[dat$ID.Prolific == warmParticipants
## & : number of rows of result is not a multiple of vector length (arg 1)
Between participants we do not observe a statistically significant difference.