### FITTING THE DISTRIBUTION OF SENSITIVITY (D-PRIME) DATA
rm(list = ls())
library(MASS)
library(actuar)
##
## Attaching package: 'actuar'
## The following object is masked from 'package:grDevices':
##
## cm
library(fitdistrplus)
## Loading required package: survival
data_d <- read.delim("~/Dropbox/Alla-project/Paper-biases/Data-Analysis/Perceptual_matching/d-prime.txt")
my_data <- data_d$Sensitivity
print(my_data)
## [1] 1.617846 2.199920 2.463661 2.403506 2.525875 1.983975 2.252651 3.059552
## [9] 2.264424 3.470939 3.681091 3.503001 3.148150 2.635407 2.359741 3.499326
## [17] 3.463365 4.141430 4.885677 4.274446 3.105771 2.737890 2.778319 3.311161
## [25] 3.681091 3.302946 2.330829 3.933658 2.784145 5.276515 2.784282 3.322602
## [33] 4.885677 2.796496 2.828036 3.272504 1.446170 1.837026 3.075050 2.697618
## [41] 2.549133 2.216705 2.562440 2.727774 2.968471 2.353642 3.220047 3.148737
## [49] 3.147480 3.292400 2.300992 1.781014 3.370770 3.382560 2.671208 3.253881
## [57] 3.207158 2.594942 3.145141 3.388536 1.880776 2.083861 2.394033 2.875816
## [65] 3.164569 2.489211 2.165527 2.160334 1.919576 2.894646 2.486148 3.677452
## [73] 2.698405 2.285093 2.273094 3.000956 3.463092 4.284888 2.830486 3.564732
## [81] 2.894994 3.343499 3.092874 2.740180 2.490574 3.246236 2.619035 3.071379
## [89] 2.709835 3.557011 2.949270 2.740155 3.793647 2.826044 2.531403 3.075102
## [97] 2.371603 2.114574 2.570751 2.925569 2.624914 2.566906 2.420628 2.284502
## [105] 1.891519 2.438751 2.822283 3.304659 3.100244 2.920151 2.688602 2.091876
## [113] 2.779947 2.722402 2.420628 2.886101 2.736885 3.332747 2.594051 2.981151
## [121] 1.662597 2.199920 2.522106 2.115727 3.079429 1.862971 2.016209 1.896546
## [129] 1.444945 2.127285 3.455430 3.322602 3.474889 2.853277 2.574531 4.284888
## [137] 3.778984 4.486009 3.435158 3.431519 2.620669 2.947894 2.944820 3.161681
## [145] 4.284888 3.843898 3.650097 4.482805 3.358418 3.627892 3.071215 2.552846
## [153] 3.388536 2.159529 2.458340 3.158554 3.160618 1.940120 2.884762 3.062832
## [161] 2.912225 2.569922 2.623811 2.190094 2.251822 3.000956 2.514072 3.824549
## [169] 3.051495 2.834820 1.882303 2.194539 1.919178 3.000956 2.732550 3.430944
## [177] 2.953483 2.519004 2.671208 3.367416 2.026698 2.359741 2.128035 2.619035
## [185] 2.326386 1.323496 2.315023 2.311236 1.954739 2.247875 3.824549 3.824549
## [193] 3.568536 2.920151 2.120910 3.723505 4.130913 4.284888 3.677452 3.509894
## [201] 2.717185 2.992675 3.834622 4.184603 3.568536 3.501106 2.394570 3.062666
## [209] 3.220791 2.944537 2.857502 2.948557 3.928790 3.583806 3.128454 2.904972
## [217] 3.587445 2.290905 3.198849 3.005804 2.549957 2.671208 1.968220 2.418493
## [225] 2.550694 1.922223 2.939823 4.537894 3.681091 3.199417 1.812074 2.044186
## [233] 2.889096 3.215546 3.145141 2.981130 2.633369 2.317410 2.194937 2.660066
## [241] 2.615999 2.623811 2.953559 2.966128 2.771461 2.736885 2.078579 2.010632
## [249] 2.531999 2.252651 3.008041 2.301305 2.377256 3.016429 3.072637 3.575473
## [257] 4.044460 3.927287 4.885677 4.137791 3.072455 3.377292 2.669543 3.808374
## [265] 3.681091 3.634266 3.009565 3.463145 3.020257 4.137791 2.448506 4.001943
## [273] 4.414880 3.506640 3.094602 2.770326 4.885677 1.698441 2.421747 3.087690
## [281] 2.465918 2.498560 2.774892 2.384897 2.493793 2.949033 2.866435 3.370770
## [289] 3.207331 2.120910 2.005690 2.100033 2.823156 3.564732 2.894994 3.003811
## [297] 3.164569 2.595487 2.884646 3.259238 2.433975 2.053257 2.848669 2.574531
## [305] 3.083763 1.796858 2.078480 2.311236 2.130669 2.234554 2.770122 4.284119
## [313] 2.692167 3.357633 2.844455 4.070121 2.595473 3.931278 2.953559 3.782941
## [321] 2.947894 3.083763 3.114525 3.688907 3.455430 4.003035 3.398189 3.567762
## [329] 2.949270 3.296487 2.563826 3.164569 3.020879 2.277974 2.476381 2.540781
## [337] 3.578615 1.979330 2.516410 2.554279 2.037864 2.560555 1.699767 2.677530
## [345] 2.462293 2.407138 2.595443 3.254411 3.003733 2.083861 1.882920 1.785837
## [353] 3.047205 2.962475 2.289899 2.334441 2.312634 2.339568 2.566270 2.813808
plotdist(my_data, histo = TRUE, demp = TRUE) #plotdist is called for its plotting side effect and returns NULL, so there is no need to store or print its result
descdist(my_data, boot = 1000)
## summary statistics
## ------
## min: 1.323496 max: 5.276515
## median: 2.85539
## mean: 2.892866
## estimated sd: 0.6676109
## estimated skewness: 0.5515886
## estimated kurtosis: 3.425584
#Before fitting the d-prime data, we plotted the empirical distribution and computed descriptive statistics (median: 2.855; mean: 2.893; estimated sd: 0.668; estimated skewness: 0.552; estimated kurtosis: 3.426). The non-zero skewness reveals a slight asymmetry in the empirical distribution. Because the kurtosis is not far from 3, fitting three common right-skewed distributions can be considered: the Weibull, gamma, and lognormal distributions.
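#As a cross-check, the moment-based skewness and kurtosis reported above can be computed directly from the sample moments (a minimal sketch; descdist applies small-sample corrections, so the values may differ slightly):
m <- mean(my_data)
m2 <- mean((my_data - m)^2) #second central moment
skew <- mean((my_data - m)^3) / m2^1.5 #third standardized moment
kurt <- mean((my_data - m)^4) / m2^2   #fourth standardized moment (3 for a normal)
c(skewness = skew, kurtosis = kurt)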
fit_w <- fitdist(my_data, "weibull")
fit_g <- fitdist(my_data, "gamma")
fit_ln <- fitdist(my_data, "lnorm")
summary(fit_w)
## Fitting of the distribution ' weibull ' by maximum likelihood
## Parameters :
## estimate Std. Error
## shape 4.488962 0.17102547
## scale 3.158547 0.03931324
## Loglikelihood: -377.0102 AIC: 758.0203 BIC: 765.7926
## Correlation matrix:
## shape scale
## shape 1.0000000 0.3319669
## scale 0.3319669 1.0000000
summary(fit_g)
## Fitting of the distribution ' gamma ' by maximum likelihood
## Parameters :
## estimate Std. Error
## shape 19.086467 1.4103648
## rate 6.597789 0.4939879
## Loglikelihood: -356.0185 AIC: 716.0371 BIC: 723.8093
## Correlation matrix:
## shape rate
## shape 1.0000000 0.9869337
## rate 0.9869337 1.0000000
summary(fit_ln)
## Fitting of the distribution ' lnorm ' by maximum likelihood
## Parameters :
## estimate Std. Error
## meanlog 1.0358260 0.01218039
## sdlog 0.2311066 0.00861211
## Loglikelihood: -356.3599 AIC: 716.7197 BIC: 724.4919
## Correlation matrix:
## meanlog sdlog
## meanlog 1 0
## sdlog 0 1
gofstat(list(fit_w, fit_g, fit_ln), fitnames = c("weibull", "gamma", "lognormal"))
## Goodness-of-fit statistics
## weibull gamma lognormal
## Kolmogorov-Smirnov statistic 0.07584538 0.02887265 0.03229409
## Cramer-von Mises statistic 0.48520448 0.02448589 0.03858867
## Anderson-Darling statistic 3.41369088 0.17084687 0.21177726
##
## Goodness-of-fit criteria
## weibull gamma lognormal
## Akaike's Information Criterion 758.0203 716.0371 716.7197
## Bayesian Information Criterion 765.7926 723.8093 724.4919
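#The same criteria can also be pulled straight from the fit objects (fitdist stores them as $aic and $bic); a minimal programmatic comparison:
sapply(list(weibull = fit_w, gamma = fit_g, lognormal = fit_ln),
       function(f) c(AIC = f$aic, BIC = f$bic))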
par(mfrow=c(2,2))
plot.legend <- c("weibull", "gamma", "lognormal")
#We then compared the fits of the Weibull, gamma, and lognormal distributions to the d-prime data set. The Q-Q plot shows a lack of fit at the tails of the distributions, while the P-P plot shows that all three fitted distributions describe the centre of the distribution well. The density plot and histogram indicate that the gamma distribution may be preferred because it captures the data better.
denscomp(list(fit_w, fit_g, fit_ln), legendtext = plot.legend)
cdfcomp(list(fit_w, fit_g, fit_ln), legendtext = plot.legend)
qqcomp(list(fit_w, fit_g, fit_ln), legendtext = plot.legend)
ppcomp(list(fit_w, fit_g, fit_ln), legendtext = plot.legend)
ests <- bootdist(fit_g, niter = 1e3) #bootstrapped parameter estimates for the gamma fit
summary(ests)
## Parametric bootstrap medians and 95% percentile CI
## Median 2.5% 97.5%
## shape 19.146081 16.787348 22.243969
## rate 6.630558 5.811013 7.685689
#Both the AIC and BIC values favour the gamma distribution.
plot(ests)
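#The bootstrap draws stored in ests$estim can also give a percentile CI for derived quantities, e.g. the mean of the fitted gamma distribution (shape/rate); a minimal sketch:
boot_mean <- ests$estim$shape / ests$estim$rate
quantile(boot_mean, probs = c(0.025, 0.5, 0.975))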
#########
rm(list = ls())
library(lmerTest)
## Loading required package: lme4
## Loading required package: Matrix
##
## Attaching package: 'lmerTest'
## The following object is masked from 'package:lme4':
##
## lmer
## The following object is masked from 'package:stats':
##
## step
library(lme4)
library(ggplot2)
library(sjmisc)
library(sjPlot)
data_d <- read.delim("~/Dropbox/Alla-project/Paper-biases/Data-Analysis/Perceptual_matching/d-prime.txt")
#Arranging data
Shape_Category <- as.factor(data_d$Shape_Category)
Shape_Category <- factor(Shape_Category, levels(Shape_Category)[c(4, 6, 2, 3, 1, 5)])
Experiment <- as.factor(data_d$Experiment)
Experiment <- factor(Experiment, levels(Experiment)[c(2, 3, 1)])
Saliency <- as.factor(data_d$Saliency)
Sensitivity <- as.numeric(data_d$Sensitivity)
Subject <- as.factor(data_d$Subject)
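#Note: the index-based releveling above silently depends on the alphabetical order of the raw labels. An equivalent, more explicit form (a sketch; the level names are those shown in the data summary below) is:
Shape_Category <- factor(Shape_Category,
                         levels = c("Me", "Stranger", "High_reward",
                                    "Low_reward", "Happy", "Neutral"))
Experiment <- factor(Experiment, levels = c("Personal", "Reward", "Emotion"))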
d_prime <- data.frame(Shape_Category, Sensitivity, Subject, Experiment, Saliency)
summary(d_prime)
## Shape_Category Sensitivity Subject Experiment Saliency
## Me :60 Min. :1.323 1 : 6 Personal:120 High:180
## Stranger :60 1st Qu.:2.421 2 : 6 Reward :120 Low :180
## High_reward:60 Median :2.855 3 : 6 Emotion :120
## Low_reward :60 Mean :2.893 4 : 6
## Happy :60 3rd Qu.:3.298 5 : 6
## Neutral :60 Max. :5.277 6 : 6
## (Other):324
print(d_prime)
## Shape_Category Sensitivity Subject Experiment Saliency
## 1 Me 1.617846 1 Personal High
## 2 Me 2.199920 2 Personal High
## 3 Me 2.463661 3 Personal High
## 4 Me 2.403506 4 Personal High
## 5 Me 2.525875 5 Personal High
## 6 Me 1.983975 6 Personal High
## 7 Me 2.252651 7 Personal High
## 8 Me 3.059552 8 Personal High
## 9 Me 2.264424 9 Personal High
## 10 Me 3.470939 10 Personal High
## 11 Me 3.681091 11 Personal High
## 12 Me 3.503001 12 Personal High
## 13 Me 3.148150 13 Personal High
## 14 Me 2.635407 14 Personal High
## 15 Me 2.359741 15 Personal High
## 16 Me 3.499326 16 Personal High
## 17 Me 3.463365 17 Personal High
## 18 Me 4.141430 18 Personal High
## 19 Me 4.885677 19 Personal High
## 20 Me 4.274446 20 Personal High
## 21 Me 3.105771 21 Personal High
## 22 Me 2.737890 22 Personal High
## 23 Me 2.778319 23 Personal High
## 24 Me 3.311161 24 Personal High
## 25 Me 3.681091 25 Personal High
## 26 Me 3.302946 26 Personal High
## 27 Me 2.330829 27 Personal High
## 28 Me 3.933658 28 Personal High
## 29 Me 2.784145 29 Personal High
## 30 Me 5.276515 30 Personal High
## 31 Me 2.784282 31 Personal High
## 32 Me 3.322602 32 Personal High
## 33 Me 4.885677 33 Personal High
## 34 Me 2.796496 34 Personal High
## 35 Me 2.828036 35 Personal High
## 36 Me 3.272504 36 Personal High
## 37 Me 1.446170 37 Personal High
## 38 Me 1.837026 38 Personal High
## 39 Me 3.075050 39 Personal High
## 40 Me 2.697618 40 Personal High
## 41 Me 2.549133 41 Personal High
## 42 Me 2.216705 42 Personal High
## 43 Me 2.562440 43 Personal High
## 44 Me 2.727774 44 Personal High
## 45 Me 2.968471 45 Personal High
## 46 Me 2.353642 46 Personal High
## 47 Me 3.220047 47 Personal High
## 48 Me 3.148737 48 Personal High
## 49 Me 3.147480 49 Personal High
## 50 Me 3.292400 50 Personal High
## 51 Me 2.300992 51 Personal High
## 52 Me 1.781014 52 Personal High
## 53 Me 3.370770 53 Personal High
## 54 Me 3.382560 54 Personal High
## 55 Me 2.671208 55 Personal High
## 56 Me 3.253881 56 Personal High
## 57 Me 3.207158 57 Personal High
## 58 Me 2.594942 58 Personal High
## 59 Me 3.145141 59 Personal High
## 60 Me 3.388536 60 Personal High
## 61 Stranger 1.880776 1 Personal Low
## 62 Stranger 2.083861 2 Personal Low
## 63 Stranger 2.394033 3 Personal Low
## 64 Stranger 2.875816 4 Personal Low
## 65 Stranger 3.164569 5 Personal Low
## 66 Stranger 2.489211 6 Personal Low
## 67 Stranger 2.165527 7 Personal Low
## 68 Stranger 2.160334 8 Personal Low
## 69 Stranger 1.919576 9 Personal Low
## 70 Stranger 2.894646 10 Personal Low
## 71 Stranger 2.486148 11 Personal Low
## 72 Stranger 3.677452 12 Personal Low
## 73 Stranger 2.698405 13 Personal Low
## 74 Stranger 2.285093 14 Personal Low
## 75 Stranger 2.273094 15 Personal Low
## 76 Stranger 3.000956 16 Personal Low
## 77 Stranger 3.463092 17 Personal Low
## 78 Stranger 4.284888 18 Personal Low
## 79 Stranger 2.830486 19 Personal Low
## 80 Stranger 3.564732 20 Personal Low
## 81 Stranger 2.894994 21 Personal Low
## 82 Stranger 3.343499 22 Personal Low
## 83 Stranger 3.092874 23 Personal Low
## 84 Stranger 2.740180 24 Personal Low
## 85 Stranger 2.490574 25 Personal Low
## 86 Stranger 3.246236 26 Personal Low
## 87 Stranger 2.619035 27 Personal Low
## 88 Stranger 3.071379 28 Personal Low
## 89 Stranger 2.709835 29 Personal Low
## 90 Stranger 3.557011 30 Personal Low
## 91 Stranger 2.949270 31 Personal Low
## 92 Stranger 2.740155 32 Personal Low
## 93 Stranger 3.793647 33 Personal Low
## 94 Stranger 2.826044 34 Personal Low
## 95 Stranger 2.531403 35 Personal Low
## 96 Stranger 3.075102 36 Personal Low
## 97 Stranger 2.371603 37 Personal Low
## 98 Stranger 2.114574 38 Personal Low
## 99 Stranger 2.570751 39 Personal Low
## 100 Stranger 2.925569 40 Personal Low
## 101 Stranger 2.624914 41 Personal Low
## 102 Stranger 2.566906 42 Personal Low
## 103 Stranger 2.420628 43 Personal Low
## 104 Stranger 2.284502 44 Personal Low
## 105 Stranger 1.891519 45 Personal Low
## 106 Stranger 2.438751 46 Personal Low
## 107 Stranger 2.822283 47 Personal Low
## 108 Stranger 3.304659 48 Personal Low
## 109 Stranger 3.100244 49 Personal Low
## 110 Stranger 2.920151 50 Personal Low
## 111 Stranger 2.688602 51 Personal Low
## 112 Stranger 2.091876 52 Personal Low
## 113 Stranger 2.779947 53 Personal Low
## 114 Stranger 2.722402 54 Personal Low
## 115 Stranger 2.420628 55 Personal Low
## 116 Stranger 2.886101 56 Personal Low
## 117 Stranger 2.736885 57 Personal Low
## 118 Stranger 3.332747 58 Personal Low
## 119 Stranger 2.594051 59 Personal Low
## 120 Stranger 2.981151 60 Personal Low
## 121 High_reward 1.662597 1 Reward High
## 122 High_reward 2.199920 2 Reward High
## 123 High_reward 2.522106 3 Reward High
## 124 High_reward 2.115727 4 Reward High
## 125 High_reward 3.079429 5 Reward High
## 126 High_reward 1.862971 6 Reward High
## 127 High_reward 2.016209 7 Reward High
## 128 High_reward 1.896546 8 Reward High
## 129 High_reward 1.444945 9 Reward High
## 130 High_reward 2.127285 10 Reward High
## 131 High_reward 3.455430 11 Reward High
## 132 High_reward 3.322602 12 Reward High
## 133 High_reward 3.474889 13 Reward High
## 134 High_reward 2.853277 14 Reward High
## 135 High_reward 2.574531 15 Reward High
## 136 High_reward 4.284888 16 Reward High
## 137 High_reward 3.778984 17 Reward High
## 138 High_reward 4.486009 18 Reward High
## 139 High_reward 3.435158 19 Reward High
## 140 High_reward 3.431519 20 Reward High
## 141 High_reward 2.620669 21 Reward High
## 142 High_reward 2.947894 22 Reward High
## 143 High_reward 2.944820 23 Reward High
## 144 High_reward 3.161681 24 Reward High
## 145 High_reward 4.284888 25 Reward High
## 146 High_reward 3.843898 26 Reward High
## 147 High_reward 3.650097 27 Reward High
## 148 High_reward 4.482805 28 Reward High
## 149 High_reward 3.358418 29 Reward High
## 150 High_reward 3.627892 30 Reward High
## 151 High_reward 3.071215 31 Reward High
## 152 High_reward 2.552846 32 Reward High
## 153 High_reward 3.388536 33 Reward High
## 154 High_reward 2.159529 34 Reward High
## 155 High_reward 2.458340 35 Reward High
## 156 High_reward 3.158554 36 Reward High
## 157 High_reward 3.160618 37 Reward High
## 158 High_reward 1.940120 38 Reward High
## 159 High_reward 2.884762 39 Reward High
## 160 High_reward 3.062832 40 Reward High
## 161 High_reward 2.912225 41 Reward High
## 162 High_reward 2.569922 42 Reward High
## 163 High_reward 2.623811 43 Reward High
## 164 High_reward 2.190094 44 Reward High
## 165 High_reward 2.251822 45 Reward High
## 166 High_reward 3.000956 46 Reward High
## 167 High_reward 2.514072 47 Reward High
## 168 High_reward 3.824549 48 Reward High
## 169 High_reward 3.051495 49 Reward High
## 170 High_reward 2.834820 50 Reward High
## 171 High_reward 1.882303 51 Reward High
## 172 High_reward 2.194539 52 Reward High
## 173 High_reward 1.919178 53 Reward High
## 174 High_reward 3.000956 54 Reward High
## 175 High_reward 2.732550 55 Reward High
## 176 High_reward 3.430944 56 Reward High
## 177 High_reward 2.953483 57 Reward High
## 178 High_reward 2.519004 58 Reward High
## 179 High_reward 2.671208 59 Reward High
## 180 High_reward 3.367416 60 Reward High
## 181 Low_reward 2.026698 1 Reward Low
## 182 Low_reward 2.359741 2 Reward Low
## 183 Low_reward 2.128035 3 Reward Low
## 184 Low_reward 2.619035 4 Reward Low
## 185 Low_reward 2.326386 5 Reward Low
## 186 Low_reward 1.323496 6 Reward Low
## 187 Low_reward 2.315023 7 Reward Low
## 188 Low_reward 2.311236 8 Reward Low
## 189 Low_reward 1.954739 9 Reward Low
## 190 Low_reward 2.247875 10 Reward Low
## 191 Low_reward 3.824549 11 Reward Low
## 192 Low_reward 3.824549 12 Reward Low
## 193 Low_reward 3.568536 13 Reward Low
## 194 Low_reward 2.920151 14 Reward Low
## 195 Low_reward 2.120910 15 Reward Low
## 196 Low_reward 3.723505 16 Reward Low
## 197 Low_reward 4.130913 17 Reward Low
## 198 Low_reward 4.284888 18 Reward Low
## 199 Low_reward 3.677452 19 Reward Low
## 200 Low_reward 3.509894 20 Reward Low
## 201 Low_reward 2.717185 21 Reward Low
## 202 Low_reward 2.992675 22 Reward Low
## 203 Low_reward 3.834622 23 Reward Low
## 204 Low_reward 4.184603 24 Reward Low
## 205 Low_reward 3.568536 25 Reward Low
## 206 Low_reward 3.501106 26 Reward Low
## 207 Low_reward 2.394570 27 Reward Low
## 208 Low_reward 3.062666 28 Reward Low
## 209 Low_reward 3.220791 29 Reward Low
## 210 Low_reward 2.944537 30 Reward Low
## 211 Low_reward 2.857502 31 Reward Low
## 212 Low_reward 2.948557 32 Reward Low
## 213 Low_reward 3.928790 33 Reward Low
## 214 Low_reward 3.583806 34 Reward Low
## 215 Low_reward 3.128454 35 Reward Low
## 216 Low_reward 2.904972 36 Reward Low
## 217 Low_reward 3.587445 37 Reward Low
## 218 Low_reward 2.290905 38 Reward Low
## 219 Low_reward 3.198849 39 Reward Low
## 220 Low_reward 3.005804 40 Reward Low
## 221 Low_reward 2.549957 41 Reward Low
## 222 Low_reward 2.671208 42 Reward Low
## 223 Low_reward 1.968220 43 Reward Low
## 224 Low_reward 2.418493 44 Reward Low
## 225 Low_reward 2.550694 45 Reward Low
## 226 Low_reward 1.922223 46 Reward Low
## 227 Low_reward 2.939823 47 Reward Low
## 228 Low_reward 4.537894 48 Reward Low
## 229 Low_reward 3.681091 49 Reward Low
## 230 Low_reward 3.199417 50 Reward Low
## 231 Low_reward 1.812074 51 Reward Low
## 232 Low_reward 2.044186 52 Reward Low
## 233 Low_reward 2.889096 53 Reward Low
## 234 Low_reward 3.215546 54 Reward Low
## 235 Low_reward 3.145141 55 Reward Low
## 236 Low_reward 2.981130 56 Reward Low
## 237 Low_reward 2.633369 57 Reward Low
## 238 Low_reward 2.317410 58 Reward Low
## 239 Low_reward 2.194937 59 Reward Low
## 240 Low_reward 2.660066 60 Reward Low
## 241 Happy 2.615999 1 Emotion High
## 242 Happy 2.623811 2 Emotion High
## 243 Happy 2.953559 3 Emotion High
## 244 Happy 2.966128 4 Emotion High
## 245 Happy 2.771461 5 Emotion High
## 246 Happy 2.736885 6 Emotion High
## 247 Happy 2.078579 7 Emotion High
## 248 Happy 2.010632 8 Emotion High
## 249 Happy 2.531999 9 Emotion High
## 250 Happy 2.252651 10 Emotion High
## 251 Happy 3.008041 11 Emotion High
## 252 Happy 2.301305 12 Emotion High
## 253 Happy 2.377256 13 Emotion High
## 254 Happy 3.016429 14 Emotion High
## 255 Happy 3.072637 15 Emotion High
## 256 Happy 3.575473 16 Emotion High
## 257 Happy 4.044460 17 Emotion High
## 258 Happy 3.927287 18 Emotion High
## 259 Happy 4.885677 19 Emotion High
## 260 Happy 4.137791 20 Emotion High
## 261 Happy 3.072455 21 Emotion High
## 262 Happy 3.377292 22 Emotion High
## 263 Happy 2.669543 23 Emotion High
## 264 Happy 3.808374 24 Emotion High
## 265 Happy 3.681091 25 Emotion High
## 266 Happy 3.634266 26 Emotion High
## 267 Happy 3.009565 27 Emotion High
## 268 Happy 3.463145 28 Emotion High
## 269 Happy 3.020257 29 Emotion High
## 270 Happy 4.137791 30 Emotion High
## 271 Happy 2.448506 31 Emotion High
## 272 Happy 4.001943 32 Emotion High
## 273 Happy 4.414880 33 Emotion High
## 274 Happy 3.506640 34 Emotion High
## 275 Happy 3.094602 35 Emotion High
## 276 Happy 2.770326 36 Emotion High
## 277 Happy 4.885677 37 Emotion High
## 278 Happy 1.698441 38 Emotion High
## 279 Happy 2.421747 39 Emotion High
## 280 Happy 3.087690 40 Emotion High
## 281 Happy 2.465918 41 Emotion High
## 282 Happy 2.498560 42 Emotion High
## 283 Happy 2.774892 43 Emotion High
## 284 Happy 2.384897 44 Emotion High
## 285 Happy 2.493793 45 Emotion High
## 286 Happy 2.949033 46 Emotion High
## 287 Happy 2.866435 47 Emotion High
## 288 Happy 3.370770 48 Emotion High
## 289 Happy 3.207331 49 Emotion High
## 290 Happy 2.120910 50 Emotion High
## 291 Happy 2.005690 51 Emotion High
## 292 Happy 2.100033 52 Emotion High
## 293 Happy 2.823156 53 Emotion High
## 294 Happy 3.564732 54 Emotion High
## 295 Happy 2.894994 55 Emotion High
## 296 Happy 3.003811 56 Emotion High
## 297 Happy 3.164569 57 Emotion High
## 298 Happy 2.595487 58 Emotion High
## 299 Happy 2.884646 59 Emotion High
## 300 Happy 3.259238 60 Emotion High
## 301 Neutral 2.433975 1 Emotion Low
## 302 Neutral 2.053257 2 Emotion Low
## 303 Neutral 2.848669 3 Emotion Low
## 304 Neutral 2.574531 4 Emotion Low
## 305 Neutral 3.083763 5 Emotion Low
## 306 Neutral 1.796858 6 Emotion Low
## 307 Neutral 2.078480 7 Emotion Low
## 308 Neutral 2.311236 8 Emotion Low
## 309 Neutral 2.130669 9 Emotion Low
## 310 Neutral 2.234554 10 Emotion Low
## 311 Neutral 2.770122 11 Emotion Low
## 312 Neutral 4.284119 12 Emotion Low
## 313 Neutral 2.692167 13 Emotion Low
## 314 Neutral 3.357633 14 Emotion Low
## 315 Neutral 2.844455 15 Emotion Low
## 316 Neutral 4.070121 16 Emotion Low
## 317 Neutral 2.595473 17 Emotion Low
## 318 Neutral 3.931278 18 Emotion Low
## 319 Neutral 2.953559 19 Emotion Low
## 320 Neutral 3.782941 20 Emotion Low
## 321 Neutral 2.947894 21 Emotion Low
## 322 Neutral 3.083763 22 Emotion Low
## 323 Neutral 3.114525 23 Emotion Low
## 324 Neutral 3.688907 24 Emotion Low
## 325 Neutral 3.455430 25 Emotion Low
## 326 Neutral 4.003035 26 Emotion Low
## 327 Neutral 3.398189 27 Emotion Low
## 328 Neutral 3.567762 28 Emotion Low
## 329 Neutral 2.949270 29 Emotion Low
## 330 Neutral 3.296487 30 Emotion Low
## 331 Neutral 2.563826 31 Emotion Low
## 332 Neutral 3.164569 32 Emotion Low
## 333 Neutral 3.020879 33 Emotion Low
## 334 Neutral 2.277974 34 Emotion Low
## 335 Neutral 2.476381 35 Emotion Low
## 336 Neutral 2.540781 36 Emotion Low
## 337 Neutral 3.578615 37 Emotion Low
## 338 Neutral 1.979330 38 Emotion Low
## 339 Neutral 2.516410 39 Emotion Low
## 340 Neutral 2.554279 40 Emotion Low
## 341 Neutral 2.037864 41 Emotion Low
## 342 Neutral 2.560555 42 Emotion Low
## 343 Neutral 1.699767 43 Emotion Low
## 344 Neutral 2.677530 44 Emotion Low
## 345 Neutral 2.462293 45 Emotion Low
## 346 Neutral 2.407138 46 Emotion Low
## 347 Neutral 2.595443 47 Emotion Low
## 348 Neutral 3.254411 48 Emotion Low
## 349 Neutral 3.003733 49 Emotion Low
## 350 Neutral 2.083861 50 Emotion Low
## 351 Neutral 1.882920 51 Emotion Low
## 352 Neutral 1.785837 52 Emotion Low
## 353 Neutral 3.047205 53 Emotion Low
## 354 Neutral 2.962475 54 Emotion Low
## 355 Neutral 2.289899 55 Emotion Low
## 356 Neutral 2.334441 56 Emotion Low
## 357 Neutral 2.312634 57 Emotion Low
## 358 Neutral 2.339568 58 Emotion Low
## 359 Neutral 2.566270 59 Emotion Low
## 360 Neutral 2.813808 60 Emotion Low
#Models tested
Model1 <- glm(Sensitivity ~ Experiment/Saliency, data = d_prime, family = Gamma(link = "identity")) #fixed-effects model in which Saliency is nested within Experiment
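#The nesting operator A/B expands to A + A:B, i.e. a separate Saliency effect is estimated within each Experiment; the resulting design columns can be inspected directly (a minimal check):
colnames(model.matrix(~ Experiment/Saliency, data = d_prime))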
Model2 <- glmer(Sensitivity ~ Experiment/Saliency + (1 | Subject), data = d_prime, family = Gamma(link = "identity")) #model with fixed effects of Experiment/Saliency and random intercepts by Subject
plot_model(Model2,type = "pred",show.se = TRUE)
## $Experiment
##
## $Saliency
Model3 <- glmer(Sensitivity ~ Experiment/Saliency + (1 + Saliency | Subject), data = d_prime, family = Gamma(link = "identity")) #model with fixed effects of Experiment/Saliency and random intercepts & slopes by Subject. The plausibility of this model derives from visual inspection of the individual data. However, this model gave a warning that it failed to converge, with max|grad| = 0.00286513 (tol = 0.002, component 1).
## Warning in checkConv(attr(opt, "derivs"), opt$par, ctrl = control$checkConv, :
## Model failed to converge with max|grad| = 0.00286513 (tol = 0.002, component 1)
library(optimx)
Model3 <- update(Model3, control = glmerControl(optimizer = "optimx", optCtrl = list(method = "nlminb"))) #refit with the nlminb optimizer (via optimx) to resolve the convergence warning
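#A quick post-refit check (a sketch): lme4 stores convergence messages in the optinfo slot, and an empty result indicates the warning is gone.
Model3@optinfo$conv$lme4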
anova(Model3, Model2, Model1)
#The model comparison indicates that Model3, with 10 parameters, yields the smallest deviance and the smallest AIC.
plot_model(Model3,type = "pred",show.se = TRUE)
## $Experiment
##
## $Saliency
summary(Model3)
## Generalized linear mixed model fit by maximum likelihood (Laplace
## Approximation) [glmerMod]
## Family: Gamma ( identity )
## Formula: Sensitivity ~ Experiment/Saliency + (1 + Saliency | Subject)
## Data: d_prime
## Control: glmerControl(optimizer = "optimx", optCtrl = list(method = "nlminb"))
##
## AIC BIC logLik deviance df.resid
## 461.2 500.0 -220.6 441.2 350
##
## Scaled residuals:
## Min 1Q Median 3Q Max
## -3.5348 -0.5960 -0.0345 0.5321 3.2150
##
## Random effects:
## Groups Name Variance Std.Dev. Corr
## Subject (Intercept) 0.15139 0.3891
## SaliencyLow 0.04454 0.2111 -0.51
## Residual 0.02406 0.1551
## Number of obs: 360, groups: Subject, 60
##
## Fixed effects:
## Estimate Std. Error t value Pr(>|z|)
## (Intercept) 3.0704738 0.1126370 27.260 < 2e-16 ***
## ExperimentReward -0.1151558 0.0691378 -1.666 0.095794 .
## ExperimentEmotion 0.0583719 0.0712518 0.819 0.412652
## ExperimentPersonal:SaliencyLow -0.1942761 0.0813512 -2.388 0.016935 *
## ExperimentReward:SaliencyLow -0.0004686 0.0804329 -0.006 0.995351
## ExperimentEmotion:SaliencyLow -0.2912916 0.0813872 -3.579 0.000345 ***
## ---
## Signif. codes: 0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
##
## Correlation of Fixed Effects:
## (Intr) ExprmR ExprmE ExP:SL ExR:SL
## ExprmntRwrd -0.321
## ExprmntEmtn -0.309 0.507
## ExprmntP:SL -0.524 0.444 0.427
## ExprmntR:SL -0.255 -0.411 -0.003 0.281
## ExprmntE:SL -0.255 0.001 -0.447 0.289 0.281
#Density plots of the sensitivity measure, faceted by Saliency and Experiment
ggplot(data_d, aes(x = Sensitivity)) + geom_density() + facet_wrap(Saliency ~ Experiment)
#We also assessed the Q-Q plot of the model residuals against standard normal quantiles. The points fall largely along the line, although there are some deviations from a perfect fit.
qqnorm(resid(Model3))
#Scatterplot of residuals against fitted values for the d-prime model: the x-axis shows the fitted values and the y-axis the residuals (the distances of the points from the model fit). The dashed horizontal line marks zero, i.e. no average deviation from the fit. The scatter shows no pattern suggesting some other random or fixed effect that would explain additional variation in the data.
plot(fitted(Model3), residuals(Model3), xlab = "Fitted Values", ylab = "Residuals")
abline(h = 0, lty = 2)
plot_model(Model3, type = "diag")
## $Subject
## `geom_smooth()` using formula 'y ~ x'
#The R-squared values are the marginal and conditional R-squared statistics of Nakagawa et al. (2017).
model_summary <- tab_model(Model1, Model2, Model3, dv.labels = c("Model1", "Model2", "Model3"), string.ci = "Conf. Int (95%)")
#The marginal R2 is the proportion of the total variance explained by the fixed effects; the conditional R2 is the proportion explained by both the fixed and random effects. The intra-class correlation coefficient (ICC) is a related statistic that quantifies the proportion of variance explained by a grouping (random) factor in multilevel/hierarchical data. τ11 is the random by-subject variance in saliency level.
model_summary
|  | Model1 |  |  | Model2 |  |  | Model3 |  |  |
|---|---|---|---|---|---|---|---|---|---|
| Predictors | Estimates | Conf. Int (95%) | p | Estimates | Conf. Int (95%) | p | Estimates | Conf. Int (95%) | p |
| (Intercept) | 19.85 | 16.81 – 23.76 | <0.001 | 21.05 | 17.13 – 25.86 | <0.001 | 21.55 | 17.28 – 26.88 | <0.001 |
| Experiment [Reward] | 0.90 | 0.71 – 1.15 | 0.410 | 0.89 | 0.77 – 1.03 | 0.107 | 0.89 | 0.78 – 1.02 | 0.096 |
| Experiment [Emotion] | 1.04 | 0.81 – 1.33 | 0.769 | 1.06 | 0.92 – 1.23 | 0.436 | 1.06 | 0.92 – 1.22 | 0.413 |
| Experiment [Personal] : SaliencyLow | 0.80 | 0.63 – 1.01 | 0.063 | 0.86 | 0.75 – 0.99 | 0.030 | 0.82 | 0.70 – 0.97 | 0.017 |
| Experiment [Reward] : SaliencyLow | 1.04 | 0.82 – 1.32 | 0.767 | 1.04 | 0.91 – 1.20 | 0.567 | 1.00 | 0.85 – 1.17 | 0.995 |
| Experiment [Emotion] : SaliencyLow | 0.77 | 0.61 – 0.98 | 0.035 | 0.78 | 0.68 – 0.90 | <0.001 | 0.75 | 0.64 – 0.88 | <0.001 |
| Random Effects |  |  |  |  |  |  |  |  |  |
| σ2 |  |  |  | 0.03 |  |  | 0.02 |  |  |
| τ00 |  |  |  | 0.12 Subject |  |  | 0.15 Subject |  |  |
| τ11 |  |  |  |  |  |  | 0.04 Subject.SaliencyLow |  |  |
| ρ01 |  |  |  |  |  |  | -0.51 Subject |  |  |
| ICC |  |  |  | 0.83 |  |  | 0.86 |  |  |
| N |  |  |  | 60 Subject |  |  | 60 Subject |  |  |
| Observations | 360 |  |  | 360 |  |  | 360 |  |  |
| R2 Nagelkerke | 0.023 |  |  | 0.048 / 0.836 |  |  | 0.056 / 0.871 |  |  |
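#The marginal/conditional R2 and ICC shown in the table can also be computed directly (a sketch, assuming the performance package is installed; it implements the same Nakagawa et al. 2017 statistics):
library(performance)
r2_nakagawa(Model3) #marginal and conditional R2
icc(Model3)         #intra-class correlation coefficient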
###################################