## Load packages
# install.packages("car")
# install.packages("GPArotation")
# install.packages("psych")
library(psych)
## Warning: Paket 'psych' wurde unter R Version 4.4.3 erstellt
library(car)
## Warning: Paket 'car' wurde unter R Version 4.4.3 erstellt
## Lade nötiges Paket: carData
## Warning: Paket 'carData' wurde unter R Version 4.4.3 erstellt
##
## Attache Paket: 'car'
## Das folgende Objekt ist maskiert 'package:psych':
##
## logit
library(GPArotation)
## Warning: Paket 'GPArotation' wurde unter R Version 4.4.3 erstellt
##
## Attache Paket: 'GPArotation'
## Die folgenden Objekte sind maskiert von 'package:psych':
##
## equamax, varimin
data_agentnarc <- openxlsx::read.xlsx("data_agentnarc.xlsx")
Codebook <- openxlsx::read.xlsx("Codebook_agentnarc.xlsx")
# Clean column names/keys (prevents join problems)
names(Codebook) <- trimws(names(Codebook))
if ("Itemname" %in% names(Codebook)) Codebook$Itemname <- trimws(as.character(Codebook$Itemname))
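As an optional sanity check (a minimal sketch, not part of the original pipeline): after trimming, every agent_ item in the data should have a matching Itemname entry in the codebook, otherwise the recoding lookup further below will silently skip those items.
# Hypothetical check: agent_ items without a codebook entry (ideally returns character(0))
setdiff(grep("^agent_", names(data_agentnarc), value = TRUE),
        trimws(as.character(Codebook$Itemname)))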
# Store the relevant variables in a new object
names(data_agentnarc)
## [1] "session" "session_id" "study_id"
## [4] "iteration" "created" "modified"
## [7] "ended" "expired" "datenschutz"
## [10] "ident_code" "demo_gender" "demo_age"
## [13] "demo_education_self" "demo_work" "demo_position"
## [16] "ffni_arr_1" "ffni_arr_2" "ffni_arr_4"
## [19] "ffni_as_1" "ffni_as_2" "ffni_aut_1"
## [22] "ffni_aut_2" "ffni_aut_3" "ffni_aut_4"
## [25] "ffni_dis_1" "ffni_dis_3" "ffni_dis_4"
## [28] "ffni_ent_1" "ffni_ent_3" "ffni_exh_1"
## [31] "ffni_exh_2" "ffni_exp_1" "ffni_exp_3"
## [34] "ffni_exp_4" "ffni_gf_1" "ffni_gf_2"
## [37] "ffni_gf_3" "ffni_gf_4" "ffni_ind_1"
## [40] "ffni_ind_3" "ffni_le_1" "ffni_le_2"
## [43] "ffni_le_3" "ffni_le_4" "ffni_man_1"
## [46] "ffni_man_2" "ffni_man_3" "ffni_man_4"
## [49] "ffni_na_1" "ffni_na_2" "ffni_na_3"
## [52] "ffni_na_4" "ffni_ra_1" "ffni_ra_2"
## [55] "ffni_ra_3" "ffni_ra_4" "ffni_sha_1"
## [58] "ffni_sha_3" "ffni_sha_4" "ffni_ts_1"
## [61] "ffni_ts_2" "ffni_ts_3" "ffni_ts_4"
## [64] "narc_1" "narc_2" "narc_3"
## [67] "narc_4" "narc_5" "narc_6"
## [70] "usd3_psych_1" "usd3_psych_2" "usd3_psych_3"
## [73] "usd3_mach_1" "usd3_mach_2" "usd3_mach_3"
## [76] "usd3_nar_1" "usd3_nar_2" "usd3_nar_3"
## [79] "bfi2_extra1" "bfi2_extra2" "bfi2_extra3"
## [82] "bfi2_extra4" "bfi2_extra5" "bfi2_extra6"
## [85] "bfi2_extra7" "bfi2_extra8" "bfi2_extra9"
## [88] "bfi2_extra10" "bfi2_extra11" "bfi2_extra12"
## [91] "bfi2_ver1" "bfi2_ver2" "bfi2_ver3"
## [94] "bfi2_ver4" "bfi2_ver5" "bfi2_ver6"
## [97] "bfi2_ver7" "bfi2_ver8" "bfi2_ver9"
## [100] "bfi2_ver10" "bfi2_ver11" "bfi2_ver12"
## [103] "bfi2_neu1" "bfi2_neu2" "bfi2_neu3"
## [106] "bfi2_neu4" "bfi2_neu5" "bfi2_neu6"
## [109] "bfi2_neu7" "bfi2_neu8" "bfi2_neu9"
## [112] "bfi2_neu10" "bfi2_neu11" "bfi2_neu12"
## [115] "selfest_1" "selfest_2" "selfest_3"
## [118] "selfest_4" "selfest_5" "selfest_6"
## [121] "selfest_7" "selfest_8" "selfest_9"
## [124] "selfest_10" "demo_success" "ffni_arr_3"
## [127] "ffni_as_3" "ffni_ent_2" "ffni_ind_4"
## [130] "ffni_ent_4" "ffni_exh_4" "ffni_dis_2"
## [133] "ffni_as_4" "ffni_sha_2" "ffni_ind_2"
## [136] "ffni_exh_3" "ffni_exp_2" "agent_1C_1"
## [139] "agent_1C_2" "agent_1C_3" "agent_1C_4"
## [142] "agent_1C_5" "agent_1D_1" "agent_1D_2"
## [145] "agent_1D_3" "agent_1D_4" "agent_1D_5"
## [148] "agent_1A_1" "agent_1A_2" "agent_1A_3"
## [151] "agent_1A_4" "agent_1A_5" "agent_1E_1"
## [154] "agent_1E_2" "agent_1E_3" "agent_1E_4"
## [157] "agent_1E_5" "agent_1B_1" "agent_1B_2"
## [160] "agent_1B_3" "agent_1B_4" "agent_1B_5"
## [163] "agent_2E_5" "agent_2B_1" "agent_2B_2"
## [166] "agent_2B_3" "agent_2B_4" "agent_2B_5"
## [169] "agent_2A_1" "agent_2A_2" "agent_2A_3"
## [172] "agent_2A_4" "agent_2A_5" "agent_2D_1"
## [175] "agent_2D_2" "agent_2D_3" "agent_2D_4"
## [178] "agent_2D_5" "agent_3A_1" "agent_3A_2"
## [181] "agent_3A_3" "agent_3B_1" "agent_3B_2"
## [184] "agent_3B_3" "agent_3B_4" "agent_3B_5"
## [187] "agent_3C_1" "agent_3C_2" "agent_3C_3"
## [190] "agent_3D_1" "agent_3D_2" "agent_3D_3"
## [193] "agent_3E_1" "agent_3E_2" "agent_3E_3"
## [196] "agent_4A_1" "agent_4A_2" "agent_4A_3"
## [199] "agent_4B_1" "agent_4B_2" "agent_4B_3"
## [202] "agent_4B_4" "agent_4B_5" "agent_4C_1"
## [205] "agent_4C_2" "agent_4C_3" "agent_4D_1"
## [208] "agent_4D_2" "agent_4D_3" "agent_5C_1"
## [211] "agent_5C_2" "agent_5C_3" "agent_5C_4"
## [214] "agent_5C_5" "agent_5D_1" "agent_5D_2"
## [217] "agent_5D_3" "agent_5D_4" "agent_5D_5"
## [220] "agent_2C_1" "agent_2C_2" "agent_2C_3"
## [223] "agent_2C_4" "agent_2C_5" "agent_5A_1"
## [226] "agent_5A_2" "agent_5A_3" "agent_5A_4"
## [229] "agent_5A_5" "agent_5B_1" "agent_5B_2"
## [232] "agent_5B_3" "agent_5B_4" "agent_5B_5"
## [235] "agent_5E_1" "agent_5E_2" "agent_5E_3"
## [238] "agent_5E_4" "agent_5E_5" "agent_2E_1"
## [241] "agent_2E_2" "agent_2E_3" "agent_2E_4"
dat <- data_agentnarc[grepl("agent_", names(data_agentnarc))]
names(dat)
## [1] "agent_1C_1" "agent_1C_2" "agent_1C_3" "agent_1C_4" "agent_1C_5"
## [6] "agent_1D_1" "agent_1D_2" "agent_1D_3" "agent_1D_4" "agent_1D_5"
## [11] "agent_1A_1" "agent_1A_2" "agent_1A_3" "agent_1A_4" "agent_1A_5"
## [16] "agent_1E_1" "agent_1E_2" "agent_1E_3" "agent_1E_4" "agent_1E_5"
## [21] "agent_1B_1" "agent_1B_2" "agent_1B_3" "agent_1B_4" "agent_1B_5"
## [26] "agent_2E_5" "agent_2B_1" "agent_2B_2" "agent_2B_3" "agent_2B_4"
## [31] "agent_2B_5" "agent_2A_1" "agent_2A_2" "agent_2A_3" "agent_2A_4"
## [36] "agent_2A_5" "agent_2D_1" "agent_2D_2" "agent_2D_3" "agent_2D_4"
## [41] "agent_2D_5" "agent_3A_1" "agent_3A_2" "agent_3A_3" "agent_3B_1"
## [46] "agent_3B_2" "agent_3B_3" "agent_3B_4" "agent_3B_5" "agent_3C_1"
## [51] "agent_3C_2" "agent_3C_3" "agent_3D_1" "agent_3D_2" "agent_3D_3"
## [56] "agent_3E_1" "agent_3E_2" "agent_3E_3" "agent_4A_1" "agent_4A_2"
## [61] "agent_4A_3" "agent_4B_1" "agent_4B_2" "agent_4B_3" "agent_4B_4"
## [66] "agent_4B_5" "agent_4C_1" "agent_4C_2" "agent_4C_3" "agent_4D_1"
## [71] "agent_4D_2" "agent_4D_3" "agent_5C_1" "agent_5C_2" "agent_5C_3"
## [76] "agent_5C_4" "agent_5C_5" "agent_5D_1" "agent_5D_2" "agent_5D_3"
## [81] "agent_5D_4" "agent_5D_5" "agent_2C_1" "agent_2C_2" "agent_2C_3"
## [86] "agent_2C_4" "agent_2C_5" "agent_5A_1" "agent_5A_2" "agent_5A_3"
## [91] "agent_5A_4" "agent_5A_5" "agent_5B_1" "agent_5B_2" "agent_5B_3"
## [96] "agent_5B_4" "agent_5B_5" "agent_5E_1" "agent_5E_2" "agent_5E_3"
## [101] "agent_5E_4" "agent_5E_5" "agent_2E_1" "agent_2E_2" "agent_2E_3"
## [106] "agent_2E_4"
dim(dat)
## [1] 375 106
# ---- Recoding (reversal) ----
# Items flagged as "recoded" (= yes) in the codebook are reverse-scored here.
# Reversal per item over the observed value range: new = (min + max) - old
rec_col <- names(Codebook)[grepl("^recoded$", names(Codebook), ignore.case = TRUE) |
                             grepl("rekod|recode|reverse", names(Codebook), ignore.case = TRUE)][1]
items_recoded <- character(0)
if (!is.na(rec_col)) {
  items_recoded <- Codebook$Itemname[
    !is.na(Codebook[[rec_col]]) &
      trimws(tolower(as.character(Codebook[[rec_col]]))) %in% c("ja", "j", "yes", "y", "true", "1")
  ]
  items_recoded <- intersect(trimws(as.character(items_recoded)), names(dat))
}
recode_scales <- data.frame(Itemname = items_recoded, min = NA_real_, max = NA_real_)
for (i in seq_along(items_recoded)) {
  it <- items_recoded[i]
  x <- dat[[it]]
  if (is.factor(x)) x <- as.numeric(as.character(x))
  if (is.character(x)) x <- as.numeric(x)
  mn <- suppressWarnings(min(x, na.rm = TRUE))
  mx <- suppressWarnings(max(x, na.rm = TRUE))
  recode_scales$min[i] <- mn
  recode_scales$max[i] <- mx
  dat[[it]] <- (mn + mx) - x
}
recode_scales
## Itemname min max
## 1 agent_1C_2 1 6
## 2 agent_1A_1 1 6
## 3 agent_1A_2 1 6
## 4 agent_1A_3 1 6
## 5 agent_1E_2 1 6
## 6 agent_3B_5 1 6
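As a quick illustration of the reversal rule above (a minimal sketch, not part of the original analysis): on the observed 1–6 range, new = (1 + 6) - old simply mirrors the scale.
# Hypothetical check of the inversion formula on a 1-6 scale
old <- 1:6
(1 + 6) - old   # 6 5 4 3 2 1: 1 becomes 6, 2 becomes 5, and so on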
options(scipen = 999) # suppress scientific notation (e notation)
Examination of the agentic narcissism scale (items on a 6-point Likert scale) in a sample of N = 375 persons. The aim is to examine the factor structure and to describe the mean item scores as a function of the items' perceived psychological "easiness".

## Hypotheses

Psychologically easy items → high means, negatively skewed distribution (the majority agrees, e.g., mean around 5–6).
Psychologically hard items → low means, positively skewed distribution (the majority disagrees, mean around 1–2).
Psychologically medium items, i.e., items that are neither easy nor hard, typically show a symmetric, approximately normal distribution in the sample.
This means the responses are spread across the 1–6 scale, with the peak of the frequency distribution in the middle (e.g., at 3–4).
The factors of the scale are positively related to one another (moderately intercorrelated), since they measure the same superordinate construct.

## Variables

Item scores: the 106 agent_ items, each rated on a 6-point Likert scale (1 = do not agree at all, 6 = fully agree).
Psychological item "easiness": the theoretically judged difficulty level of each item.
| Item type | Expected distribution on the 1–6 scale |
|---|---|
| Easy | Negatively skewed (majority agrees, mean high, ~5–6) |
| Medium | Symmetric, approximately normal (mean ~3–4) |
| Hard | Positively skewed (majority disagrees, mean low, ~1–2) |
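To relate these expectations to the data, per-item means and skewness can later be inspected; a minimal sketch using psych::describe (not output from this document; the skewness cut-offs are illustrative assumptions):
# Item-level descriptives: mean and skew per agent_ item
desc <- psych::describe(dat)
# Illustrative classification by skew (thresholds are arbitrary assumptions)
item_type <- cut(desc$skew, breaks = c(-Inf, -0.5, 0.5, Inf),
                 labels = c("easy (negatively skewed)", "medium (symmetric)", "hard (positively skewed)"))
table(item_type)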
Workflow: prepare the data (data set with the agent_ items); recode reversed items; check the prerequisites for the EFA; run the EFA; interpret the results.
Factor assignment: determined via exploratory factor analysis (EFA, oblique rotation).
For the exploratory factor analysis (EFA), an oblique rotation is applied, since we expect the extracted factors to be correlated. Specifically, Promax rotation is used to optimize the factor loadings and achieve an interpretable simple structure.
Promax: starts with Varimax (orthogonal) and then "tilts" the solution into an oblique one. It is fast, stable, and works well for many psychological items.
GeominQ: minimizes a geometric criterion intended to make factors "simple". It can be useful for more complex or very large scales, but it is computationally more demanding and can be unstable in small samples.
Practical rule of thumb:
If the scale is manageable (e.g., 5 factors, 100 items) and the factors can be expected to correlate → Promax is sufficient.
With very many items, complex factor structures, or when the cleanest possible simple structure is desired → GeominQ is superior.
Rationale: oblique rotations allow factors to correlate, which is psychologically plausible because personality and behavioral dimensions are usually not orthogonal.
Expectation: the rotation should maximize each item's primary loading on one factor while producing small cross-loadings, so that each factor can be interpreted clearly.
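A minimal sketch of how such an EFA could be run with psych::fa once the number of factors has been decided (the factor count and ML extraction here are placeholders, not results from this document):
# Hypothetical EFA call: oblique Promax rotation, maximum likelihood extraction
# nfactors = 7 is only a placeholder taken from the parallel analysis below
efa_promax <- psych::fa(dat, nfactors = 7, rotate = "Promax", fm = "ml")
print(efa_promax$loadings, cutoff = 0.30)   # show loadings above .30
efa_promax$Phi                              # factor intercorrelations (oblique solution)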
dim(dat)
## [1] 375 106
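Before extracting factors, the suitability of the correlation matrix can be checked; a brief sketch of the usual prerequisites (KMO and Bartlett's test), added here as an illustration rather than as output from this document:
# Kaiser-Meyer-Olkin measure of sampling adequacy (values > .80 are usually considered good)
psych::KMO(dat)
# Bartlett's test of sphericity against an identity correlation matrix
psych::cortest.bartlett(cor(dat, use = "pairwise.complete.obs"), n = nrow(dat))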
# Parallel analysis
fa_parallel1 <- fa.parallel(dat, fm="ml", fa="pc", n.iter=2000, SMC=FALSE, sim=TRUE, quant=0.95, plot=TRUE)
## Parallel analysis suggests that the number of factors = NA and the number of components = 7
# 10000 iterations would be better, but would take longer
abline(h=1)
print(fa_parallel1)
## Call: fa.parallel(x = dat, fm = "ml", fa = "pc", n.iter = 2000, SMC = FALSE,
## sim = TRUE, quant = 0.95, plot = TRUE)
## Parallel analysis suggests that the number of factors = NA and the number of components = 7
##
## Eigen Values of
##
## eigen values of factors
## [1] 41.65 4.89 3.49 3.03 2.29 1.63 1.35 0.92 0.88 0.77 0.59 0.54
## [13] 0.47 0.42 0.40 0.36 0.34 0.28 0.26 0.24 0.23 0.19 0.16 0.14
## [25] 0.10 0.09 0.09 0.07 0.06 0.03 0.02 -0.02 -0.02 -0.04 -0.05 -0.06
## [37] -0.08 -0.09 -0.11 -0.11 -0.13 -0.13 -0.14 -0.15 -0.15 -0.15 -0.16 -0.17
## [49] -0.19 -0.20 -0.21 -0.21 -0.22 -0.23 -0.24 -0.24 -0.24 -0.25 -0.27 -0.27
## [61] -0.27 -0.28 -0.29 -0.30 -0.31 -0.31 -0.32 -0.32 -0.33 -0.34 -0.35 -0.35
## [73] -0.36 -0.36 -0.37 -0.37 -0.38 -0.39 -0.39 -0.40 -0.40 -0.41 -0.42 -0.42
## [85] -0.43 -0.44 -0.44 -0.45 -0.45 -0.46 -0.46 -0.47 -0.49 -0.49 -0.50 -0.51
## [97] -0.51 -0.53 -0.54 -0.55 -0.58 -0.59 -0.60 -0.62 -0.64 -0.69
##
## eigen values of simulated factors
## [1] NA
##
## eigen values of components
## [1] 42.20 5.55 4.15 3.59 2.95 2.28 2.02 1.52 1.50 1.45 1.29 1.20
## [13] 1.17 1.16 1.06 1.03 0.96 0.93 0.91 0.90 0.85 0.83 0.82 0.78
## [25] 0.75 0.73 0.71 0.69 0.66 0.63 0.61 0.60 0.59 0.57 0.56 0.55
## [37] 0.52 0.51 0.50 0.48 0.48 0.46 0.45 0.43 0.42 0.42 0.41 0.40
## [49] 0.39 0.38 0.38 0.37 0.36 0.35 0.34 0.34 0.33 0.31 0.30 0.29
## [61] 0.29 0.28 0.27 0.27 0.27 0.25 0.25 0.25 0.24 0.24 0.23 0.22
## [73] 0.22 0.21 0.21 0.20 0.19 0.18 0.18 0.18 0.17 0.17 0.16 0.16
## [85] 0.15 0.15 0.15 0.14 0.13 0.13 0.13 0.12 0.11 0.11 0.11 0.10
## [97] 0.10 0.09 0.09 0.09 0.08 0.08 0.07 0.07 0.06 0.06
##
## eigen values of simulated components
## [1] 2.27 2.19 2.13 2.07 2.03 1.98 1.94 1.90 1.87 1.83 1.80 1.77 1.74 1.71 1.68
## [16] 1.65 1.62 1.60 1.57 1.55 1.52 1.50 1.48 1.45 1.43 1.41 1.38 1.36 1.34 1.32
## [31] 1.30 1.28 1.26 1.24 1.22 1.20 1.18 1.17 1.15 1.13 1.11 1.09 1.08 1.06 1.04
## [46] 1.03 1.01 0.99 0.98 0.96 0.95 0.93 0.91 0.90 0.88 0.87 0.86 0.84 0.83 0.81
## [61] 0.80 0.78 0.77 0.76 0.74 0.73 0.71 0.70 0.69 0.68 0.66 0.65 0.64 0.62 0.61
## [76] 0.60 0.59 0.57 0.56 0.55 0.54 0.53 0.52 0.50 0.49 0.48 0.47 0.46 0.45 0.43
## [91] 0.42 0.41 0.40 0.39 0.38 0.37 0.35 0.34 0.33 0.32 0.31 0.29 0.28 0.27 0.25
## [106] 0.23
pa1 <- 7 # number of components suggested by the parallel analysis (principal components variant)
fa_parallel2 <- fa.parallel(dat, fm="ml", fa="fa", n.iter=2000, SMC=TRUE, sim=FALSE, quant=0.95, plot=TRUE)
## Parallel analysis suggests that the number of factors = 8 and the number of components = NA
print(fa_parallel2)
## Call: fa.parallel(x = dat, fm = "ml", fa = "fa", n.iter = 2000, SMC = TRUE,
## sim = FALSE, quant = 0.95, plot = TRUE)
## Parallel analysis suggests that the number of factors = 8 and the number of components = NA
##
## Eigen Values of
##
## eigen values of factors
## [1] 41.95 5.29 3.87 3.35 2.66 2.02 1.72 1.27 1.20 1.13 0.95 0.90
## [13] 0.85 0.80 0.74 0.70 0.65 0.62 0.60 0.58 0.55 0.52 0.50 0.48
## [25] 0.42 0.42 0.41 0.40 0.36 0.35 0.33 0.30 0.29 0.29 0.26 0.23
## [37] 0.23 0.22 0.21 0.20 0.19 0.18 0.18 0.17 0.15 0.15 0.14 0.14
## [49] 0.13 0.11 0.11 0.10 0.09 0.08 0.07 0.07 0.06 0.06 0.05 0.04
## [61] 0.03 0.03 0.02 0.02 0.01 0.00 0.00 -0.01 -0.01 -0.02 -0.03 -0.03
## [73] -0.03 -0.04 -0.05 -0.05 -0.05 -0.06 -0.06 -0.07 -0.07 -0.07 -0.08 -0.09
## [85] -0.09 -0.10 -0.10 -0.10 -0.11 -0.11 -0.12 -0.12 -0.12 -0.13 -0.14 -0.14
## [97] -0.14 -0.15 -0.15 -0.16 -0.17 -0.18 -0.18 -0.19 -0.20 -0.20
##
## eigen values of simulated factors
## [1] NA
##
## eigen values of components
## [1] 42.20 5.55 4.15 3.59 2.95 2.28 2.02 1.52 1.50 1.45 1.29 1.20
## [13] 1.17 1.16 1.06 1.03 0.96 0.93 0.91 0.90 0.85 0.83 0.82 0.78
## [25] 0.75 0.73 0.71 0.69 0.66 0.63 0.61 0.60 0.59 0.57 0.56 0.55
## [37] 0.52 0.51 0.50 0.48 0.48 0.46 0.45 0.43 0.42 0.42 0.41 0.40
## [49] 0.39 0.38 0.38 0.37 0.36 0.35 0.34 0.34 0.33 0.31 0.30 0.29
## [61] 0.29 0.28 0.27 0.27 0.27 0.25 0.25 0.25 0.24 0.24 0.23 0.22
## [73] 0.22 0.21 0.21 0.20 0.19 0.18 0.18 0.18 0.17 0.17 0.16 0.16
## [85] 0.15 0.15 0.15 0.14 0.13 0.13 0.13 0.12 0.11 0.11 0.11 0.10
## [97] 0.10 0.09 0.09 0.09 0.08 0.08 0.07 0.07 0.06 0.06
##
## eigen values of simulated components
## [1] NA
pa2 <- 8 # number of factors suggested by the parallel analysis (common factor variant)
# Eigenvalue > 1 criterion (Kaiser criterion)
fa_parallel1$pc.values
## [1] 42.19660440 5.54890703 4.15451266 3.58898633 2.94670409 2.28047474
## [7] 2.01750306 1.52266556 1.50223153 1.44607769 1.29018770 1.19516164
## [13] 1.17125850 1.16082208 1.05501068 1.03000121 0.96442988 0.93202000
## [19] 0.91438949 0.90084754 0.85389348 0.83165722 0.81645868 0.78271462
## [25] 0.75198171 0.72553529 0.71357589 0.68970178 0.65530208 0.62846645
## [31] 0.61400740 0.60382002 0.58743387 0.56896284 0.55502856 0.54562756
## [37] 0.52290652 0.51253847 0.49994881 0.48463157 0.47626114 0.46070524
## [43] 0.45407299 0.43150000 0.42092535 0.41598116 0.40768889 0.39548999
## [49] 0.39276970 0.38337369 0.37692284 0.37308741 0.35622607 0.34625538
## [55] 0.34149540 0.33671440 0.32921769 0.31431529 0.30477511 0.29344117
## [61] 0.28760061 0.28469445 0.27285902 0.27064911 0.26844473 0.25261141
## [67] 0.24938859 0.24769214 0.23909210 0.23596623 0.22556708 0.21882634
## [73] 0.21646534 0.20999619 0.20907838 0.19586282 0.19114179 0.18486885
## [79] 0.18230231 0.18036944 0.17400838 0.16899351 0.16496100 0.16068553
## [85] 0.15490880 0.14626851 0.14502589 0.13947964 0.13471404 0.13149909
## [91] 0.12924642 0.12162721 0.11359640 0.11157666 0.10765570 0.10393756
## [97] 0.09783901 0.09397074 0.08989775 0.08635208 0.07957046 0.07897185
## [103] 0.07402640 0.07046107 0.06467730 0.05836454
which(fa_parallel1$pc.values>1)
## [1] 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16
ew <- 10   # eigenvalue > 1 criterion; note that which() above flags 16 components with
           # eigenvalues > 1, while 10 matches the factor eigenvalues (> 1) of fa_parallel2
scree <- 7 # number of factors suggested by the scree plot
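As a small bookkeeping aid (a sketch, not part of the original script), the suggestions collected so far can be gathered in one place; the MAP criterion is added next via VSS():
# Overview of the factor-number suggestions recorded so far
c(parallel_pc = pa1, parallel_fa = pa2, eigenvalue = ew, scree = scree)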
map <- VSS(dat, n=106)
# Here n refers to the maximum number of factors that could be extracted, which equals the number of items, i.e. 106.
map
##
## Very Simple Structure
## Call: vss(x = x, n = n, rotate = rotate, diagonal = diagonal, fm = fm,
## n.obs = n.obs, plot = plot, title = title, use = use, cor = cor)
## VSS complexity 1 achieves a maximimum of 0.94 with 1 factors
## VSS complexity 2 achieves a maximimum of 0.96 with 2 factors
##
## The Velicer MAP achieves a minimum of 0.01 with 10 factors
## BIC achieves a minimum of -19650.75 with 7 factors
## Sample Size adjusted BIC achieves a minimum of -5019 with 15 factors
##
## Statistics by number of factors
## vss1 vss2 map dof chisq
## 1 0.94 0.00 0.0177 5459 17236.7188810
## 2 0.60 0.96 0.0141 5354 15037.4387454
## 3 0.51 0.88 0.0122 5250 13487.5954628
## 4 0.33 0.73 0.0093 5147 11661.9861849
## 5 0.29 0.63 0.0078 5045 10516.8177549
## 6 0.28 0.62 0.0071 4944 9714.2887307
## 7 0.25 0.58 0.0066 4844 9059.2842748
## 8 0.25 0.57 0.0064 4745 8524.9432530
## 9 0.25 0.57 0.0062 4647 8048.9159739
## 10 0.25 0.57 0.0060 4550 7648.8593157
## 11 0.26 0.56 0.0061 4454 7335.2091509
## 12 0.25 0.55 0.0061 4359 7024.5096890
## 13 0.24 0.55 0.0061 4265 6752.8344122
## 14 0.26 0.55 0.0062 4172 6501.3725028
## 15 0.25 0.55 0.0063 4080 6218.1039092
## 16 0.24 0.55 0.0063 3989 6002.3009386
## 17 0.24 0.55 0.0064 3899 5751.0186615
## 18 0.24 0.54 0.0065 3810 5507.6403122
## 19 0.23 0.54 0.0065 3722 5305.9789278
## 20 0.23 0.53 0.0066 3635 5112.0361909
## 21 0.23 0.53 0.0067 3549 4913.7442234
## 22 0.23 0.54 0.0068 3464 4756.1941551
## 23 0.23 0.53 0.0070 3380 4575.8924396
## 24 0.23 0.52 0.0071 3297 4408.5000690
## 25 0.22 0.51 0.0073 3215 4266.0579406
## 26 0.22 0.52 0.0074 3134 4117.1124922
## 27 0.21 0.50 0.0075 3054 3943.4650132
## 28 0.20 0.50 0.0077 2975 3777.3865868
## 29 0.20 0.50 0.0079 2897 3623.3079324
## 30 0.21 0.50 0.0081 2820 3478.6565008
## 31 0.20 0.50 0.0082 2744 3349.0270517
## 32 0.20 0.49 0.0084 2669 3223.9566719
## 33 0.21 0.49 0.0086 2595 3101.1170181
## 34 0.19 0.48 0.0088 2522 2989.6041018
## 35 0.19 0.47 0.0090 2450 2888.0270962
## 36 0.19 0.46 0.0092 2379 2795.7262055
## 37 0.18 0.47 0.0094 2309 2718.9630560
## 38 0.18 0.46 0.0096 2240 2623.3097516
## 39 0.18 0.47 0.0099 2172 2517.0252505
## 40 0.18 0.46 0.0102 2105 2415.0093581
## 41 0.18 0.45 0.0104 2039 2329.8727592
## 42 0.18 0.44 0.0107 1974 2234.5682464
## 43 0.18 0.44 0.0110 1910 2162.2781262
## 44 0.18 0.44 0.0113 1847 2079.2483890
## 45 0.18 0.44 0.0116 1785 2002.9969462
## 46 0.18 0.44 0.0119 1724 1927.0559906
## 47 0.17 0.43 0.0122 1664 1840.1634586
## 48 0.18 0.43 0.0125 1605 1753.4582457
## 49 0.18 0.43 0.0129 1547 1673.1536895
## 50 0.18 0.43 0.0133 1490 1594.9252314
## 51 0.17 0.42 0.0137 1434 1520.2313176
## 52 0.17 0.41 0.0140 1379 1457.5316080
## 53 0.18 0.42 0.0144 1325 1387.9166379
## 54 0.18 0.41 0.0147 1272 1317.8887099
## 55 0.17 0.41 0.0152 1220 1255.5238431
## 56 0.17 0.41 0.0156 1169 1188.0829223
## 57 0.18 0.41 0.0160 1119 1115.0234879
## 58 0.17 0.41 0.0165 1070 1057.8493728
## 59 0.17 0.41 0.0170 1022 991.0536102
## 60 0.17 0.40 0.0176 975 928.1778075
## 61 0.17 0.41 0.0180 929 877.6801285
## 62 0.17 0.41 0.0186 884 820.5004719
## 63 0.17 0.40 0.0192 840 774.8302624
## 64 0.16 0.41 0.0199 797 722.9500889
## 65 0.16 0.40 0.0204 755 680.5697994
## 66 0.16 0.39 0.0211 714 631.5272208
## 67 0.16 0.38 0.0218 674 581.6574876
## 68 0.16 0.38 0.0225 635 538.7567763
## 69 0.15 0.38 0.0234 597 490.9686035
## 70 0.14 0.37 0.0240 560 441.1964193
## 71 0.15 0.38 0.0250 524 406.7193069
## 72 0.15 0.44 0.0258 489 373.4438778
## 73 0.15 0.43 0.0269 455 338.6191405
## 74 0.16 0.43 0.0280 422 310.4824129
## 75 0.15 0.44 0.0292 390 276.0994002
## 76 0.15 0.43 0.0302 359 250.0329779
## 77 0.16 0.44 0.0317 329 218.2026267
## 78 0.14 0.43 0.0333 300 201.0195212
## 79 0.15 0.45 0.0346 272 171.6693790
## 80 0.14 0.38 0.0364 245 156.2088363
## 81 0.14 0.42 0.0379 219 139.3730300
## 82 0.13 0.42 0.0397 194 123.2868356
## 83 0.13 0.43 0.0418 170 103.2570058
## 84 0.13 0.42 0.0437 147 78.8393854
## 85 0.12 0.42 0.0460 125 66.6075625
## 86 0.12 0.42 0.0486 104 49.6243543
## 87 0.12 0.40 0.0515 84 43.9984212
## 88 0.13 0.38 0.0548 65 28.2607936
## 89 0.13 0.38 0.0581 47 18.8471153
## 90 0.13 0.40 0.0616 30 15.6330012
## 91 0.13 0.38 0.0655 14 9.7399608
## 92 0.13 0.36 0.0704 -1 4.9212757
## 93 0.13 0.40 0.0767 -15 1.1536258
## 94 0.11 0.36 0.0831 -28 0.1776878
## 95 0.11 0.38 0.0899 -40 0.0006878
## 96 0.12 0.38 0.0992 -51 0.0007887
## 97 0.12 0.36 0.1113 -61 0.0001785
## 98 0.13 0.38 0.1239 -70 0.0000286
## 99 0.13 0.37 0.1420 -78 0.0000354
## 100 0.13 0.37 0.1674 -85 0.0000099
## 101 0.13 0.37 0.2032 -91 0.0000064
## 102 0.12 0.37 0.2537 -96 0.0000069
## 103 0.12 0.36 0.3394 -100 0.0000096
## 104 0.12 0.37 0.4959 -103 0.0000035
## 105 0.12 0.36 1.0000 -105 0.0000000
## 106 0.12 0.36 NA -106 0.0000000
## prob
## 1 0.00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
## 2 0.00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
## 3 0.00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
## 4 0.00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
## 5 0.00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
## 6 0.00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000015
## 7 0.00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000014072646312955291276417663093312171440629754215478897095
## 8 0.00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000079878351844046332555504541517166217090561985969543457031250000000000000000000000000000000000000
## 9 0.00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000043765489526979243655770146492756111911148764193058013916015625000000000000000000000000000000000000000000000000000000000000000000
## 10 0.00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000243375473933133127388619976194661376212025061249732971191406250000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
## 11 0.00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000009456794403397913050500489529071046490571461617946624755859375000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
## 12 0.00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000098246547886492925283005717140838441991945728659629821777343750000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
## 13 0.00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000331123654372717884670357380016980641812551766633987426757812500000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
## 14 0.00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000018484961660103120565171175293528449401492252945899963378906250000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
## 15 0.00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000181011373794523702795977726509590866044163703918457031250000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
## 16 0.00000000000000000000000000000000000000000000000000000000000000000000000000000000000009893999964234163770233909662721316635725088417530059814453125000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
## 17 0.00000000000000000000000000000000000000000000000000000000000000000000000000150023677743255952734061087205930107302265241742134094238281250000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
## 18 0.00000000000000000000000000000000000000000000000000000000000000000354649148789134470048309277778741943620843812823295593261718750000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
## 19 0.00000000000000000000000000000000000000000000000000000000000896153624261157870367622280127761769108474254608154296875000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
## 20 0.00000000000000000000000000000000000000000000000000000600609951544512169450962257855053394450806081295013427734375000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
## 21 0.00000000000000000000000000000000000000000000000612584411197885479825192556191382209362927824258804321289062500000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
## 22 0.00000000000000000000000000000000000000000001886757281166440920923410562437538828817196190357208251953125000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
## 23 0.00000000000000000000000000000000000000121780242613572725184772904682972693990450352430343627929687500000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
## 24 0.00000000000000000000000000000000001249434685900214529936415319699705150924273766577243804931640625000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
## 25 0.00000000000000000000000000000000525189274056661858317701652154596558830235153436660766601562500000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
## 26 0.00000000000000000000000000000504364260935718273625577579011292073118966072797775268554687500000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
## 27 0.00000000000000000000000008058958567194191699965821751305838915868662297725677490234375000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
## 28 0.00000000000000000000040100592736825132278780103867177331267157569527626037597656250000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
## 29 0.00000000000000000042781234903516385885934025790078294448903761804103851318359375000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
## 30 0.00000000000000014612919464122188627046294850941876575234346091747283935546875000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
## 31 0.00000000000001063228532097314345543533076199338438527774997055530548095703125000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
## 32 0.00000000000048699745919000223984100805818542312408681027591228485107421875000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
## 33 0.00000000001753476871546849037795251513571770374255720525979995727539062500000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
## 34 0.00000000024019014066667115500034596076162074496096465736627578735351562500000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
## 35 0.00000000152734752752433906755440140567969820040161721408367156982421875000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
## 36 0.00000000501095658945024970739451353196614036278333514928817749023437500000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
## 37 0.00000000537754422758236039431647967923311171034583821892738342285156250000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
## 38 0.00000002677143567204204328835448478685066220350563526153564453125000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
## 39 0.00000030061467739003475856725960957405163753719534724950790405273437500000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
## 40 0.00000241820696452680223968202677387040466783219017088413238525390625000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
## 41 0.00000635484325765306602446652606275279140390921384096145629882812500000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
## 42 0.00003333073526849985413833890590673547649203101173043251037597656250000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
## 43 0.00004328984253709137596660694313754902395885437726974487304687500000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
## 44 0.00011536893485783729861558766227602745857439003884792327880859375000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
## 45 0.00021512668918938622443157160368798486160812899470329284667968750000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
## 46 0.00041563899462510446678603637238325063663069158792495727539062500000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
## 47 0.00152040572169934878367369979201839669258333742618560791015625000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
## 48 0.00530496080455555244631549172140694281551986932754516601562500000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
## 49 0.01317493989920291014295017362201178912073373794555664062500000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
## 50 0.02932058611809161899341980017652531387284398078918457031250000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
## 51 0.05576169019589242298318154666958434972912073135375976562500000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
## 52 0.06936697109764997093428462449082871899008750915527343750000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
## 53 0.11194010231925378973105722479886026121675968170166015625000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
## 54 0.18081448115547218979948240757948951795697212219238281250000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
## 55 0.23399307464507812515108753359527327120304107666015625000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
## 56 0.34226045744700106521207771947956643998622894287109375000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
## 57 0.52792684189840377584346242656465619802474975585937500000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
## 58 0.59841010495798330204308967950055375695228576660156250000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
## 59 0.75071559159525169668825128610478714108467102050781250000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
## 60 0.85602279378338042015172959509072825312614440917968750000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
## 61 0.88448506291243833565829390863655135035514831542968750000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
## 62 0.93723684909749604976525461097480729222297668457031250000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
## 63 0.94702751452056876946272723216679878532886505126953125000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
## 64 0.97119590230860319834960137086454778909683227539062500000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
## 65 0.97528899120814938061840848604333586990833282470703125000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
## 66 0.98792809957936611464646148306201212108135223388671875000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
## 67 0.99562604175998425937166302901459857821464538574218750000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
## 68 0.99768446405164712409430194384185597300529479980468750000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
## 69 0.99942726683298865886229123134398832917213439941406250000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
## 70 0.99993251676231265090422084540477953851222991943359375000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
## 71 0.99995461144334774061803727818187326192855834960937500000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
## 72 0.99996996593996045898933289208798669278621673583984375000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
## 73 0.99998801324681607649580428187618963420391082763671875000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
## 74 0.99998764127917383337518231201102025806903839111328125000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
## 75 0.99999713576629900568804032445768825709819793701171875000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
## 76 0.99999725848354781287952164348098449409008026123046875000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
## 77 0.99999952760265353735036342186504043638706207275390625000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
## 78 0.99999755060931960759518233317066915333271026611328125000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
## 79 0.99999963951744297308010800406918860971927642822265625000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
## 80 0.99999791692250705743560956761939451098442077636718750000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
## 81 0.99999364712097182916750170988962054252624511718750000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
## 82 0.99998077208715585584286600351333618164062500000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
## 83 0.99998705531346776531620434980140998959541320800781250000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
## 84 0.99999920307342970993147446279181167483329772949218750000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
## 85 0.99999591767571105460632452377467416226863861083984375000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
## 86 0.99999871575749676999578241520794108510017395019531250000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
## 87 0.99990427611542076480333207655348815023899078369140625000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
## 88 0.99998016759512875495374828460626304149627685546875000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
## 89 0.99991945338923382546880702648195438086986541748046875000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
## 90 0.98564080418405775052548278836184181272983551025390625000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
## 91 0.78093746784316409037529638226260431110858917236328125000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
## 92 NA
## 93 NA
## 94 NA
## 95 NA
## 96 NA
## 97 NA
## 98 NA
## 99 NA
## 100 NA
## 101 NA
## 102 NA
## 103 NA
## 104 NA
## 105 NA
## 106 NA
## sqresid fit RMSEA BIC SABIC complex eChisq
## 1 110.4 0.94 0.0758 -15118 2202 1.0 25872.31591807451331988
## 2 79.9 0.96 0.0694 -16695 291 1.5 16688.95967378960995120
## 3 62.9 0.97 0.0646 -17629 -972 1.9 11911.46945053323543107
## 4 50.2 0.97 0.0580 -18844 -2514 2.2 8227.33522634988912614
## 5 41.6 0.98 0.0537 -19385 -3378 2.5 5996.36303343471354310
## 6 36.6 0.98 0.0507 -19588 -3902 2.7 4788.68030172842190950
## 7 32.6 0.98 0.0481 -19651 -4282 2.8 3906.44600429156980681
## 8 30.5 0.98 0.0460 -19598 -4544 2.9 3447.38347668648930266
## 9 28.4 0.98 0.0441 -19494 -4750 3.0 3045.45259298223072619
## 10 26.5 0.99 0.0425 -19319 -4883 3.2 2687.24886975082836216
## 11 25.0 0.99 0.0414 -19063 -4932 3.3 2448.42973890789608049
## 12 23.7 0.99 0.0403 -18811 -4981 3.4 2229.11926703099516089
## 13 22.5 0.99 0.0393 -18526 -4994 3.5 2036.05495222739364181
## 14 21.3 0.99 0.0385 -18226 -4989 3.4 1862.22337339440991855
## 15 20.3 0.99 0.0373 -17964 -5019 3.5 1709.26942324145670682
## 16 19.3 0.99 0.0366 -17640 -4984 3.5 1577.08804239022242655
## 17 18.5 0.99 0.0355 -17358 -4988 3.6 1456.21221807692563743
## 18 17.8 0.99 0.0344 -17074 -4986 3.7 1345.07120111923063632
## 19 17.0 0.99 0.0336 -16754 -4945 3.7 1243.12448745442043219
## 20 16.2 0.99 0.0328 -16432 -4899 3.7 1145.13545094038977368
## 21 15.5 0.99 0.0319 -16121 -4861 3.7 1057.63007906897450994
## 22 14.8 0.99 0.0314 -15775 -4784 3.9 981.81567461655185980
## 23 14.2 0.99 0.0306 -15457 -4733 3.9 909.16984688129537062
## 24 13.6 0.99 0.0299 -15133 -4672 4.0 839.05560273425021478
## 25 13.1 0.99 0.0294 -14789 -4589 4.2 784.25262560381679577
## 26 12.6 0.99 0.0288 -14458 -4515 4.0 729.35119524897595511
## 27 12.1 0.99 0.0277 -14157 -4468 4.2 674.28970369782211947
## 28 11.7 0.99 0.0267 -13855 -4416 4.3 623.72597089908629187
## 29 11.3 0.99 0.0257 -13547 -4356 4.3 580.58124893065189553
## 30 10.8 0.99 0.0248 -13235 -4288 4.1 537.80007557597525647
## 31 10.4 0.99 0.0241 -12914 -4208 4.2 500.64857307086754190
## 32 10.0 0.99 0.0234 -12595 -4127 4.2 468.20308101240726728
## 33 9.7 0.99 0.0226 -12279 -4046 4.1 436.03488255913180183
## 34 9.3 1.00 0.0221 -11958 -3956 4.4 406.18531976220060642
## 35 8.9 1.00 0.0217 -11633 -3860 4.4 379.31680345021919720
## 36 8.6 1.00 0.0214 -11304 -3756 4.4 356.30432954860458494
## 37 8.1 1.00 0.0216 -10966 -3640 4.4 333.71808471599251789
## 38 7.9 1.00 0.0212 -10653 -3546 4.3 313.62140247534716764
## 39 7.7 1.00 0.0204 -10356 -3465 4.3 293.49815563775621285
## 40 7.4 1.00 0.0196 -10061 -3383 4.4 273.86068592723387383
## 41 7.2 1.00 0.0193 -9755 -3286 4.4 256.17425009241816269
## 42 7.0 1.00 0.0186 -9465 -3202 4.4 239.85234930229697170
## 43 6.7 1.00 0.0186 -9158 -3098 4.4 223.76612833737775077
## 44 6.5 1.00 0.0181 -8868 -3008 4.4 208.41098829244376134
## 45 6.3 1.00 0.0178 -8577 -2913 4.4 195.60736898487294866
## 46 6.0 1.00 0.0175 -8291 -2821 4.4 182.82168429418905475
## 47 5.8 1.00 0.0166 -8022 -2743 4.5 170.17223842651881682
## 48 5.7 1.00 0.0155 -7759 -2667 4.4 157.82713203161179649
## 49 5.4 1.00 0.0145 -7496 -2588 4.4 146.20002476193320717
## 50 5.3 1.00 0.0134 -7236 -2509 4.4 135.80228592333699567
## 51 5.2 1.00 0.0124 -6979 -2429 4.4 125.09318224046361934
## 52 5.0 1.00 0.0120 -6716 -2340 4.5 115.72782144085194034
## 53 4.8 1.00 0.0109 -6465 -2261 4.4 106.94327589707529569
## 54 4.6 1.00 0.0094 -6221 -2185 4.4 98.41384900830398408
## 55 4.4 1.00 0.0084 -5975 -2105 4.4 90.66220868978503233
## 56 4.3 1.00 0.0060 -5740 -2032 4.3 83.10087209254699303
## 57 4.2 1.00 0.0000 -5517 -1967 4.4 76.22815176121321201
## 58 4.0 1.00 0.0000 -5284 -1889 4.4 69.93550043513596393
## 59 3.9 1.00 0.0000 -5066 -1824 4.3 64.34747717389132049
## 60 3.8 1.00 0.0000 -4851 -1757 4.3 58.85045176597833461
## 61 3.7 1.00 0.0000 -4628 -1681 4.3 54.04106622475261901
## 62 3.6 1.00 0.0000 -4419 -1614 4.2 49.12159847972156967
## 63 3.5 1.00 0.0000 -4204 -1539 4.1 45.13469650211213491
## 64 3.4 1.00 0.0000 -4001 -1472 4.2 40.92047091282605464
## 65 3.2 1.00 0.0000 -3794 -1399 4.2 37.18385081692495220
## 66 3.0 1.00 0.0000 -3600 -1335 4.2 33.24322606173659977
## 67 2.9 1.00 0.0000 -3413 -1275 4.2 29.79281864236032007
## 68 2.8 1.00 0.0000 -3225 -1210 4.2 26.64923046634939752
## 69 2.8 1.00 0.0000 -3047 -1153 4.2 23.33570429092407394
## 70 2.7 1.00 0.0000 -2878 -1101 4.2 20.26921357997823492
## 71 2.6 1.00 0.0000 -2699 -1036 4.1 18.07435794847568999
## 72 2.5 1.00 0.0000 -2525 -973 4.4 16.12075591821209031
## 73 2.5 1.00 0.0000 -2358 -915 4.5 14.35036765902541056
## 74 2.4 1.00 0.0000 -2191 -852 4.5 12.50252932996249022
## 75 2.3 1.00 0.0000 -2035 -798 4.5 11.01175711920742017
## 76 2.3 1.00 0.0000 -1878 -739 4.6 9.62990945253087816
## 77 2.3 1.00 0.0000 -1732 -688 4.6 8.40245157058028447
## 78 2.1 1.00 0.0000 -1577 -625 4.4 7.34030907056735593
## 79 2.1 1.00 0.0000 -1440 -577 4.5 6.31745431425128867
## 80 2.0 1.00 0.0000 -1296 -519 4.1 5.34556379979636365
## 81 1.9 1.00 0.0000 -1159 -464 4.5 4.60505688714877870
## 82 1.8 1.00 0.0000 -1027 -411 4.4 3.77099949743362828
## 83 1.7 1.00 0.0000 -904 -365 4.3 3.02217796733488342
## 84 1.8 1.00 0.0000 -792 -326 4.3 2.30212714842441679
## 85 1.8 1.00 0.0000 -674 -278 4.2 1.85925787459161151
## 86 1.7 1.00 0.0000 -567 -237 4.3 1.43052035402430189
## 87 1.6 1.00 0.0000 -454 -187 4.5 1.16694481154090357
## 88 1.4 1.00 0.0000 -357 -151 4.4 0.79637964804050332
## 89 1.4 1.00 0.0000 -260 -111 4.5 0.50033689114673574
## 90 1.4 1.00 0.0000 -162 -67 4.5 0.40487867452754162
## 91 1.4 1.00 0.0000 -73 -29 4.4 0.24301775717320867
## 92 1.3 1.00 NA NA NA 4.5 0.11911250397626495
## 93 1.2 1.00 NA NA NA 4.4 0.02735398259514563
## 94 1.3 1.00 NA NA NA 4.5 0.00448685979795078
## 95 1.3 1.00 NA NA NA 4.4 0.00001533462867733
## 96 1.5 1.00 NA NA NA 4.4 0.00001659310314613
## 97 1.4 1.00 NA NA NA 4.4 0.00000394198613447
## 98 1.4 1.00 NA NA NA 4.3 0.00000064543435060
## 99 1.5 1.00 NA NA NA 4.4 0.00000074222707420
## 100 1.5 1.00 NA NA NA 4.4 0.00000020252708435
## 101 1.4 1.00 NA NA NA 4.3 0.00000013156420474
## 102 1.4 1.00 NA NA NA 4.2 0.00000011543776114
## 103 1.4 1.00 NA NA NA 4.2 0.00000014666891540
## 104 1.4 1.00 NA NA NA 4.2 0.00000006000242285
## 105 1.4 1.00 NA NA NA 4.2 0.00000000000000015
## 106 1.4 1.00 NA NA NA 4.2 0.00000000000000015
## SRMR eCRMS eBIC
## 1 0.0787325715954 0.0795 -6483
## 2 0.0632341109053 0.0645 -15044
## 3 0.0534219153191 0.0550 -19205
## 4 0.0443983040030 0.0462 -22279
## 5 0.0379036246806 0.0398 -23905
## 6 0.0338723014666 0.0359 -24514
## 7 0.0305933966073 0.0328 -24804
## 8 0.0287396570786 0.0311 -24676
## 9 0.0270123709881 0.0296 -24497
## 10 0.0253741047510 0.0281 -24280
## 11 0.0242203607189 0.0271 -23950
## 12 0.0231101857829 0.0261 -23606
## 13 0.0220867355652 0.0252 -23242
## 14 0.0211228575700 0.0244 -22865
## 15 0.0202368096100 0.0236 -22473
## 16 0.0194385897190 0.0230 -22065
## 17 0.0186788063341 0.0223 -21653
## 18 0.0179518586942 0.0217 -21237
## 19 0.0172581442504 0.0211 -20817
## 20 0.0165639996945 0.0205 -20399
## 21 0.0159185564117 0.0199 -19977
## 22 0.0153374007232 0.0194 -19549
## 23 0.0147590803062 0.0189 -19124
## 24 0.0141785609619 0.0184 -18702
## 25 0.0137077057930 0.0180 -18271
## 26 0.0132191987878 0.0176 -17846
## 27 0.0127104242880 0.0172 -17427
## 28 0.0122245729865 0.0167 -17009
## 29 0.0117941945296 0.0163 -16590
## 30 0.0113513421274 0.0159 -16176
## 31 0.0109522481519 0.0156 -15763
## 32 0.0105914133514 0.0153 -15351
## 33 0.0102210944376 0.0150 -14944
## 34 0.0098650410229 0.0147 -14542
## 35 0.0095331806980 0.0144 -14142
## 36 0.0092394758314 0.0141 -13744
## 37 0.0089418350705 0.0139 -13352
## 38 0.0086684137084 0.0137 -12963
## 39 0.0083857029121 0.0134 -12580
## 40 0.0081003098145 0.0132 -12202
## 41 0.0078343779784 0.0129 -11829
## 42 0.0075806906119 0.0127 -11460
## 43 0.0073220713710 0.0125 -11097
## 44 0.0070663816636 0.0123 -10739
## 45 0.0068458816958 0.0121 -10384
## 46 0.0066183638139 0.0119 -10035
## 47 0.0063852975483 0.0117 -9692
## 48 0.0061493274023 0.0115 -9355
## 49 0.0059184843976 0.0112 -9023
## 50 0.0057041419444 0.0110 -8695
## 51 0.0054746152684 0.0108 -8374
## 52 0.0052656946671 0.0106 -8058
## 53 0.0050618995850 0.0104 -7746
## 54 0.0048558458641 0.0102 -7441
## 55 0.0046606869323 0.0100 -7140
## 56 0.0044621028324 0.0097 -6845
## 57 0.0042736059855 0.0095 -6556
## 58 0.0040934135932 0.0093 -6272
## 59 0.0039264723744 0.0092 -5993
## 60 0.0037550150717 0.0090 -5720
## 61 0.0035983113481 0.0088 -5452
## 62 0.0034306232722 0.0086 -5190
## 63 0.0032884560901 0.0085 -4933
## 64 0.0031311732308 0.0083 -4683
## 65 0.0029847912711 0.0081 -4438
## 66 0.0028222037554 0.0079 -4199
## 67 0.0026717300824 0.0077 -3965
## 68 0.0025268480125 0.0075 -3737
## 69 0.0023645431315 0.0072 -3515
## 70 0.0022037140139 0.0069 -3299
## 71 0.0020809815195 0.0068 -3088
## 72 0.0019653028556 0.0066 -2882
## 73 0.0018542500437 0.0065 -2682
## 74 0.0017307555183 0.0063 -2489
## 75 0.0016242957048 0.0061 -2300
## 76 0.0015189654155 0.0060 -2118
## 77 0.0014188608088 0.0058 -1942
## 78 0.0013261539931 0.0057 -1771
## 79 0.0012302909771 0.0056 -1606
## 80 0.0011317058025 0.0054 -1447
## 81 0.0010503989615 0.0053 -1293
## 82 0.0009505282335 0.0051 -1146
## 83 0.0008509358120 0.0049 -1005
## 84 0.0007426795014 0.0046 -869
## 85 0.0006674313448 0.0045 -739
## 86 0.0005854419042 0.0043 -615
## 87 0.0005287640772 0.0043 -497
## 88 0.0004368143130 0.0040 -384
## 89 0.0003462326716 0.0038 -278
## 90 0.0003114578169 0.0042 -177
## 91 0.0002412991478 0.0048 -83
## 92 0.0001689333760 NA NA
## 93 0.0000809556319 NA NA
## 94 0.0000327874796 NA NA
## 95 0.0000019167851 NA NA
## 96 0.0000019938872 NA NA
## 97 0.0000009718390 NA NA
## 98 0.0000003932446 NA NA
## 99 0.0000004217015 NA NA
## 100 0.0000002202817 NA NA
## 101 0.0000001775439 NA NA
## 102 0.0000001663071 NA NA
## 103 0.0000001874588 NA NA
## 104 0.0000001199006 NA NA
## 105 0.0000000000059 NA NA
## 106 0.0000000000059 NA NA
mapn <- 7
knitr::kable(rbind(pa1,pa2,ew,scree, mapn))
|       |    |
|:------|---:|
| pa1   |  7 |
| pa2   |  8 |
| ew    | 10 |
| scree |  7 |
| mapn  |  7 |
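Most criteria point to seven factors. A quick tally (a sketch that simply re-uses the values from the table above) makes the majority vote explicit:
# Tally of the retention criteria from the table above (sketch)
crit <- c(pa1 = 7, pa2 = 8, ew = 10, scree = 7, mapn = 7)
as.integer(names(which.max(table(crit)))) # most frequent suggestion: 7 factors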
desc <- round(psych::describe(dat), 2)
knitr::kable(desc)
| | vars | n | mean | sd | median | trimmed | mad | min | max | range | skew | kurtosis | se |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
| agent_1C_1 | 1 | 372 | 3.07 | 1.14 | 3 | 3.06 | 1.48 | 1 | 6 | 5 | -0.01 | -0.74 | 0.06 |
| agent_1C_2 | 2 | 372 | 2.76 | 1.16 | 3 | 2.71 | 1.48 | 1 | 6 | 5 | 0.36 | -0.27 | 0.06 |
| agent_1C_3 | 3 | 370 | 3.17 | 1.19 | 3 | 3.13 | 1.48 | 1 | 6 | 5 | 0.23 | -0.48 | 0.06 |
| agent_1C_4 | 4 | 371 | 2.88 | 1.15 | 3 | 2.87 | 1.48 | 1 | 6 | 5 | 0.29 | -0.29 | 0.06 |
| agent_1C_5 | 5 | 372 | 2.43 | 1.08 | 2 | 2.36 | 1.48 | 1 | 6 | 5 | 0.43 | -0.36 | 0.06 |
| agent_1D_1 | 6 | 372 | 2.46 | 1.15 | 2 | 2.38 | 1.48 | 1 | 6 | 5 | 0.48 | -0.33 | 0.06 |
| agent_1D_2 | 7 | 372 | 2.92 | 1.29 | 3 | 2.89 | 1.48 | 1 | 6 | 5 | 0.08 | -0.95 | 0.07 |
| agent_1D_3 | 8 | 372 | 2.34 | 1.11 | 2 | 2.24 | 1.48 | 1 | 6 | 5 | 0.55 | -0.28 | 0.06 |
| agent_1D_4 | 9 | 372 | 2.81 | 1.14 | 3 | 2.81 | 1.48 | 1 | 6 | 5 | 0.05 | -0.83 | 0.06 |
| agent_1D_5 | 10 | 371 | 2.60 | 1.19 | 3 | 2.55 | 1.48 | 1 | 6 | 5 | 0.31 | -0.59 | 0.06 |
| agent_1A_1 | 11 | 370 | 3.99 | 1.21 | 4 | 4.01 | 1.48 | 1 | 6 | 5 | -0.19 | -0.60 | 0.06 |
| agent_1A_2 | 12 | 371 | 4.19 | 1.09 | 4 | 4.19 | 1.48 | 1 | 6 | 5 | -0.16 | -0.38 | 0.06 |
| agent_1A_3 | 13 | 371 | 4.12 | 1.16 | 4 | 4.13 | 1.48 | 1 | 6 | 5 | -0.27 | -0.44 | 0.06 |
| agent_1A_4 | 14 | 371 | 2.55 | 1.14 | 2 | 2.49 | 1.48 | 1 | 5 | 4 | 0.32 | -0.69 | 0.06 |
| agent_1A_5 | 15 | 371 | 2.76 | 1.28 | 3 | 2.68 | 1.48 | 1 | 6 | 5 | 0.30 | -0.76 | 0.07 |
| agent_1E_1 | 16 | 371 | 2.47 | 1.14 | 2 | 2.41 | 1.48 | 1 | 5 | 4 | 0.40 | -0.69 | 0.06 |
| agent_1E_2 | 17 | 371 | 3.07 | 1.18 | 3 | 2.97 | 1.48 | 1 | 6 | 5 | 0.71 | 0.39 | 0.06 |
| agent_1E_3 | 18 | 370 | 1.83 | 0.86 | 2 | 1.73 | 1.48 | 1 | 5 | 4 | 0.81 | 0.00 | 0.04 |
| agent_1E_4 | 19 | 371 | 2.18 | 1.07 | 2 | 2.05 | 1.48 | 1 | 5 | 4 | 0.80 | 0.05 | 0.06 |
| agent_1E_5 | 20 | 372 | 1.73 | 0.88 | 2 | 1.60 | 1.48 | 1 | 5 | 4 | 1.12 | 0.75 | 0.05 |
| agent_1B_1 | 21 | 371 | 2.52 | 1.21 | 2 | 2.45 | 1.48 | 1 | 6 | 5 | 0.39 | -0.84 | 0.06 |
| agent_1B_2 | 22 | 371 | 2.24 | 1.08 | 2 | 2.14 | 1.48 | 1 | 6 | 5 | 0.57 | -0.45 | 0.06 |
| agent_1B_3 | 23 | 371 | 2.02 | 1.03 | 2 | 1.89 | 1.48 | 1 | 6 | 5 | 0.89 | 0.41 | 0.05 |
| agent_1B_4 | 24 | 371 | 3.36 | 1.14 | 4 | 3.39 | 1.48 | 1 | 6 | 5 | -0.29 | -0.25 | 0.06 |
| agent_1B_5 | 25 | 371 | 2.58 | 1.12 | 3 | 2.55 | 1.48 | 1 | 6 | 5 | 0.34 | -0.31 | 0.06 |
| agent_2E_5 | 26 | 371 | 2.30 | 1.16 | 2 | 2.20 | 1.48 | 1 | 6 | 5 | 0.63 | -0.37 | 0.06 |
| agent_2B_1 | 27 | 372 | 2.32 | 1.13 | 2 | 2.23 | 1.48 | 1 | 5 | 4 | 0.49 | -0.70 | 0.06 |
| agent_2B_2 | 28 | 371 | 2.03 | 1.07 | 2 | 1.89 | 1.48 | 1 | 5 | 4 | 0.82 | -0.22 | 0.06 |
| agent_2B_3 | 29 | 371 | 2.73 | 1.23 | 3 | 2.69 | 1.48 | 1 | 6 | 5 | 0.21 | -0.77 | 0.06 |
| agent_2B_4 | 30 | 372 | 2.32 | 1.15 | 2 | 2.22 | 1.48 | 1 | 6 | 5 | 0.64 | -0.31 | 0.06 |
| agent_2B_5 | 31 | 372 | 1.82 | 0.95 | 2 | 1.68 | 1.48 | 1 | 5 | 4 | 1.09 | 0.70 | 0.05 |
| agent_2A_1 | 32 | 371 | 2.42 | 1.24 | 2 | 2.32 | 1.48 | 1 | 6 | 5 | 0.59 | -0.50 | 0.06 |
| agent_2A_2 | 33 | 371 | 2.98 | 1.28 | 3 | 2.95 | 1.48 | 1 | 6 | 5 | 0.15 | -0.68 | 0.07 |
| agent_2A_3 | 34 | 371 | 2.49 | 1.20 | 2 | 2.41 | 1.48 | 1 | 6 | 5 | 0.54 | -0.35 | 0.06 |
| agent_2A_4 | 35 | 371 | 2.33 | 1.15 | 2 | 2.22 | 1.48 | 1 | 6 | 5 | 0.65 | -0.35 | 0.06 |
| agent_2A_5 | 36 | 370 | 3.62 | 1.23 | 4 | 3.71 | 1.48 | 1 | 6 | 5 | -0.58 | -0.29 | 0.06 |
| agent_2D_1 | 37 | 371 | 2.94 | 1.29 | 3 | 2.90 | 1.48 | 1 | 6 | 5 | 0.04 | -0.91 | 0.07 |
| agent_2D_2 | 38 | 371 | 2.87 | 1.27 | 3 | 2.83 | 1.48 | 1 | 6 | 5 | 0.13 | -0.92 | 0.07 |
| agent_2D_3 | 39 | 371 | 2.56 | 1.19 | 2 | 2.52 | 1.48 | 1 | 5 | 4 | 0.24 | -1.00 | 0.06 |
| agent_2D_4 | 40 | 370 | 3.18 | 1.23 | 3 | 3.22 | 1.48 | 1 | 6 | 5 | -0.35 | -0.85 | 0.06 |
| agent_2D_5 | 41 | 371 | 2.22 | 1.11 | 2 | 2.11 | 1.48 | 1 | 5 | 4 | 0.60 | -0.52 | 0.06 |
| agent_3A_1 | 42 | 371 | 3.17 | 1.29 | 3 | 3.18 | 1.48 | 1 | 6 | 5 | -0.20 | -0.78 | 0.07 |
| agent_3A_2 | 43 | 371 | 2.50 | 1.31 | 2 | 2.40 | 1.48 | 1 | 6 | 5 | 0.40 | -0.97 | 0.07 |
| agent_3A_3 | 44 | 371 | 2.57 | 1.27 | 2 | 2.49 | 1.48 | 1 | 6 | 5 | 0.37 | -0.81 | 0.07 |
| agent_3B_1 | 45 | 371 | 2.93 | 1.29 | 3 | 2.89 | 1.48 | 1 | 6 | 5 | 0.10 | -0.78 | 0.07 |
| agent_3B_2 | 46 | 371 | 2.99 | 1.18 | 3 | 3.00 | 1.48 | 1 | 6 | 5 | -0.06 | -0.69 | 0.06 |
| agent_3B_3 | 47 | 371 | 1.58 | 0.87 | 1 | 1.41 | 0.00 | 1 | 5 | 4 | 1.48 | 1.52 | 0.05 |
| agent_3B_4 | 48 | 371 | 2.10 | 1.11 | 2 | 1.98 | 1.48 | 1 | 5 | 4 | 0.68 | -0.60 | 0.06 |
| agent_3B_5 | 49 | 370 | 2.86 | 1.19 | 3 | 2.80 | 1.48 | 1 | 6 | 5 | 0.51 | 0.13 | 0.06 |
| agent_3C_1 | 50 | 367 | 2.18 | 1.11 | 2 | 2.07 | 1.48 | 1 | 6 | 5 | 0.67 | -0.38 | 0.06 |
| agent_3C_2 | 51 | 370 | 3.19 | 1.12 | 3 | 3.18 | 1.48 | 1 | 6 | 5 | -0.08 | -0.66 | 0.06 |
| agent_3C_3 | 52 | 368 | 1.77 | 0.95 | 1 | 1.63 | 0.00 | 1 | 5 | 4 | 1.14 | 0.75 | 0.05 |
| agent_3D_1 | 53 | 369 | 2.13 | 1.08 | 2 | 2.02 | 1.48 | 1 | 6 | 5 | 0.65 | -0.38 | 0.06 |
| agent_3D_2 | 54 | 370 | 3.29 | 1.04 | 3 | 3.32 | 1.48 | 1 | 6 | 5 | -0.35 | -0.26 | 0.05 |
| agent_3D_3 | 55 | 370 | 3.34 | 1.29 | 4 | 3.39 | 1.48 | 1 | 6 | 5 | -0.19 | -0.69 | 0.07 |
| agent_3E_1 | 56 | 369 | 2.85 | 1.18 | 3 | 2.86 | 1.48 | 1 | 6 | 5 | -0.05 | -0.85 | 0.06 |
| agent_3E_2 | 57 | 369 | 2.14 | 1.05 | 2 | 2.02 | 1.48 | 1 | 6 | 5 | 0.69 | -0.06 | 0.05 |
| agent_3E_3 | 58 | 370 | 2.16 | 1.13 | 2 | 2.02 | 1.48 | 1 | 6 | 5 | 0.86 | 0.09 | 0.06 |
| agent_4A_1 | 59 | 369 | 3.75 | 1.24 | 4 | 3.82 | 1.48 | 1 | 6 | 5 | -0.49 | -0.11 | 0.06 |
| agent_4A_2 | 60 | 369 | 2.00 | 1.08 | 2 | 1.83 | 1.48 | 1 | 6 | 5 | 1.11 | 0.99 | 0.06 |
| agent_4A_3 | 61 | 368 | 3.15 | 1.04 | 3 | 3.21 | 1.48 | 1 | 6 | 5 | -0.38 | -0.49 | 0.05 |
| agent_4B_1 | 62 | 372 | 2.31 | 1.19 | 2 | 2.20 | 1.48 | 1 | 6 | 5 | 0.57 | -0.60 | 0.06 |
| agent_4B_2 | 63 | 372 | 2.56 | 1.20 | 2 | 2.49 | 1.48 | 1 | 6 | 5 | 0.32 | -0.79 | 0.06 |
| agent_4B_3 | 64 | 372 | 2.17 | 1.09 | 2 | 2.05 | 1.48 | 1 | 5 | 4 | 0.70 | -0.34 | 0.06 |
| agent_4B_4 | 65 | 372 | 3.17 | 1.26 | 3 | 3.20 | 1.48 | 1 | 6 | 5 | -0.24 | -0.81 | 0.07 |
| agent_4B_5 | 66 | 371 | 2.11 | 1.05 | 2 | 1.99 | 1.48 | 1 | 5 | 4 | 0.73 | -0.17 | 0.05 |
| agent_4C_1 | 67 | 371 | 1.83 | 1.02 | 2 | 1.65 | 1.48 | 1 | 6 | 5 | 1.27 | 1.22 | 0.05 |
| agent_4C_2 | 68 | 372 | 1.68 | 0.91 | 1 | 1.53 | 0.00 | 1 | 6 | 5 | 1.46 | 2.25 | 0.05 |
| agent_4C_3 | 69 | 372 | 2.40 | 1.18 | 2 | 2.31 | 1.48 | 1 | 6 | 5 | 0.53 | -0.53 | 0.06 |
| agent_4D_1 | 70 | 371 | 1.85 | 0.95 | 2 | 1.72 | 1.48 | 1 | 5 | 4 | 0.98 | 0.28 | 0.05 |
| agent_4D_2 | 71 | 370 | 1.76 | 0.92 | 2 | 1.61 | 1.48 | 1 | 5 | 4 | 1.11 | 0.53 | 0.05 |
| agent_4D_3 | 72 | 370 | 2.39 | 1.27 | 2 | 2.29 | 1.48 | 1 | 6 | 5 | 0.53 | -0.73 | 0.07 |
| agent_5C_1 | 73 | 372 | 3.13 | 1.34 | 3 | 3.14 | 1.48 | 1 | 6 | 5 | -0.10 | -0.89 | 0.07 |
| agent_5C_2 | 74 | 372 | 3.74 | 1.17 | 4 | 3.86 | 1.48 | 1 | 6 | 5 | -0.77 | 0.21 | 0.06 |
| agent_5C_3 | 75 | 371 | 2.94 | 1.17 | 3 | 2.94 | 1.48 | 1 | 6 | 5 | 0.00 | -0.70 | 0.06 |
| agent_5C_4 | 76 | 372 | 3.25 | 1.24 | 3 | 3.28 | 1.48 | 1 | 6 | 5 | -0.07 | -0.54 | 0.06 |
| agent_5C_5 | 77 | 372 | 2.30 | 1.13 | 2 | 2.20 | 1.48 | 1 | 5 | 4 | 0.51 | -0.66 | 0.06 |
| agent_5D_1 | 78 | 371 | 1.77 | 1.03 | 1 | 1.58 | 0.00 | 1 | 5 | 4 | 1.27 | 0.84 | 0.05 |
| agent_5D_2 | 79 | 372 | 3.23 | 1.35 | 3 | 3.24 | 1.48 | 1 | 6 | 5 | -0.02 | -0.84 | 0.07 |
| agent_5D_3 | 80 | 372 | 3.23 | 1.24 | 3 | 3.28 | 1.48 | 1 | 6 | 5 | -0.26 | -0.74 | 0.06 |
| agent_5D_4 | 81 | 371 | 2.49 | 1.27 | 2 | 2.38 | 1.48 | 1 | 6 | 5 | 0.53 | -0.54 | 0.07 |
| agent_5D_5 | 82 | 370 | 1.98 | 1.06 | 2 | 1.82 | 1.48 | 1 | 5 | 4 | 0.95 | 0.16 | 0.05 |
| agent_2C_1 | 83 | 370 | 3.05 | 1.36 | 3 | 3.03 | 1.48 | 1 | 6 | 5 | 0.01 | -1.01 | 0.07 |
| agent_2C_2 | 84 | 372 | 1.98 | 1.01 | 2 | 1.84 | 1.48 | 1 | 6 | 5 | 1.07 | 1.09 | 0.05 |
| agent_2C_3 | 85 | 370 | 2.33 | 1.14 | 2 | 2.25 | 1.48 | 1 | 6 | 5 | 0.51 | -0.68 | 0.06 |
| agent_2C_4 | 86 | 372 | 2.36 | 1.12 | 2 | 2.28 | 1.48 | 1 | 6 | 5 | 0.51 | -0.42 | 0.06 |
| agent_2C_5 | 87 | 372 | 2.43 | 1.28 | 2 | 2.31 | 1.48 | 1 | 6 | 5 | 0.62 | -0.52 | 0.07 |
| agent_5A_1 | 88 | 372 | 3.09 | 1.25 | 3 | 3.09 | 1.48 | 1 | 6 | 5 | -0.04 | -0.78 | 0.06 |
| agent_5A_2 | 89 | 372 | 2.83 | 1.33 | 3 | 2.77 | 1.48 | 1 | 6 | 5 | 0.24 | -0.84 | 0.07 |
| agent_5A_3 | 90 | 372 | 2.19 | 1.18 | 2 | 2.04 | 1.48 | 1 | 6 | 5 | 0.94 | 0.40 | 0.06 |
| agent_5A_4 | 91 | 372 | 2.40 | 1.21 | 2 | 2.31 | 1.48 | 1 | 6 | 5 | 0.54 | -0.64 | 0.06 |
| agent_5A_5 | 92 | 372 | 1.90 | 0.98 | 2 | 1.76 | 1.48 | 1 | 6 | 5 | 0.90 | 0.19 | 0.05 |
| agent_5B_1 | 93 | 371 | 2.57 | 1.17 | 2 | 2.51 | 1.48 | 1 | 6 | 5 | 0.44 | -0.44 | 0.06 |
| agent_5B_2 | 94 | 371 | 4.28 | 1.08 | 4 | 4.36 | 1.48 | 1 | 6 | 5 | -0.89 | 1.10 | 0.06 |
| agent_5B_3 | 95 | 370 | 3.70 | 1.20 | 4 | 3.77 | 1.48 | 1 | 6 | 5 | -0.39 | -0.26 | 0.06 |
| agent_5B_4 | 96 | 371 | 2.53 | 1.27 | 2 | 2.43 | 1.48 | 1 | 6 | 5 | 0.55 | -0.32 | 0.07 |
| agent_5B_5 | 97 | 371 | 3.47 | 1.13 | 4 | 3.49 | 1.48 | 1 | 6 | 5 | -0.25 | -0.15 | 0.06 |
| agent_5E_1 | 98 | 370 | 3.11 | 1.22 | 3 | 3.12 | 1.48 | 1 | 6 | 5 | -0.07 | -0.68 | 0.06 |
| agent_5E_2 | 99 | 371 | 2.57 | 1.17 | 3 | 2.52 | 1.48 | 1 | 6 | 5 | 0.32 | -0.62 | 0.06 |
| agent_5E_3 | 100 | 371 | 2.09 | 1.05 | 2 | 1.95 | 1.48 | 1 | 6 | 5 | 0.81 | 0.14 | 0.05 |
| agent_5E_4 | 101 | 371 | 2.19 | 1.13 | 2 | 2.07 | 1.48 | 1 | 6 | 5 | 0.72 | -0.22 | 0.06 |
| agent_5E_5 | 102 | 371 | 2.26 | 1.16 | 2 | 2.15 | 1.48 | 1 | 6 | 5 | 0.64 | -0.48 | 0.06 |
| agent_2E_1 | 103 | 371 | 3.68 | 1.15 | 4 | 3.76 | 1.48 | 1 | 6 | 5 | -0.59 | 0.08 | 0.06 |
| agent_2E_2 | 104 | 372 | 2.80 | 1.17 | 3 | 2.78 | 1.48 | 1 | 6 | 5 | 0.18 | -0.68 | 0.06 |
| agent_2E_3 | 105 | 371 | 2.47 | 1.18 | 2 | 2.39 | 1.48 | 1 | 6 | 5 | 0.56 | -0.29 | 0.06 |
| agent_2E_4 | 106 | 370 | 2.29 | 1.17 | 2 | 2.19 | 1.48 | 1 | 6 | 5 | 0.62 | -0.43 | 0.06 |
cor.plot(cor(dat, use="pairwise.complete.obs"))
# Bartlett test of sphericity
cortest.bartlett(dat, n = nrow(dat))
## R was not square, finding R from data
## $chisq
## [1] 34759.12
##
## $p.value
## [1] 0
##
## $df
## [1] 5565
kmo <- KMO(dat)
kmo
## Kaiser-Meyer-Olkin factor adequacy
## Call: KMO(r = dat)
## Overall MSA = 0.96
## MSA for each item =
## agent_1C_1 agent_1C_2 agent_1C_3 agent_1C_4 agent_1C_5 agent_1D_1 agent_1D_2
## 0.96 0.75 0.96 0.95 0.96 0.97 0.95
## agent_1D_3 agent_1D_4 agent_1D_5 agent_1A_1 agent_1A_2 agent_1A_3 agent_1A_4
## 0.96 0.96 0.96 0.96 0.96 0.96 0.96
## agent_1A_5 agent_1E_1 agent_1E_2 agent_1E_3 agent_1E_4 agent_1E_5 agent_1B_1
## 0.96 0.96 0.88 0.97 0.95 0.96 0.97
## agent_1B_2 agent_1B_3 agent_1B_4 agent_1B_5 agent_2E_5 agent_2B_1 agent_2B_2
## 0.97 0.97 0.91 0.97 0.96 0.97 0.96
## agent_2B_3 agent_2B_4 agent_2B_5 agent_2A_1 agent_2A_2 agent_2A_3 agent_2A_4
## 0.95 0.97 0.96 0.98 0.95 0.96 0.96
## agent_2A_5 agent_2D_1 agent_2D_2 agent_2D_3 agent_2D_4 agent_2D_5 agent_3A_1
## 0.96 0.94 0.96 0.97 0.97 0.86 0.95
## agent_3A_2 agent_3A_3 agent_3B_1 agent_3B_2 agent_3B_3 agent_3B_4 agent_3B_5
## 0.97 0.98 0.96 0.97 0.96 0.98 0.88
## agent_3C_1 agent_3C_2 agent_3C_3 agent_3D_1 agent_3D_2 agent_3D_3 agent_3E_1
## 0.96 0.93 0.97 0.97 0.94 0.89 0.97
## agent_3E_2 agent_3E_3 agent_4A_1 agent_4A_2 agent_4A_3 agent_4B_1 agent_4B_2
## 0.97 0.97 0.76 0.96 0.94 0.96 0.97
## agent_4B_3 agent_4B_4 agent_4B_5 agent_4C_1 agent_4C_2 agent_4C_3 agent_4D_1
## 0.98 0.97 0.98 0.97 0.97 0.97 0.97
## agent_4D_2 agent_4D_3 agent_5C_1 agent_5C_2 agent_5C_3 agent_5C_4 agent_5C_5
## 0.96 0.97 0.96 0.93 0.97 0.91 0.98
## agent_5D_1 agent_5D_2 agent_5D_3 agent_5D_4 agent_5D_5 agent_2C_1 agent_2C_2
## 0.98 0.95 0.96 0.97 0.98 0.92 0.98
## agent_2C_3 agent_2C_4 agent_2C_5 agent_5A_1 agent_5A_2 agent_5A_3 agent_5A_4
## 0.98 0.99 0.95 0.96 0.96 0.95 0.97
## agent_5A_5 agent_5B_1 agent_5B_2 agent_5B_3 agent_5B_4 agent_5B_5 agent_5E_1
## 0.96 0.98 0.94 0.91 0.94 0.90 0.96
## agent_5E_2 agent_5E_3 agent_5E_4 agent_5E_5 agent_2E_1 agent_2E_2 agent_2E_3
## 0.98 0.98 0.98 0.98 0.91 0.96 0.96
## agent_2E_4
## 0.98
# is any of the item-level MSA values below .50?
any(kmo$MSAi < 0.50)
## [1] FALSE
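Besides checking that no item drops below .50, it can help to look at the items with the weakest sampling adequacy; a small sketch based on the kmo object above:
# Items with the lowest individual MSA values (sketch)
head(sort(kmo$MSAi), 5)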
# Robust lookup: find codebook columns even when umlauts are encoded differently
resolve_codebook_col <- function(df, preferred, patterns = NULL) {
nms <- names(df)
if (!is.null(preferred) && preferred %in% nms) return(preferred)
# 1) direct regex match
if (!is.null(patterns)) {
hit <- nms[Reduce(`|`, lapply(patterns, function(p) grepl(p, nms, ignore.case = TRUE)))]
if (length(hit) > 0) return(hit[1])
}
# 2) ASCII transliteration (Ü -> Ue/U)
nms_ascii <- iconv(nms, to = "ASCII//TRANSLIT")
if (!is.null(preferred)) {
pref_ascii <- iconv(preferred, to = "ASCII//TRANSLIT")
if (!is.na(pref_ascii)) {
idx <- which(tolower(nms_ascii) == tolower(pref_ascii))
if (length(idx) > 0) return(nms[idx[1]])
}
}
if (!is.null(patterns)) {
hit <- nms[Reduce(`|`, lapply(patterns, function(p) grepl(p, nms_ascii, ignore.case = TRUE)))]
if (length(hit) > 0) return(hit[1])
}
NA_character_
}
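resolve_codebook_col first tries the exact column name, then regex matches, then ASCII-transliterated matches. A usage sketch (mirroring how the helper is called inside the functions below):
# Example (sketch): resolve the overlap-construct column in the codebook
resolve_codebook_col(Codebook, "Überlappendes Konstrukt",
                     patterns = c("Überlappendes", "Ueberlappendes", "Overlapp"))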
matchItems <- function(efa.result,
Codebooka,
shortitem = "Itemname",
longitem = "Itemtext",
overlap_construct = "Überlappendes Konstrukt",
digits = 3,
cut = NULL) {
fs <- psych::fa.sort(efa.result)
loadings <- as.data.frame(unclass(fs$loadings))
short_vec <- trimws(as.character(Codebooka[[shortitem]]))
long_vec <- as.character(Codebooka[[longitem]])
overlap_col <- resolve_codebook_col(Codebooka, overlap_construct,
patterns = c("Überlappendes", "Ueberlappendes", "Uberlappendes", "Overlapp"))
overlap_vec <- if (!is.na(overlap_col)) as.character(Codebooka[[overlap_col]]) else rep(NA_character_, nrow(Codebooka))
rn <- trimws(rownames(loadings))
matches <- match(rn, short_vec)
if (anyNA(matches)) {
warning("Nicht im Codebook gefunden: ",
paste(rn[is.na(matches)], collapse = ", "))
}
resultEFA <- data.frame(
Itemtext = long_vec[matches],
`Überlappendes Konstrukt` = overlap_vec[matches],
loadings,
check.names = FALSE
)
# blank out NAs for display
if ("Überlappendes Konstrukt" %in% names(resultEFA)) resultEFA$`Überlappendes Konstrukt`[is.na(resultEFA$`Überlappendes Konstrukt`)] <- ""
is.num <- sapply(resultEFA, is.numeric)
resultEFA[is.num] <- lapply(resultEFA[is.num], round, digits)
if (!is.null(cut)) {
tmp <- as.matrix(resultEFA[is.num])
tmp[abs(tmp) < cut] <- NA
resultEFA[is.num] <- as.data.frame(tmp)
}
resultEFA
}
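matchItems is only defined at this point; a typical call produces an annotated loading table. A sketch, assuming the 7-factor solution efa7 and the agent-only codebook Codebooka that are created further below:
# Sketch: annotated loading table, hiding loadings below .30
knitr::kable(matchItems(efa7, Codebooka, digits = 2, cut = 0.30))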
# Mapping: factor -> core construct (based on each item's strongest loading)
assignFactorConstruct <- function(efa.result,
Codebooka,
shortitem = "Itemname",
construct_col = "Kernkonstrukt",
cut = 0.30) {
fs <- psych::fa.sort(efa.result)
L <- as.matrix(unclass(fs$loadings))
rn <- trimws(rownames(L))
short_vec <- trimws(as.character(Codebooka[[shortitem]]))
construct_vec <- if (construct_col %in% names(Codebooka)) as.character(Codebooka[[construct_col]]) else rep(NA_character_, nrow(Codebooka))
matches <- match(rn, short_vec)
absL <- abs(L)
best_factor <- apply(absL, 1, function(x) {
if (all(is.na(x))) return(NA_character_)
ix <- which.max(x)
if (!is.null(cut) && x[ix] < cut) return(NA_character_)
colnames(L)[ix]
})
best_loading <- apply(absL, 1, function(x) if (all(is.na(x))) NA_real_ else max(x, na.rm = TRUE))
df <- data.frame(
Itemname = rn,
Kernkonstrukt = construct_vec[matches],
Faktor = best_factor,
Ladung = best_loading,
stringsAsFactors = FALSE
)
df <- df[!is.na(df$Faktor) & !is.na(df$Kernkonstrukt) & df$Kernkonstrukt != "", ]
out <- do.call(rbind, lapply(split(df, df$Faktor), function(d) {
counts <- sort(table(d$Kernkonstrukt), decreasing = TRUE)
top <- names(counts)[1]
# tie-breaker: highest summed absolute loading
if (length(counts) > 1 && counts[1] == counts[2]) {
w <- tapply(d$Ladung, d$Kernkonstrukt, sum, na.rm = TRUE)
top <- names(sort(w, decreasing = TRUE))[1]
}
data.frame(
Faktor = unique(d$Faktor),
Zugeordnetes_Konstrukt = top,
n_items = nrow(d),
stringsAsFactors = FALSE
)
}))
out <- out[order(out$Faktor), ]
rownames(out) <- NULL
out
}
# Cluster items by factor (e.g. loadings on PA1 > .30)
factorClusters <- function(efa.result,
Codebooka,
cut = 0.30,
shortitem = "Itemname",
longitem = "Itemtext",
overlap_construct = "Überlappendes Konstrukt",
core_construct = "Kernkonstrukt",
expected_difficulty = "erwartete Schwierigkeit (psychologisch)",
digits = 3) {
fs <- psych::fa.sort(efa.result)
loadings <- as.data.frame(unclass(fs$loadings))
item_vec <- trimws(rownames(loadings))
key_vec <- trimws(as.character(Codebooka[[shortitem]]))
idx <- match(item_vec, key_vec)
overlap_col <- resolve_codebook_col(Codebooka, overlap_construct,
patterns = c("Überlappendes", "Ueberlappendes", "Uberlappendes", "Overlapp"))
core_col <- resolve_codebook_col(Codebooka, core_construct,
patterns = c("Kernkonstrukt", "Core"))
diff_col <- resolve_codebook_col(Codebooka, expected_difficulty,
patterns = c("erwartete.*Schwierigkeit", "Schwierigkeit", "difficulty"))
if (anyNA(idx)) {
warning("Nicht im Codebook gefunden: ",
paste(item_vec[is.na(idx)], collapse = ", "))
}
base_df <- data.frame(
Item = item_vec,
Itemtext = as.character(Codebooka[[longitem]])[idx],
`Überlappendes Konstrukt` = if (!is.na(overlap_col)) as.character(Codebooka[[overlap_col]])[idx] else NA_character_,
Kernkonstrukt = if (!is.na(core_col)) as.character(Codebooka[[core_col]])[idx] else NA_character_,
`Erwartete Schwierigkeit` = if (!is.na(diff_col)) as.character(Codebooka[[diff_col]])[idx] else NA_character_,
stringsAsFactors = FALSE,
check.names = FALSE
)
# blank out NAs for display (overlap constructs are often not assigned in the codebook)
if ("Überlappendes Konstrukt" %in% names(base_df)) base_df$`Überlappendes Konstrukt`[is.na(base_df$`Überlappendes Konstrukt`)] <- ""
if ("Kernkonstrukt" %in% names(base_df)) base_df$Kernkonstrukt[is.na(base_df$Kernkonstrukt)] <- ""
if ("Erwartete Schwierigkeit" %in% names(base_df)) base_df$`Erwartete Schwierigkeit`[is.na(base_df$`Erwartete Schwierigkeit`)] <- ""
fac_names <- names(loadings)
clusters <- lapply(fac_names, function(fac) {
lad <- loadings[[fac]]
keep <- which(!is.na(lad) & lad > cut)
if (length(keep) == 0) return(NULL)
out <- base_df[keep, c("Item", "Itemtext", "Erwartete Schwierigkeit", "Überlappendes Konstrukt", "Kernkonstrukt")]
out$Ladung <- round(lad[keep], digits)
out <- out[order(-out$Ladung), ]
rownames(out) <- NULL
out
})
names(clusters) <- fac_names
clusters <- clusters[!sapply(clusters, is.null)]
clusters
}
printFactorClusters <- function(clusters, solution_label = "EFA", cut = 0.30) {
for (fac in names(clusters)) {
cat("\n\n#### ", solution_label, " – ", fac, " (Ladung > ", cut, ")\n\n", sep = "")
print(knitr::kable(clusters[[fac]]))
}
}
# Clusters without a codebook (e.g. second-order EFA, where the "items" are not real item names)
factorClustersSimple <- function(efa.result, cut = 0.30, digits = 3) {
fs <- psych::fa.sort(efa.result)
loadings <- as.data.frame(unclass(fs$loadings))
item_vec <- trimws(rownames(loadings))
fac_names <- names(loadings)
clusters <- lapply(fac_names, function(fac) {
lad <- loadings[[fac]]
keep <- which(!is.na(lad) & lad > cut)
if (length(keep) == 0) return(NULL)
out <- data.frame(
Item = item_vec[keep],
Ladung = round(lad[keep], digits),
stringsAsFactors = FALSE
)
out <- out[order(-out$Ladung), ]
rownames(out) <- NULL
out
})
names(clusters) <- fac_names
clusters <- clusters[!sapply(clusters, is.null)]
clusters
}
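factorClustersSimple is meant for solutions whose rows are not real item names, e.g. the second-order EFA on the factor correlations. A sketch using efa2.ord, which is fitted further below:
# Sketch: clusters of first-order factors in the second-order solution
printFactorClusters(factorClustersSimple(efa2.ord, cut = 0.30),
                    solution_label = "Second-order EFA", cut = 0.30)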
# EFA with 7 factors and promax rotation (oblique)
efa7 <- fa(dat, nfactors = 7, fm = "pa", rotate = "promax")
print(efa7, sort = TRUE, cut = .30)
## Factor Analysis using method = pa
## Call: fa(r = dat, nfactors = 7, rotate = "promax", fm = "pa")
## Standardized loadings (pattern matrix) based upon correlation matrix
## item PA1 PA4 PA5 PA2 PA3 PA7 PA6 h2 u2 com
## agent_4D_2 71 0.94 0.72 0.28 1.1
## agent_4C_2 68 0.93 0.71 0.29 1.1
## agent_4C_1 67 0.83 0.57 0.43 1.1
## agent_3C_3 52 0.83 0.68 0.32 1.2
## agent_2B_5 31 0.81 0.64 0.36 1.1
## agent_5A_5 92 0.79 0.33 0.67 0.33 1.5
## agent_4D_1 70 0.78 0.70 0.30 1.1
## agent_2C_2 84 0.73 0.69 0.31 1.2
## agent_5D_1 78 0.73 0.35 0.69 0.31 1.6
## agent_5E_3 100 0.72 0.73 0.27 1.3
## agent_4B_5 66 0.63 0.69 0.31 1.3
## agent_3E_2 57 0.62 0.58 0.42 1.6
## agent_5D_5 82 0.58 0.38 0.67 0.33 2.1
## agent_4A_2 60 0.58 0.42 0.58 1.2
## agent_3B_3 47 0.57 0.40 0.60 1.4
## agent_5E_4 101 0.53 0.72 0.28 1.5
## agent_4B_3 64 0.49 0.63 0.37 1.5
## agent_1E_3 18 0.48 0.30 0.53 0.47 2.7
## agent_1E_5 20 0.44 0.39 0.47 0.53 2.7
## agent_1B_2 22 0.40 0.51 0.49 2.3
## agent_1E_4 19 0.40 0.45 0.55 3.8
## agent_1E_1 16 0.39 0.68 0.32 3.6
## agent_5A_3 90 0.37 0.31 0.69 2.1
## agent_2C_5 87 0.37 0.34 0.66 2.5
## agent_2C_4 86 0.33 0.67 0.33 3.1
## agent_1C_5 5 0.60 0.40 4.7
## agent_3B_5 49 0.13 0.87 3.9
## agent_5B_4 96 0.22 0.78 3.7
## agent_2D_3 39 0.84 0.74 0.26 1.1
## agent_3D_1 53 0.80 0.78 0.22 1.1
## agent_2B_1 27 0.79 0.73 0.27 1.1
## agent_1C_4 4 0.78 0.67 0.33 1.4
## agent_3B_4 48 0.78 0.75 0.25 1.2
## agent_1B_5 25 0.77 0.61 0.39 1.1
## agent_3C_1 50 0.76 0.67 0.33 1.2
## agent_1A_2 12 -0.75 0.56 0.44 1.5
## agent_2A_3 34 0.74 0.60 0.40 1.3
## agent_3E_3 58 0.72 0.69 0.31 1.2
## agent_4B_1 62 0.70 0.56 0.44 1.1
## agent_2C_3 85 0.69 0.73 0.27 1.2
## agent_1D_1 6 0.63 0.59 0.41 1.5
## agent_5A_4 91 0.56 0.65 0.35 1.7
## agent_5B_3 95 0.51 0.32 0.68 2.0
## agent_5C_5 77 0.36 0.42 0.71 0.29 2.3
## agent_5E_5 102 0.35 0.42 0.70 0.30 2.9
## agent_3C_2 51 0.41 0.29 0.71 2.0
## agent_2E_2 104 0.40 0.38 0.62 1.9
## agent_1C_2 2 0.34 0.13 0.87 2.1
## agent_5D_2 79 0.31 0.30 0.38 0.62 3.0
## agent_5A_2 89 0.74 0.71 0.29 1.1
## agent_1C_3 3 0.74 0.62 0.38 1.2
## agent_4D_3 72 0.33 0.70 0.74 0.26 1.7
## agent_5C_1 73 0.69 0.60 0.40 1.3
## agent_5D_4 81 0.33 0.66 0.73 0.27 1.6
## agent_1A_3 13 -0.63 0.57 0.43 1.3
## agent_2A_1 32 0.61 0.67 0.33 1.4
## agent_3A_2 43 0.31 0.61 0.59 0.41 1.5
## agent_1A_1 11 -0.55 -0.39 0.63 0.37 2.1
## agent_5C_3 75 0.51 0.35 0.65 0.35 2.2
## agent_2E_4 106 0.49 0.63 0.37 1.6
## agent_3A_3 44 0.47 0.58 0.42 2.0
## agent_4C_3 69 0.47 0.62 0.38 2.0
## agent_5B_2 94 0.46 0.28 0.72 2.0
## agent_5E_2 99 0.44 0.64 0.36 2.2
## agent_2B_2 28 0.42 0.58 0.42 2.6
## agent_2E_5 26 0.35 0.47 0.53 3.7
## agent_3B_1 45 0.34 0.31 0.43 0.57 3.5
## agent_5C_2 74 0.73 0.63 0.37 1.6
## agent_5C_4 76 0.72 0.56 0.44 1.3
## agent_3D_3 55 0.64 0.50 0.50 1.8
## agent_2A_5 36 0.63 0.55 0.45 1.6
## agent_5A_1 88 0.60 0.57 0.43 1.3
## agent_5D_3 80 0.60 0.55 0.45 1.1
## agent_5E_1 98 0.50 0.62 0.38 2.3
## agent_4B_4 65 0.50 0.31 0.55 0.45 2.0
## agent_4B_2 63 0.46 0.64 0.36 2.0
## agent_5B_1 93 0.42 0.58 0.42 3.7
## agent_5B_5 97 0.42 0.32 0.68 3.2
## agent_4A_3 61 0.73 0.62 0.38 1.4
## agent_3D_2 54 0.71 0.62 0.38 1.3
## agent_4A_1 59 0.64 0.34 0.66 1.4
## agent_3E_1 56 0.62 0.66 0.34 1.8
## agent_2C_1 83 0.35 0.54 0.57 0.43 2.7
## agent_1D_4 9 0.50 0.48 0.52 1.6
## agent_2D_1 37 0.44 0.50 0.64 0.36 2.9
## agent_1B_4 24 0.48 0.35 0.65 2.5
## agent_2D_2 38 0.47 0.55 0.45 2.2
## agent_3B_2 46 0.42 0.49 0.51 2.2
## agent_3A_1 42 0.38 0.30 0.40 0.60 3.1
## agent_1B_1 21 0.51 0.49 5.0
## agent_1E_2 17 0.34 -0.56 0.40 0.60 2.4
## agent_2A_4 35 0.41 0.55 0.63 0.37 2.3
## agent_2E_3 105 0.43 0.52 0.60 0.40 2.2
## agent_1A_5 15 0.51 0.55 0.45 1.6
## agent_2D_4 40 0.48 0.58 0.42 2.9
## agent_2A_2 33 0.45 0.45 0.50 0.50 2.5
## agent_1A_4 14 0.41 0.60 0.40 2.0
## agent_1D_2 7 0.30 0.32 0.68 3.3
## agent_1D_3 8 0.63 0.37 5.3
## agent_1C_1 1 0.32 0.63 0.62 0.38 1.5
## agent_2B_3 29 0.39 0.62 0.68 0.32 1.7
## agent_1D_5 10 0.61 0.64 0.36 1.5
## agent_2E_1 103 0.43 0.52 0.47 0.53 2.6
## agent_1B_3 23 0.30 0.50 0.61 0.39 2.4
## agent_2D_5 41 0.36 0.33 -0.42 0.35 0.65 4.0
## agent_2B_4 30 0.34 0.36 0.45 0.55 2.6
##
## PA1 PA4 PA5 PA2 PA3 PA7 PA6
## SS loadings 15.96 12.33 9.65 6.91 6.07 4.48 4.34
## Proportion Var 0.15 0.12 0.09 0.07 0.06 0.04 0.04
## Cumulative Var 0.15 0.27 0.36 0.42 0.48 0.52 0.56
## Proportion Explained 0.27 0.21 0.16 0.12 0.10 0.08 0.07
## Cumulative Proportion 0.27 0.47 0.64 0.75 0.85 0.93 1.00
##
## With factor correlations of
## PA1 PA4 PA5 PA2 PA3 PA7 PA6
## PA1 1.00 0.69 0.61 0.51 0.43 0.49 0.41
## PA4 0.69 1.00 0.57 0.46 0.47 0.40 0.23
## PA5 0.61 0.57 1.00 0.45 0.39 0.45 0.27
## PA2 0.51 0.46 0.45 1.00 0.34 0.32 0.27
## PA3 0.43 0.47 0.39 0.34 1.00 0.43 0.30
## PA7 0.49 0.40 0.45 0.32 0.43 1.00 0.47
## PA6 0.41 0.23 0.27 0.27 0.30 0.47 1.00
##
## Mean item complexity = 2.1
## Test of the hypothesis that 7 factors are sufficient.
##
## df null model = 5565 with the objective function = 102.89 with Chi Square = 34759.12
## df of the model are 4844 and the objective function was 27.19
##
## The root mean square of the residuals (RMSR) is 0.03
## The df corrected root mean square of the residuals is 0.03
##
## The harmonic n.obs is 370 with the empirical chi square 3855.81 with prob < 1
## The total n.obs was 375 with Likelihood Chi Square = 9059.31 with prob < 0.00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000014
##
## Tucker Lewis Index of factoring reliability = 0.831
## RMSEA index = 0.048 and the 90 % confidence intervals are 0.047 0.05
## BIC = -19650.72
## Fit based upon off diagonal values = 0.99
## Measures of factor score adequacy
## PA1 PA4 PA5 PA2 PA3 PA7
## Correlation of (regression) scores with factors 0.99 0.98 0.98 0.96 0.96 0.94
## Multiple R square of scores with factors 0.97 0.97 0.95 0.92 0.92 0.89
## Minimum correlation of possible factor scores 0.95 0.94 0.91 0.84 0.84 0.78
## PA6
## Correlation of (regression) scores with factors 0.94
## Multiple R square of scores with factors 0.88
## Minimum correlation of possible factor scores 0.76
Codebooka<- Codebook[grepl("^agent_", Codebook$Itemname), ]
# Assign each factor to a core construct
efa7_factor_map <- assignFactorConstruct(efa7, Codebooka, cut = 0.30)
knitr::kable(efa7_factor_map, caption = "EFA (7 Faktoren, promax): Zuordnung Faktor → Kernkonstrukt (Mehrheit; Tie-Breaker = Ladungssumme)")
| Faktor | Zugeordnetes_Konstrukt | n_items |
|---|---|---|
| PA1 | ja | 25 |
| PA2 | ja | 11 |
| PA3 | ja | 11 |
| PA4 | ja | 21 |
| PA5 | ja | 18 |
| PA6 | ja | 7 |
| PA7 | ja | 8 |
# Clusters: per factor, keep only items with loadings > .30
efa7_clusters <- factorClusters(efa7, Codebooka, cut = 0.30)
printFactorClusters(efa7_clusters, solution_label = "EFA (7 Faktoren, promax)", cut = 0.30)
| Item | Itemtext | Erwartete Schwierigkeit | Überlappendes Konstrukt | Kernkonstrukt | Ladung |
|---|---|---|---|---|---|
| agent_4D_2 | Ich verdiene eine bevorzugte Behandlung. | schwer | Antagonismus | nein | 0.939 |
| agent_4C_2 | Ich sollte stets im Mittelpunkt stehen. | schwer | Extraversion | nein | 0.931 |
| agent_4C_1 | Meine Bedürfnisse sind wichtiger als die der anderen. | mittel | Verträglichkeit | nein | 0.835 |
| agent_3C_3 | Ich bin wertvoller als alle anderen. | schwer |  | ja | 0.833 |
| agent_2B_5 | Ich sollte bevorzugt behandelt werden. | schwer | eventuell antagonistsich ? | ja | 0.813 |
| agent_5A_5 | Ich habe eine besondere Behandlung verdient. | schwer | Anspruchsdenken (Selbstwert) | nein | 0.787 |
| agent_4D_1 | Ich sollte im Mittelpunkt der Aufmerksamkeit stehen. | mittel | Extraversion | nein | 0.776 |
| agent_2C_2 | Ich verdiene mehr Anerkennung als die Menschen in meinem Umfeld. | mittel |  | ja | 0.732 |
| agent_5D_1 | Ich bin der Meinung, dass ich besser bin als alle anderen Menschen. | schwer | Selbstwert | ja | 0.727 |
| agent_5E_3 | Ich sollte im Mittelpunkt stehen, weil ich ein besonderer Mensch bin. | leicht |  | ja | 0.718 |
| agent_4B_5 | Ich erwarte, dass meine Beiträge in Gruppen mehr Beachtung finden als die der anderen. | schwer | Antagonistischer Narzissmus | nein | 0.630 |
| agent_3E_2 | In einer Gruppe bin ich immer der interessanteste Mensch. | schwer | Sebstwertgefühl | ja | 0.617 |
| agent_5D_5 | Ich bin davon überzeugt, dass ich nur erfolgreich bin, wenn ich die Führung im Team habe. | schwer | Dominanzstreben | ja | 0.583 |
| agent_4A_2 | Ich möchte Menschen kontrollieren. | schwer |  | ja | 0.576 |
| agent_3B_3 | Ich sehe mich selbst als allwissend an. | schwer | Selbstüberschätzung | nein | 0.568 |
| agent_5E_4 | Aufgrund meiner herausragenden Fähigkeiten verdiene ich es, mich in Gruppen durchzusetzen. | mittel |  | ja | 0.530 |
| agent_4B_3 | Ich habe das Gefühl, dass mir außergewöhnliche Chancen und Positionen eher zustehen als anderen. | schwer |  | ja | 0.495 |
| agent_1E_3 | Ich tue alles dafür, um im Mittelpunkt zu stehen. | schwer | Extraversion | nein | 0.478 |
| agent_1E_5 | Ich bin die einzige Person die eine Gruppenarbeit zum Erfolg führen kann. | schwer |  | ja | 0.437 |
| agent_2E_3 | Ich habe das Gefühl, mir steht mehr zu, als ich bekomme. | mittel |  | ja | 0.432 |
| agent_2A_4 | Ich habe das Gefühle, mir steht mehr zu als ich bekomme. | mittel |  | ja | 0.413 |
| agent_1B_2 | In Gesprächen verdiene ich es immer die Aufmerksamkeit und Anerkennung aller Anwesenden zu bekommen. | mittel |  | ja | 0.404 |
| agent_1E_4 | Wenn andere mich kritisieren, liegt es daran, dass sie mein Niveau nicht verstehen. | schwer |  | ja | 0.395 |
| agent_1E_1 | Ich verdiene es bewundert zu werden. | schwer |  | ja | 0.391 |
| agent_5A_3 | Ich tendiere zu Lügen, wenn sich daraus ein Vorteil für mich ergibt. | mittel | Verträglichkeit | nein | 0.372 |
| agent_2C_5 | Ich finde, zu viel Rücksicht auf andere hält einen davon ab, wirklich erfolgreich zu sein. | schwer | Psychopathie | nein | 0.368 |
| agent_5C_5 | Aufgrund meiner überragender Talente und Fähigkeiten, verdiene ich es, Entscheidungen in Gruppen zu treffen. | mittel | Dominanzstreben | ja | 0.361 |
| agent_2D_5 | Selbstzweifel sind mir fremd. | schwer |  | ja | 0.355 |
| agent_5E_5 | In Gruppensituationen bin ich die Person, die mit der meisten Komptetenz ausgestattet ist. | mittel | Self esteem | nein | 0.350 |
| agent_1E_2 | Es steht mir zu, dass meine Leistungen anerkannt werden. | schwer |  | ja | 0.344 |
| agent_2B_4 | Ich begebe mich gezielt in soziale Situationen, um mich positiv hervorzutun. | mittel |  | ja | 0.341 |
| agent_2C_4 | Ich lege Wert darauf, dass andere meine überlegene Kompetenz erkennen. | schwer |  | ja | 0.333 |
| agent_4D_3 | Ich bin talentierter als meine Mitmenschen. | schwer | Selbstüberschätzuzng | ja | 0.331 |
| agent_5D_4 | Ich halte mich für deutlich intelligenter als die Menschen in meinem Umwelt. | schwer |  | ja | 0.327 |
| agent_3A_2 | Ich bin intelligenter als alle anderen. | mittel |  | ja | 0.310 |
| agent_1B_3 | Ich strebe aktiv danach, in jeder Situation stark bewundert zu werden. | schwer | Soziale Annerkennung | nein | 0.302 |
| Item | Itemtext | Erwartete Schwierigkeit | Überlappendes Konstrukt | Kernkonstrukt | Ladung |
|---|---|---|---|---|---|
| agent_2D_3 | Ich bin die Person, die bei Teamarbeiten die Führung übernehmen sollte. | schwer |  | ja | 0.843 |
| agent_3D_1 | In Gruppen übernehme ich selbstverständlich die Führung, weil ich am besten dafür geeignet bin. | mittel | Extraversion | nein | 0.800 |
| agent_2B_1 | Ich sollte in Gruppen die Leitung übernehmen, weil ich am geeignetsten bin. | schwer |  | ja | 0.785 |
| agent_1C_4 | Es ist am besten für alle, wenn ich die Führung bei schwierigen Entscheidungen übernehme. | Mittel |  | Ja | 0.778 |
| agent_3B_4 | In Gruppen muss ich die Führung übernehmen, weil ich dafür am besten geeignet bin. | mittel | Dominanz, Extraversion | nein | 0.778 |
| agent_1B_5 | In Gruppenkontexten werden die besten Ergebnisse erzielt, wenn ich die führende Rolle übernehme. | mittel | Soziale Dominanz | nein | 0.772 |
| agent_3C_1 | Ich halte es für selbstverständlich, dass ich in Gruppen die Führung übernehme. | mittel | soziale Dominanz | nein | 0.760 |
| agent_2A_3 | Ich übernehme oft die Führung, weil ich befürchte, dass wir ohne mich scheitern. | mittel |  | ja | 0.736 |
| agent_3E_3 | In Gruppen übernehme ich immer die Führung, weil es sonst nicht funktioniert. | mittel | Soziale Dominanz | ja | 0.717 |
| agent_4B_1 | Ich sehe mich als geborene Führungspersönlichkeit. | leicht | Extraversion | nein | 0.701 |
| agent_2C_3 | Bei Teamarbeit sollte ich die Führung übernehmen, um erfolgreiche Ergebnisse zu erzielen. | mittel | Selbstbewusstsein | nein | 0.691 |
| agent_1D_1 | Ich bin überzeugt, dass ich als Führungskraft anderen überlegen bin. | mittel |  | ja | 0.627 |
| agent_5A_4 | Aufgrund meiner besonderen Fähigkeiten sollte ich Führungspositionen übernehmen. | mittel | Anspruchsdenken (Selbstwert) | nein | 0.557 |
| agent_5B_3 | Ich übernehme gerne die Kontrolle und treffe Entscheidungen eigenständig. | mittel |  | ja | 0.513 |
| agent_5C_5 | Aufgrund meiner überragender Talente und Fähigkeiten, verdiene ich es, Entscheidungen in Gruppen zu treffen. | mittel | Dominanzstreben | ja | 0.423 |
| agent_5E_5 | In Gruppensituationen bin ich die Person, die mit der meisten Komptetenz ausgestattet ist. | mittel | Self esteem | nein | 0.421 |
| agent_3C_2 | Ich habe keine Schwierigkeit, meinen Willen durchzusetzen. | mittel | Durchsetzungsfähigkeit | nein | 0.409 |
| agent_2E_2 | Ich genieße es, mich anderen gegenüber durchzusetzen. | mittel |  | ja | 0.397 |
| agent_1E_5 | Ich bin die einzige Person die eine Gruppenarbeit zum Erfolg führen kann. | schwer |  | ja | 0.393 |
| agent_5D_5 | Ich bin davon überzeugt, dass ich nur erfolgreich bin, wenn ich die Führung im Team habe. | schwer | Dominanzstreben | ja | 0.377 |
| agent_1C_2 | Eine harmonische Atmosphäre ist mir wichtiger als recht zu behalten. | Schwierig | Verträglichkeit | Nein | 0.336 |
| agent_5D_2 | In meinem Arbeitsumfeld gehöre ich zu den kompetentesten Arbeiterinnen. | mittel |  | ja | 0.311 |
| Item | Itemtext | Erwartete Schwierigkeit | Überlappendes Konstrukt | Kernkonstrukt | Ladung |
|---|---|---|---|---|---|
| agent_5A_2 | Ich bin weitaus kompetenter als der Durchschnittsmensch. | mittel | antagonistischer Narzissmus | nein | 0.744 |
| agent_1C_3 | Ich bin überzeugt, dass ich kompetenter als die meisten anderen Menschen bin. | Schwierig |  | Ja | 0.737 |
| agent_4D_3 | Ich bin talentierter als meine Mitmenschen. | schwer | Selbstüberschätzuzng | ja | 0.702 |
| agent_5C_1 | Manchmal denke ich, ich sei anderen überlegen (z.B. intelligenter, fähiger). | schwer | Selbstbild | ja | 0.695 |
| agent_5D_4 | Ich halte mich für deutlich intelligenter als die Menschen in meinem Umwelt. | schwer |  | ja | 0.663 |
| agent_2A_1 | Ich bin deutlich fähiger als andere. | schwer |  | ja | 0.614 |
| agent_3A_2 | Ich bin intelligenter als alle anderen. | mittel |  | ja | 0.611 |
| agent_5C_3 | Durch meine Kompetenz steche ich aus der Masse hervor. | schwer | Einzigartigkeitsglaube | ja | 0.506 |
| agent_2E_4 | Ich bin anderen überlegen. | schwer |  | ja | 0.491 |
| agent_3A_3 | Meine Meinung ist meistens besser als die Meinung anderer. | mittel |  | ja | 0.470 |
| agent_4C_3 | Ich bin besser geeignet als andere, um schwierige Aufgaben zu lösen. | leicht |  | ja | 0.467 |
| agent_5B_2 | Ich halte mich für intelligent. | mittel | Selbstwert | nein | 0.459 |
| agent_5E_2 | Ich halte mich für außergewöhnlich klug und möchte dass andere dies erkennen. | mittel |  | ja | 0.441 |
| agent_2D_1 | Ich bin außergewöhnlich. | leicht | Selbstwertgefühl | ja | 0.438 |
| agent_2B_2 | Andere sind mir unterlegen. | schwer |  | ja | 0.423 |
| agent_2E_5 | Ich erwarte, dass Andere schlechter abschneiden, als ich. | schwer |  | ja | 0.354 |
| agent_5D_1 | Ich bin der Meinung, dass ich besser bin als alle anderen Menschen. | schwer | Selbstwert | ja | 0.351 |
| agent_2C_1 | Ich bin außergewöhnlich. | mittel |  | ja | 0.350 |
| agent_3B_1 | Ich will immer besser als andere sein. | mittel | Leistungsmotivation | ja | 0.341 |
| agent_5D_2 | In meinem Arbeitsumfeld gehöre ich zu den kompetentesten Arbeiterinnen. | mittel |  | ja | 0.301 |
| Item | Itemtext | Erwartete Schwierigkeit | Überlappendes Konstrukt | Kernkonstrukt | Ladung |
|---|---|---|---|---|---|
| agent_5C_2 | Ich genieße die Anerkennung und Bewunderung anderer. | mittel | Extraversion | ja | 0.726 |
| agent_5C_4 | Ich erzähle anderen gern von meinen Erfolgen. | leicht | Selbstwert | ja | 0.715 |
| agent_3D_3 | Ich berichte anderen Menschen gerne von meinen Erfolgen. | leicht |  | ja | 0.643 |
| agent_2A_5 | Ich genieße es, wenn andere mich für meine Leistungen bewundern. | leicht |  | ja | 0.628 |
| agent_5A_1 | Ich genieße es, mit meiner Leistung Aufmerksamkeit auf mich zu ziehen. | leicht |  | ja | 0.604 |
| agent_5D_3 | Es ist mir wichtig, bewusst einen Eindruck zu hinterlassen, der meinen Erfolg und meine Kompetenz zeigt. | leicht | Soziale Erwünschtheit | ja | 0.600 |
| agent_5E_1 | Ich möchte, dass andere meine besonderen Fähigkeiten anerkennen und bewundern. | mittel |  | ja | 0.502 |
| agent_4B_4 | Mir ist es wichtig, dass meine besonderen Fähigkeiten gesehen und angemessen anerkannt werden. | mittel |  | ja | 0.496 |
| agent_4B_2 | Ich habe das Bedürfnis, meine besonderen Stärken sichtbar zu machen, damit andere erkennen, dass ich herausrage. | mittel | Selbstwert | nein | 0.460 |
| agent_2A_2 | Es stört mich extrem, wenn meine Leistungen von anderen nicht bemerkt und anerkannt werden. | schwer |  | ja | 0.450 |
| agent_2E_1 | Ich strebe danach, dass Andere mich positiv bewerten. | leicht |  | ja | 0.434 |
| agent_5B_1 | Ich genieße es, im Mittelpunkt zu stehen und meine Erfolge zu präsentieren. | leicht | Extraversion | nein | 0.424 |
| agent_5B_5 | Ich reagiere empfindlich, wenn jemand meine Leistung in Frage stellt. | mittel |  | ja | 0.423 |
| agent_2B_3 | Es ist mir wichtig, dass andere Menschen mich bewundern. | mittel |  | ja | 0.385 |
| agent_1C_1 | Mir ist es wichtig, von anderen bewundert zu werden. | Mittel |  | Ja | 0.319 |
| Item | Itemtext | Erwartete Schwierigkeit | Überlappendes Konstrukt | Kernkonstrukt | Ladung |
|---|---|---|---|---|---|
| agent_4A_3 | Ich werde von meinen Mitmenschen bewundert. | mittel |  | ja | 0.725 |
| agent_3D_2 | Menschen sind von mir beeindruckt. | schwer |  | nein | 0.710 |
| agent_4A_1 | Ich bin großartig, so wie ich bin. | schwer |  | ja | 0.638 |
| agent_3E_1 | Ich bin bewundernswert. | mittel | Sebstwertgefühl | ja | 0.615 |
| agent_2C_1 | Ich bin außergewöhnlich. | mittel |  | ja | 0.541 |
| agent_1D_4 | Andere Menschen wollen so sein wie ich. | schwer |  | ja | 0.501 |
| agent_2D_1 | Ich bin außergewöhnlich. | leicht | Selbstwertgefühl | ja | 0.495 |
| agent_1B_4 | Ich schaffe es sehr leicht andere für mich zu gewinnen. | mittel |  | ja | 0.479 |
| agent_2D_2 | Zu wissen, dass ich etwas Besonderes bin, gibt mir viel Kraft. | mittel | Selbstwertgefühl | ja | 0.471 |
| agent_3B_2 | Ich hinterlasse durch meine herausragenden Leistungen Eindruck bei anderen. | schwer | Selbstbewertung | ja | 0.424 |
| agent_3A_1 | Mir steht eine grandiose Zukunft zu. | schwer | Anspruchsdenken | nein | 0.380 |
| agent_5C_3 | Durch meine Kompetenz steche ich aus der Masse hervor. | schwer | Einzigartigkeitsglaube | ja | 0.354 |
| agent_2D_5 | Selbstzweifel sind mir fremd. | schwer |  | ja | 0.333 |
| Item | Itemtext | Erwartete Schwierigkeit | Überlappendes Konstrukt | Kernkonstrukt | Ladung |
|---|---|---|---|---|---|
| agent_2A_4 | Ich habe das Gefühle, mir steht mehr zu als ich bekomme. | mittel |  | ja | 0.551 |
| agent_2E_3 | Ich habe das Gefühl, mir steht mehr zu, als ich bekomme. | mittel |  | ja | 0.522 |
| agent_1A_5 | Ich empfinde es als unfair wenn Leute meine Leistungen nicht anerkennen oder mit Neid oder Distanz reagieren, da mir die Bewunderung und Anerkennung zusteht. | mittelschwer |  | ja | 0.515 |
| agent_2D_4 | Meine Leistungen rechtfertigen Respekt und Anerkennung von anderen Personen. | mittel |  | ja | 0.477 |
| agent_2A_2 | Es stört mich extrem, wenn meine Leistungen von anderen nicht bemerkt und anerkannt werden. | schwer |  | ja | 0.455 |
| agent_1A_4 | Ich erwarte für meine außergewöhnlichen Fähigkeiten und Leistungen Anerkennung anderer zu bekommen, da ich diese für mein Tun auch verdiene. | mittelschwer |  | ja | 0.415 |
| agent_5A_5 | Ich habe eine besondere Behandlung verdient. | schwer | Anspruchsdenken (Selbstwert) | nein | 0.330 |
| agent_4B_4 | Mir ist es wichtig, dass meine besonderen Fähigkeiten gesehen und angemessen anerkannt werden. | mittel |  | ja | 0.313 |
| agent_1D_2 | Wenn ich die Führung übernehme, erwarte ich Gehorsam und Respekt. | schwer |  | ja | 0.302 |
| agent_3A_1 | Mir steht eine grandiose Zukunft zu. | schwer | Anspruchsdenken | nein | 0.300 |
| Item | Itemtext | Erwartete Schwierigkeit | Überlappendes Konstrukt | Kernkonstrukt | Ladung |
|---|---|---|---|---|---|
| agent_1C_1 | Mir ist es wichtig, von anderen bewundert zu werden. | Mittel |  | Ja | 0.631 |
| agent_2B_3 | Es ist mir wichtig, dass andere Menschen mich bewundern. | mittel |  | ja | 0.622 |
| agent_1D_5 | Ich strebe aktiv nach Bewunderung. | schwer |  | ja | 0.608 |
| agent_2E_1 | Ich strebe danach, dass Andere mich positiv bewerten. | leicht |  | ja | 0.522 |
| agent_1B_3 | Ich strebe aktiv danach, in jeder Situation stark bewundert zu werden. | schwer | Soziale Annerkennung | nein | 0.497 |
| agent_2B_4 | Ich begebe mich gezielt in soziale Situationen, um mich positiv hervorzutun. | mittel |  | ja | 0.364 |
| agent_3B_1 | Ich will immer besser als andere sein. | mittel | Leistungsmotivation | ja | 0.313 |
| agent_1E_3 | Ich tue alles dafür, um im Mittelpunkt zu stehen. | schwer | Extraversion | nein | 0.302 |
Factor interpretation: PA1 = (general/agentic) narcissism; PA2 = striving for recognition; PA3 = self-esteem; PA4 = social dominance; PA5 = sense of competence; PA6 = striving for admiration; PA7 = entitlement.
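These labels are interpretive. As a rough follow-up (a sketch, not part of the analysis above), the factors could be turned into provisional unit-weighted scales and checked for internal consistency; factor2cluster derives 0/1 scoring keys from the loadings and scoreItems computes the scale statistics:
# Sketch: provisional unit-weighted scales from the 7-factor solution
keys <- psych::factor2cluster(efa7, cut = 0.30) # items x factors scoring keys
scores <- psych::scoreItems(keys, dat, impute = "none")
round(scores$alpha, 2) # Cronbach's alpha per factor-based scale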
agent3b_items <- c("agent_3B_1","agent_3B_2","agent_3B_3","agent_3B_4","agent_3B_5")
# Loadings from the 7-factor promax solution
L <- as.data.frame(unclass(efa7$loadings))
L$Itemname <- rownames(L)
L_sub <- L[L$Itemname %in% agent3b_items, , drop = FALSE]
L_sub <- L_sub[match(agent3b_items, L_sub$Itemname), , drop = FALSE] # keep the items in a fixed order
# Add codebook metadata (item text, core/overlap construct, expected difficulty)
short_vec <- trimws(as.character(Codebooka$Itemname))
matches <- match(L_sub$Itemname, short_vec)
diff_col <- resolve_codebook_col(Codebooka, "erwartete Schwierigkeit (psychologisch)",
patterns = c("erwartete", "Schwierigkeit"))
over_col <- resolve_codebook_col(Codebooka, "Überlappendes Konstrukt",
patterns = c("Überlappendes", "Ueberlappendes", "Uberlappendes", "Overlapp"))
meta <- data.frame(
Itemname = L_sub$Itemname,
Itemtext = as.character(Codebooka$Itemtext[matches]),
Kernkonstrukt = as.character(Codebooka$Kernkonstrukt[matches]),
`Überlappendes Konstrukt` = if (!is.na(over_col)) as.character(Codebooka[[over_col]][matches]) else NA_character_,
`Erwartete Schwierigkeit` = if (!is.na(diff_col)) as.character(Codebooka[[diff_col]][matches]) else NA_character_,
stringsAsFactors = FALSE
)
load_mat <- L_sub[, setdiff(names(L_sub), "Itemname"), drop = FALSE]
load_mat <- as.data.frame(lapply(load_mat, function(x) round(as.numeric(x), 3)))
out_agent3b <- cbind(meta, load_mat)
knitr::kable(out_agent3b,
caption = "Faktorladungen der fünf agent_3B Items (EFA: 7 Faktoren, Promax)")
| Itemname | Itemtext | Kernkonstrukt | Überlappendes.Konstrukt | Erwartete.Schwierigkeit | PA1 | PA4 | PA5 | PA2 | PA3 | PA7 | PA6 |
|---|---|---|---|---|---|---|---|---|---|---|---|
| agent_3B_1 | Ich will immer besser als andere sein. | ja | Leistungsmotivation | mittel | -0.070 | 0.153 | 0.341 | 0.241 | -0.036 | -0.078 | 0.313 |
| agent_3B_2 | Ich hinterlasse durch meine herausragenden Leistungen Eindruck bei anderen. | ja | Selbstbewertung | schwer | -0.031 | 0.146 | 0.267 | 0.122 | 0.424 | -0.045 | -0.016 |
| agent_3B_3 | Ich sehe mich selbst als allwissend an. | nein | Selbstüberschätzung | schwer | 0.568 | 0.011 | 0.210 | -0.100 | -0.029 | -0.101 | 0.043 |
| agent_3B_4 | In Gruppen muss ich die Führung übernehmen, weil ich dafür am besten geeignet bin. | nein | Dominanz, Extraversion | mittel | 0.124 | 0.778 | 0.041 | -0.029 | -0.083 | -0.047 | 0.137 |
| agent_3B_5 | Mir ist Anerkennung egal, solange das Team Erfolg hat. | nein | NA | mittel | 0.208 | -0.066 | 0.068 | 0.183 | -0.167 | 0.017 | 0.097 |
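agent_3B_5 stands out: it does not reach .30 on any factor, and its communality in the output above is only .13. The communalities of all five agent_3B items can be pulled out directly (a small sketch using the efa7 object):
# Communalities (h2) of the agent_3B items in the 7-factor solution (sketch)
round(efa7$communality[agent3b_items], 3)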
# EFA with 7 factors and geominQ rotation (oblique)
efa1.2 <- fa(dat, nfactors = 7, fm="pa", rotate="geominQ") # geominQ oblique
print(efa1.2, sort = TRUE, cut = .30)
fa2 <- efa1.2$Phi # factor intercorrelations (Phi) of the geominQ solution, input for the second-order EFA
round(fa2,2)
## PA4 PA5 PA1 PA2 PA6 PA3 PA7
## PA4 1.00 0.56 0.48 0.40 0.42 0.26 0.14
## PA5 0.56 1.00 0.43 0.40 0.43 0.21 0.24
## PA1 0.48 0.43 1.00 0.40 0.37 0.19 0.07
## PA2 0.40 0.40 0.40 1.00 0.38 0.31 0.21
## PA6 0.42 0.43 0.37 0.38 1.00 0.32 0.27
## PA3 0.26 0.21 0.19 0.31 0.32 1.00 0.13
## PA7 0.14 0.24 0.07 0.21 0.27 0.13 1.00
cor.plot(fa2)
fa_parallel.ho <- fa.parallel(fa2, fm = "ml", fa = "pc", n.iter = 2000, SMC = FALSE, sim = TRUE, quant = 0.95, plot = TRUE, n.obs = nrow(dat)) # since a correlation matrix rather than raw data is used here, n.obs must tell fa.parallel how many respondents the original data set contained
## Parallel analysis suggests that the number of factors = NA and the number of components = 1
print(fa_parallel.ho)
## Call: fa.parallel(x = fa2, n.obs = nrow(dat), fm = "ml", fa = "pc",
## n.iter = 2000, SMC = FALSE, sim = TRUE, quant = 0.95, plot = TRUE)
## Parallel analysis suggests that the number of factors = NA and the number of components = 1
##
## Eigen Values of
##
## eigen values of factors
## [1] 2.39 0.23 0.10 -0.01 -0.04 -0.11 -0.17
##
## eigen values of simulated factors
## [1] NA
##
## eigen values of components
## [1] 2.98 0.99 0.86 0.63 0.58 0.53 0.42
##
## eigen values of simulated components
## [1] 1.20 1.11 1.05 1.00 0.94 0.88 0.81
which(fa_parallel.ho$pc.values>1)
## [1] 1
# Second-order EFA on the factor intercorrelations (oblique and orthogonal rotation)
efa2.ord <- fa(fa2, nfactors = 2, fm = "pa", rotate = "geominQ")
efa2.ord.2 <- fa(fa2, nfactors = 2, fm = "pa", rotate = "varimax")
print(efa2.ord, digits=2, sort=TRUE, cut=.3)
## Factor Analysis using method = pa
## Call: fa(r = fa2, nfactors = 2, rotate = "geominQ", fm = "pa")
## Standardized loadings (pattern matrix) based upon correlation matrix
## item PA1 PA2 h2 u2 com
## PA4 1 0.76 0.58 0.42 1.0
## PA1 3 0.68 0.43 0.57 1.0
## PA5 2 0.57 0.49 0.51 1.2
## PA2 4 0.35 0.33 0.38 0.62 2.0
## PA6 5 0.52 0.47 0.53 1.4
## PA7 7 0.50 0.19 0.81 1.1
## PA3 6 0.36 0.19 0.81 1.2
##
## PA1 PA2
## SS loadings 1.73 0.99
## Proportion Var 0.25 0.14
## Cumulative Var 0.25 0.39
## Proportion Explained 0.64 0.36
## Cumulative Proportion 0.64 1.00
##
## With factor correlations of
## PA1 PA2
## PA1 1.00 0.62
## PA2 0.62 1.00
##
## Mean item complexity = 1.3
## Test of the hypothesis that 2 factors are sufficient.
##
## df null model = 21 with the objective function = 1.57
## df of the model are 8 and the objective function was 0.05
##
## The root mean square of the residuals (RMSR) is 0.03
## The df corrected root mean square of the residuals is 0.05
##
## Fit based upon off diagonal values = 0.99
## Measures of factor score adequacy
## PA1 PA2
## Correlation of (regression) scores with factors 0.89 0.80
## Multiple R square of scores with factors 0.79 0.64
## Minimum correlation of possible factor scores 0.57 0.29
print(efa2.ord.2, digits=2, sort=TRUE)
## Factor Analysis using method = pa
## Call: fa(r = fa2, nfactors = 2, rotate = "varimax", fm = "pa")
## Standardized loadings (pattern matrix) based upon correlation matrix
## item PA1 PA2 h2 u2 com
## PA4 1 0.73 0.22 0.58 0.42 1.2
## PA1 3 0.64 0.15 0.43 0.57 1.1
## PA5 2 0.61 0.33 0.49 0.51 1.5
## PA2 4 0.46 0.41 0.38 0.62 2.0
## PA6 5 0.41 0.55 0.47 0.53 1.9
## PA7 7 0.06 0.43 0.19 0.81 1.0
## PA3 6 0.23 0.37 0.19 0.81 1.7
##
## PA1 PA2
## SS loadings 1.76 0.97
## Proportion Var 0.25 0.14
## Cumulative Var 0.25 0.39
## Proportion Explained 0.64 0.36
## Cumulative Proportion 0.64 1.00
##
## Mean item complexity = 1.5
## Test of the hypothesis that 2 factors are sufficient.
##
## df null model = 21 with the objective function = 1.57
## df of the model are 8 and the objective function was 0.05
##
## The root mean square of the residuals (RMSR) is 0.03
## The df corrected root mean square of the residuals is 0.05
##
## Fit based upon off diagonal values = 0.99
## Measures of factor score adequacy
## PA1 PA2
## Correlation of (regression) scores with factors 0.83 0.68
## Multiple R square of scores with factors 0.69 0.46
## Minimum correlation of possible factor scores 0.37 -0.08
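As a cross-check on this manual two-step procedure (first-order EFA, then an EFA on the factor intercorrelations), psych also provides fa.multi(), which estimates first- and second-order factors in one call. A minimal sketch, assuming dat is the item data frame that the 7-factor solution efa1.2 was fitted to:
# Hierarchical EFA in one call: 7 first-order and 2 second-order factors
efa.hier <- fa.multi(dat, nfactors = 7, nfact2 = 2, fm = "pa", rotate = "oblimin")
fa.multi.diagram(efa.hier)  # path diagram of the two-level solution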
Item group 1:
- agent_4D_2 Ich verdiene eine bevorzugte Behandlung. - schwer
- agent_4C_2 Ich sollte stets im Mittelpunkt stehen. - schwer
- agent_5A_5 Ich habe eine besondere Behandlung verdient. - schwer
- agent_5E_3 Ich sollte im Mittelpunkt stehen, weil ich ein besonderer Mensch bin. - leicht
- agent_3E_2 In einer Gruppe bin ich immer der interessanteste Mensch. - schwer

Item group 2:
- agent_5C_2 Ich genieße die Anerkennung und Bewunderung anderer. - mittel
- agent_5C_4 Ich erzähle anderen gern von meinen Erfolgen. - leicht
- agent_2A_5 Ich genieße es, wenn andere mich für meine Leistungen bewundern. - leicht
- agent_5D_3 Es ist mir wichtig, bewusst einen Eindruck zu hinterlassen, der meinen Erfolg und meine Kompetenz zeigt. - leicht
- agent_5E_1 Ich möchte, dass andere meine besonderen Fähigkeiten anerkennen und bewundern. - mittel
- agent_4B_4 Mir ist es wichtig, dass meine besonderen Fähigkeiten gesehen und angemessen anerkannt werden. - mittel
- agent_2A_2 Es stört mich extrem, wenn meine Leistungen von anderen nicht bemerkt und anerkannt werden. - schwer

Item group 3:
- agent_4A_1 Ich bin großartig, so wie ich bin. - schwer
- agent_4A_3 Ich werde von meinen Mitmenschen bewundert. - mittel
- agent_3D_2 Menschen sind von mir beeindruckt. - schwer
- agent_3E_1 Ich bin bewundernswert. - mittel
- agent_1D_4 Andere Menschen wollen so sein wie ich. - schwer
- agent_2D_1 Ich bin außergewöhnlich. - leicht
- agent_2D_2 Zu wissen, dass ich etwas Besonderes bin, gibt mir viel Kraft. - mittel
- agent_3B_2 Ich hinterlasse durch meine herausragenden Leistungen Eindruck bei anderen. - schwer

Item group 4:
- agent_2D_3 Ich bin die Person, die bei Teamarbeiten die Führung übernehmen sollte. - schwer
- agent_3D_1 In Gruppen übernehme ich selbstverständlich die Führung, weil ich am besten dafür geeignet bin. - mittel
- agent_2B_1 Ich sollte in Gruppen die Leitung übernehmen, weil ich am geeignetsten bin. - schwer
- agent_1C_4 Es ist am besten für alle, wenn ich die Führung bei schwierigen Entscheidungen übernehme. - mittel
- agent_1B_5 In Gruppenkontexten werden die besten Ergebnisse erzielt, wenn ich die führende Rolle übernehme. - mittel
- agent_3C_1 Ich halte es für selbstverständlich, dass ich in Gruppen die Führung übernehme. - mittel
- agent_2A_3 Ich übernehme oft die Führung, weil ich befürchte, dass wir ohne mich scheitern. - mittel
- agent_1D_1 Ich bin überzeugt, dass ich als Führungskraft anderen überlegen bin. - mittel
- agent_5B_3 Ich übernehme gerne die Kontrolle und treffe Entscheidungen eigenständig. - mittel

Item group 5:
- agent_5A_2 Ich bin weitaus kompetenter als der Durchschnittsmensch. - mittel
- agent_1C_3 Ich bin überzeugt, dass ich kompetenter als die meisten anderen Menschen bin. - schwer
- agent_4D_3 Ich bin talentierter als meine Mitmenschen. - schwer
- agent_3A_2 Ich bin intelligenter als alle anderen. - mittel
- agent_3A_3 Meine Meinung ist meistens besser als die Meinung anderer. - mittel
- agent_4C_3 Ich bin besser geeignet als andere, um schwierige Aufgaben zu lösen. - leicht
- agent_5B_2 Ich halte mich für intelligent. - mittel

Item group 6:
- agent_1C_1 Mir ist es wichtig, von anderen bewundert zu werden. - mittel
- agent_2B_3 Es ist mir wichtig, dass andere Menschen mich bewundern. - mittel
- agent_1D_5 Ich strebe aktiv nach Bewunderung. - schwer
- agent_2E_1 Ich strebe danach, dass Andere mich positiv bewerten. - leicht
- agent_1B_3 Ich strebe aktiv danach, in jeder Situation stark bewundert zu werden. - schwer
- agent_2B_4 Ich begebe mich gezielt in soziale Situationen, um mich positiv hervorzutun. - mittel

Item group 7:
- agent_2A_4 Ich habe das Gefühl, mir steht mehr zu als ich bekomme. - mittel
- agent_2E_3 Ich habe das Gefühl, mir steht mehr zu, als ich bekomme. - mittel
- agent_1A_5 Ich empfinde es als unfair, wenn Leute meine Leistungen nicht anerkennen oder mit Neid oder Distanz reagieren, da mir die Bewunderung und Anerkennung zusteht. - mittelschwer
- agent_2D_4 Meine Leistungen rechtfertigen Respekt und Anerkennung von anderen Personen. - mittel
- agent_5A_5 Ich habe eine besondere Behandlung verdient. - schwer
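Item-per-factor lists like the ones above can also be generated from the loadings and the codebook instead of by hand. A minimal sketch, assuming efa1.2 is the 7-factor item solution and that the codebook holds the item IDs in Itemname and the item wording in a (hypothetical) column Itemtext:
# Assign each item to the factor with its largest absolute loading and attach the item text
L <- unclass(efa1.2$loadings)                   # item-by-factor pattern matrix
idx <- max.col(abs(L), ties.method = "first")   # strongest-loading factor per item
item_map <- data.frame(Itemname = rownames(L),
                       Faktor   = colnames(L)[idx],
                       Ladung   = round(L[cbind(seq_len(nrow(L)), idx)], 2))
item_map <- merge(item_map, Codebook[, c("Itemname", "Itemtext")],
                  by = "Itemname", all.x = TRUE)
split(item_map, item_map$Faktor)                # one block of items per factor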
Item 1 loads on factor PA5 at 0.341. ✔ Conceptually a clear competence-experience item. ⚠ Statistically only moderate. ➡ Acceptable, but not a strong anchor item.
Item 2 loads on factor PA3 at 0.424. ✔ Conceptually competence-related plus social impact. ⚠ Likewise a borderline loading. ➡ Contributes to the factor, but not very discriminating.
Item 3 loads on factor PA1 at 0.568. ✔ Clean, clear loading. ✔ Conceptually strongly reflects inflated self-worth. ➡ A good item, clearly tied to its factor.
Item 4 loads on factor PA2 at 0.778. ✔ Clean, clear loading. ✔ Conceptually clearly reflects the striving for recognition. ➡ A good item, clearly tied to its factor.
Item 5 loads on factor PA2 at only 0.183. ❌ Very weak. ❌ Conceptually rather anti-narcissistic / collectivistic. ➡ Problematic; it presumably measures something else.
The last item is clearly "not so good", both statistically and in terms of content.
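Weak items like this one can also be flagged automatically; a minimal sketch (the cutoff of .35 and the use of the 7-factor solution efa1.2 are assumptions):
# Flag items whose largest absolute loading stays below the chosen cutoff
L <- unclass(efa1.2$loadings)
weak_items <- rownames(L)[apply(abs(L), 1, max) < .35]
weak_items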