Setup

library(pacman); p_load(lavaan, psych, semPlot, dynamic, DT)

# Tucker congruence coefficient between two vectors of factor loadings
CONGO <- function(F1, F2) {
  PHI = sum(F1*F2) / sqrt(sum(F1^2)*sum(F2^2))
  return(PHI)}

# Critical value of r for a two-tailed test with the given n and alpha
CRITR <- function(n, alpha = .05) {
  df <- n - 2; CRITT <- qt(alpha/2, df, lower.tail = T)
  CRITR <- sqrt((CRITT^2)/((CRITT^2) + df ))
  return(CRITR)}

# Sample-size-adjusted alpha level; simplifies to N^(-6/5)/S
NP <- function(N, S = 2) {
  NP = 1-pnorm(qnorm(1-(N^(-6/5))/S))
  return(NP)}

FITM <- c("chisq", "df", "npar", "cfi", "rmsea", "rmsea.ci.lower", "rmsea.ci.upper", "aic", "bic", "srmr")

NP(c(47, 50, 79, 56, 39, 35, 41, 76, 459))
## [1] 0.0049255255 0.0045730505 0.0026412986 0.0039915757 0.0061615855
## [6] 0.0070159801 0.0058026901 0.0027669015 0.0003197384
CRITR(c(47, 50, 79, 56, 39, 35, 41, 76, 459))
## [1] 0.28756298 0.27871059 0.22129818 0.26320921 0.31603193 0.33384462 0.30813060
## [8] 0.22565419 0.09154074
CRITR(c(47, 50, 79, 56, 39, 35, 41, 76, 459), c(0.0049, 0.0046, 0.0026, 0.0040, 0.0062, 0.0070, 0.0058, 0.0028, 0.00032))
## [1] 0.4036447 0.3943510 0.3343366 0.3787136 0.4306888 0.4477047 0.4234795
## [8] 0.3382857 0.1672374
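
For reference, the sample-size-adjusted alphas and the critical correlations they imply can be collected into one table (a minimal sketch using the helpers above; the object name CRITTAB is arbitrary):

Ns <- c(47, 50, 79, 56, 39, 35, 41, 76, 459)
CRITTAB <- data.frame(n = Ns, alpha = NP(Ns), critr.05 = CRITR(Ns), critr.adj = CRITR(Ns, NP(Ns)))
round(CRITTAB, 4)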
#DeVries & Kohlberg (1977); n = 47

lowerDK77 <- '
1                                                       
0.45    1                                                   
0.1 0.11    1                                               
0.18    0.32    -0.02   1                                           
0.22    0.25    0.11    0.2 1                                       
0.17    0.4 0.22    0.38    0.21    1                                   
0.3 0.35    0.17    0.26    0.15    0.34    1                               
0.19    0.21    0.01    0.45    -0.15   0.23    0   1                           
0.32    0.45    0.14    0.38    0.1 0.51    0.3 0.48    1                       
0.13    0.06    0.05    0.05    0.06    0.32    0.16    0.05    0.13    1                   
0.11    0.35    0.29    0.25    0.2 0.34    0.22    0.13    0.32    0.09    1               
0.1 0.43    0.05    0.25    0.16    0.45    0.4 0.23    0.45    0.05    0.27    1           
0.12    0.12    -0.04   0.11    0.23    0.25    0.21    0.03    0.17    0.26    0.25    0.12    1       
0.2 0.15    0.31    0.42    0.32    0.31    0.23    0.13    0.11    0.17    0.48    0.28    0.31    1   
0.18    0.44    0.08    0.29    0.45    0.4 0.33    0.16    0.33    0.16    0.32    0.37    0.37    0.33    1'

## LO correlations were reflected because LO was reverse-scored, keeping all measures keyed in a consistently positive direction.

lowerDK77Psy <- '
1                               
0.45    1                           
0.1 0.11    1                       
0.18    0.32    -0.02   1                   
0.22    0.25    0.11    0.2 1               
0.17    0.4 0.22    0.38    0.21    1           
0.3 0.35    0.17    0.26    0.15    0.34    1       
0.19    0.21    0.01    0.45    -0.15   0.23    0   1   
0.32    0.45    0.14    0.38    0.1 0.51    0.3 0.48    1'

lowerDK77Pia <- '
1                   
0.09    1               
0.05    0.27    1           
0.26    0.25    0.12    1       
0.17    0.48    0.28    0.31    1   
0.16    0.32    0.37    0.37    0.33    1'

nDK77 <- 47

DK77.cor = getCov(lowerDK77, names = c("QT", "IN", "MC", "DS", "CO", "S1", "MT", "LD", "PS", "LO", "AS", "LC", "NC", "S2", "AI"))
DK77Psy.cor = getCov(lowerDK77Psy, names = c("QT", "IN", "MC", "DS", "CO", "S1", "MT", "LD", "PS"))
DK77Pia.cor = getCov(lowerDK77Pia, names = c("LO", "AS", "LC", "NC", "S2", "AI"))
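
Because these matrices are re-assembled from rounded, published correlations, it is worth confirming they are positive definite before fitting (a minimal sketch; each smallest eigenvalue should be positive):

sapply(list(DK77 = DK77.cor, Psy = DK77Psy.cor, Pia = DK77Pia.cor),
       function(m) min(eigen(m, symmetric = TRUE, only.values = TRUE)$values))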

#DeVries (1974) - 1; n = 50

lowerD74A <- '
1                                                                                   
0.56    1                                                                               
0.62    0.47    1                                                                           
0.54    0.57    0.58    1                                                                       
0.32    0.36    0.38    0.38    1                                                                   
0.42    0.32    0.4 0.32    0.1 1                                                               
0.23    0.27    0.02    0.19    0.15    0.24    1                                                           
0.33    0.35    0.33    0.33    0.14    0.35    0.24    1                                                       
0.36    0.14    0.2 0.26    0.06    0.28    0.07    0.61    1                                                   
0.42    0.31    0.36    0.23    0.24    0.35    0.17    0.68    0.47    1                                               
0.21    0.3 0.25    0.29    0.05    0.21    0.22    0.6 0.57    0.4 1                                           
0.58    0.48    0.4 0.47    0.28    0.39    0.33    0.62    0.44    0.54    0.32    1                                       
0.4 0.31    0.39    0.5 0.33    0.37    0.17    0.53    0.39    0.47    0.38    0.46    1                                   
0.43    0.33    0.36    0.45    0.33    0.14    0.2 0.56    0.37    0.4 0.39    0.5 0.75    1                               
0.31    0.3 0.27    0.23    0.2 0.18    0.2 0.38    0.35    0.33    0.38    0.45    0.26    0.36    1                           
0.45    0.27    0.12    0.02    0.17    0.34    0.18    0.23    -0.01   0.1 0.02    0.33    0.19    0.15    0.22    1                       
0.05    0.13    0.06    0.07    -0.16   0.22    0.12    0.46    0.24    0.09    0.24    0.18    0.19    0.27    0.02    0.2 1                   
-0.12   -0.03   -0.02   0.09    0.07    0.17    -0.08   -0.03   0.23    -0.01   0.01    0   0.17    0.1 0.1 0   0.15    1               
0.55    0.28    0.36    0.26    0.23    0.55    0.2 0.21    0.29    0.39    0.28    0.28    0.37    0.37    0.42    0.34    -0.06   0.22    1           
0.43    0.27    0.33    0.22    0.23    0.57    0.16    0.34    0.32    0.33    0.27    0.31    0.41    0.4 0.32    0.23    0.06    0.1 0.7 1       
0.34    0.39    0.34    0.22    0.29    0.54    0.36    0.2 0.34    0.07    0.21    0.07    0.28    0.17    0.26    0.22    0.1 0.28    0.51    0.51    1   
0.29    0.24    0.3 0.16    0.15    0.25    0.07    0.08    0.07    0.07    0.16    0.1 0.22    0.14    0.05    0.2 0.02    0.14    0.55    0.61    0.4 1'

lowerD74PsyA <- '
1       
0.56    1   
0.62    0.47    1
0.55    0.28    0.36 1
0.43    0.27    0.33 0.7    1
0.34    0.39    0.34 0.51   0.51    1
0.29    0.24    0.3 0.55    0.61    0.4 1'

lowerD74PiaA <- '
1                                                       
0.38    1                                                   
0.32    0.1 1                                               
0.19    0.15    0.24    1                                           
0.33    0.14    0.35    0.24    1                                       
0.26    0.06    0.28    0.07    0.61    1                                   
0.23    0.24    0.35    0.17    0.68    0.47    1                               
0.29    0.05    0.21    0.22    0.6 0.57    0.4 1                           
0.47    0.28    0.39    0.33    0.62    0.44    0.54    0.32    1                       
0.5 0.33    0.37    0.17    0.53    0.39    0.47    0.38    0.46    1                   
0.45    0.33    0.14    0.2 0.56    0.37    0.4 0.39    0.5 0.75    1               
0.23    0.2 0.18    0.2 0.38    0.35    0.33    0.38    0.45    0.26    0.36    1           
0.02    0.17    0.34    0.18    0.23    -0.01   0.1 0.02    0.33    0.19    0.15    0.22    1       
0.07    -0.16   0.22    0.12    0.46    0.24    0.09    0.24    0.18    0.19    0.27    0.02    0.2 1   
0.09    0.07    0.17    -0.08   -0.03   0.23    -0.01   0.01    0   0.17    0.1 0.1 0   0.15    1'

nD74A <- 50

D74A.cor = getCov(lowerD74A, names = c("SB", "CL", "CN", "PM", "PC", "PD", "PL", "PN", "PI", "PE", "PA", "PR", "PG", "PS", "PSI", "PT", "PGG", "POS", "MW", "MK", "MA", "MR"))
D74PsyA.cor = getCov(lowerD74PsyA, names = c("SB", "CL", "CN", "MW", "MK", "MA", "MR"))
D74PiaA.cor = getCov(lowerD74PiaA, names = c("PM", "PC", "PD", "PL", "PN", "PI", "PE", "PA", "PR", "PG", "PS", "PSI", "PT", "PGG", "POS"))

#DeVries (1974) - 2; n = 79

lowerD74B <- '
1                                                                   
0.58    1                                                               
0.65    0.55    1                                                           
0.42    0.51    0.45    1                                                       
0.38    0.36    0.34    0.41    1                                                   
0.51    0.43    0.44    0.28    0.12    1                                               
0.25    0.16    0.11    0.12    0.15    0.1 1                                           
0.31    0.31    0.33    0.23    0.1 0.38    0.06    1                                       
0.31    0.19    0.22    0.19    0   0.24    -0.01   0.59    1                                   
0.29    0.27    0.26    0.17    0.05    0.33    -0.06   0.67    0.55    1                               
0.21    0.32    0.28    0.18    0   0.12    0.07    0.56    0.58    0.46    1                           
0.54    0.47    0.46    0.35    0.2 0.38    0.18    0.58    0.51    0.58    0.37    1                       
0.45    0.31    0.39    0.4 0.28    0.39    0.06    0.42    0.35    0.42    0.33    0.47    1                   
0.42    0.31    0.34    0.44    0.3 0.22    0.07    0.37    0.3 0.35    0.3 0.43    0.8 1               
0.26    0.24    0.25    0.19    0.17    0.1 0.17    0.22    0.34    0.18    0.31    0.42    0.2 0.22    1           
0.39    0.18    0.07    0.06    0.22    0.16    0.22    0.24    0.01    0.05    0.05    0.17    0.17    0.18    0.02    1       
0.15    0.15    0.14    0.11    -0.02   0.2 0.18    0.33    0.2 0.09    0.06    0.17    0.2 0.25    -0.01   0.28    1   
-0.13   -0.06   -0.03   0   -0.02   -0.02   0   0   0.16    -0.05   0.07    -0.08   0.08    0.05    0.04    0.15    0.21    1'

lowerD74PsyB <- '
1       
0.58    1   
0.65    0.55    1'

lowerD74PiaB <- '
1                                                       
0.41    1                                                   
0.28    0.12    1                                               
0.12    0.15    0.1 1                                           
0.23    0.1 0.38    0.06    1                                       
0.19    0   0.24    -0.01   0.59    1                                   
0.17    0.05    0.33    -0.06   0.67    0.55    1                               
0.18    0   0.12    0.07    0.56    0.58    0.46    1                           
0.35    0.2 0.38    0.18    0.58    0.51    0.58    0.37    1                       
0.4 0.28    0.39    0.06    0.42    0.35    0.42    0.33    0.47    1                   
0.44    0.3 0.22    0.07    0.37    0.3 0.35    0.3 0.43    0.8 1               
0.19    0.17    0.1 0.17    0.22    0.34    0.18    0.31    0.42    0.2 0.22    1           
0.06    0.22    0.16    0.22    0.24    0.01    0.05    0.05    0.17    0.17    0.18    0.02    1       
0.11    -0.02   0.2 0.18    0.33    0.2 0.09    0.06    0.17    0.2 0.25    -0.01   0.28    1   
0   -0.02   -0.02   0   0   0.16    -0.05   0.07    -0.08   0.08    0.05    0.04    0.15    0.21    1'

nD74B <- 79

D74B.cor = getCov(lowerD74B, names = c("SB", "CL", "CN", "PM", "PC", "PD", "PL", "PN", "PI", "PE", "PA", "PR", "PG", "PS", "PSI", "PT", "PGG", "POS"))
D74PsyB.cor = getCov(lowerD74PsyB, names = c("SB", "CL", "CN"))
D74PiaB.cor = getCov(lowerD74PiaB, names = c("PM", "PC", "PD", "PL", "PN", "PI", "PE", "PA", "PR", "PG", "PS", "PSI", "PT", "PGG", "POS"))

#DeVries (1974) - 3; n = 126

lowerD74C <- '
1                                                           
0.53    1                                                       
0.55    0.44    1                                                   
0.55    0.41    0.34    1                                               
0.44    0.23    0.3 0.26    1                                           
0.36    0.3 0.23    0.46    0.06    1                                       
0.42    0.27    0.17    0.39    0.06    0.63    1                                   
0.36    0.25    0.18    0.44    0.07    0.64    0.53    1                               
0.29    0.22    0.08    0.2 0.07    0.57    0.61    0.43    1                           
0.6 0.42    0.3 0.46    0.31    0.56    0.56    0.57    0.4 1                       
0.4 0.35    0.26    0.36    0.06    0.34    0.35    0.33    0.22    0.44    1                   
0.32    0.38    0.23    0.31    0.15    0.32    0.27    0.32    0.2 0.43    0.7 1               
0.29    0.18    0.1 0.15    0.11    0.11    0.25    0.12    0.19    0.26    0.21    0.19    1           
0.33    0.15    0.23    0.16    0.22    0.2 0.08    0.07    0   0.25    0.16    0.17    -0.07   1       
0.45    0.25    0.26    0.35    0.31    0.34    0.3 0.25    0.15    0.28    0.2 0.16    0.11    0.26    1   
0.04    0.04    0.03    0.05    -0.05   0.03    0.13    -0.03   0.01    -0.02   0.09    0.03    0.09    0.19    0.18    1'

D74C.cor = getCov(lowerD74C, names = c("SB", "PM", "PC", "PD", "PL", "PN", "PI", "PE", "PA", "PR", "PG", "PS", "PSI", "PT", "PGG", "POS"))

nD74C <- 126

#Hathaway (1972) - Kindergarten; n = 56

lowerH72K <- '
1                                                                                       
0.11    1                                                                                   
0.2 -0.08   1                                                                               
0.35    0.2 0.28    1                                                                           
0.36    0.27    -0.03   0.28    1                                                                       
0.2 0.05    0.01    0.27    0.3 1                                                                   
0.32    0.07    0.17    0.44    0.4 0.33    1                                                               
0.32    0.14    0.09    0.41    0.28    0.11    0.4 1                                                           
0.12    0.11    0.05    0.26    0.24    0.14    0.18    0.35    1                                                       
0.05    -0.22   0.1 0.14    0.28    0.25    0.35    0.24    0.24    1                                                   
0.32    0.03    0.05    0.24    0.4 0.1 0.31    0.55    0.44    0.43    1                                               
0.22    0.03    0.12    0.27    0.18    0.07    0.37    0.53    0.26    0.23    0.42    1                                           
0   -0.06   0.02    0.14    0.25    0.2 0.2 0.4 0.4 0.3 0.54    0.53    1                                       
0.16    -0.01   0.08    0.1 0.05    0   0.1 0.27    0.21    0.22    0.44    0.23    0.26    1                                   
0.32    0.11    0.2 0.25    0.24    0.1 0.38    0.42    0.28    0.11    0.27    0.17    0.23    0.26    1                               
0.31    0.08    0.12    0.34    0.29    0.13    0.34    0.41    0.16    0.32    0.42    0.34    0.14    0.35    0.3 1                           
0.25    0.03    -0.02   0.36    0.35    0.22    0.13    0.35    0.32    0.13    0.36    0.37    0.35    0.09    0.34    0.19    1                       
0.15    0.37    -0.11   0.26    0.28    0.26    0.17    0.1 0.35    0.15    0.11    0.2 0.27    0.15    0.25    0.31    0.31    1                   
0.09    0.08    0.17    0.32    0.08    -0.1    0.22    0.07    -0.01   0.02    0.12    0.12    0.11    0.25    0.32    0.32    0.23    0.33    1               
0.06    0.12    0.1 0.24    0.02    0.04    -0.04   -0.03   0.1 -0.17   -0.01   0.1 -0.1    0.19    0.08    0.28    0.29    0.17    0.4 1           
0.21    0.19    -0.08   0.24    0.19    0.03    0.08    0.27    0.43    0.02    0.32    0.23    0.26    0.06    0.37    0.05    0.32    0.15    0   0.01    1       
0.26    0   0.13    -0.07   0.02    0.17    0.23    0.07    0.09    0.13    0.19    0.1 0.04    0.09    0.28    0.37    0.03    0.25    0.22    0.15    -0.1    1   
0.24    0.12    0.13    0.46    0.12    0.16    0.31    0.24    0.22    0.1 0.3 0.36    0.34    0.2 0.38    0.29    0.35    0.27    0.4 0.3 0.56    0.11    1'

lowerH72KPsy <- '
1                                                   
0.11    1                                               
0.2 -0.08   1                                           
0.35    0.2 0.28    1                                       
0.36    0.27    -0.03   0.28    1                                   
0.2 0.05    0.01    0.27    0.3 1                               
0.32    0.07    0.17    0.44    0.4 0.33    1                           
0.32    0.14    0.09    0.41    0.28    0.11    0.4 1                       
0.12    0.11    0.05    0.26    0.24    0.14    0.18    0.35    1                   
0.05    -0.22   0.1 0.14    0.28    0.25    0.35    0.24    0.24    1               
0.32    0.03    0.05    0.24    0.4 0.1 0.31    0.55    0.44    0.43    1           
0.22    0.03    0.12    0.27    0.18    0.07    0.37    0.53    0.26    0.23    0.42    1       
0   -0.06   0.02    0.14    0.25    0.2 0.2 0.4 0.4 0.3 0.54    0.53    1   
0.16    -0.01   0.08    0.1 0.05    0   0.1 0.27    0.21    0.22    0.44    0.23    0.26    1'

lowerH72KPia <- '
1                               
0.3 1                           
0.34    0.19    1                       
0.25    0.31    0.31    1                   
0.32    0.32    0.23    0.33    1               
0.08    0.28    0.29    0.17    0.4 1           
0.37    0.05    0.32    0.15    0   0.01    1       
0.28    0.37    0.03    0.25    0.22    0.15    -0.1    1   
0.38    0.29    0.35    0.27    0.4 0.3 0.56    0.11    1'

H72K.cor = getCov(lowerH72K, names = c("WINF", "WCOM", "WARI", "WSIM", "WVOC", "WPICCO", "WPICAR", "WBD", "WMAZ", "WOA", "LTOV", "LTCO", "LTP", "LOMD", "PS", "PT", "PN", "PD", "PCQ", "PCS", "PI", "PM", "PSE"))
H72KPsy.cor = getCov(lowerH72KPsy, names = c("WINF", "WCOM", "WARI", "WSIM", "WVOC", "WPICCO", "WPICAR", "WBD", "WMAZ", "WOA", "LTOV", "LTCO", "LTP", "LOMD"))
H72KPia.cor = getCov(lowerH72KPia, names = c("PS", "PT", "PN", "PD", "PCQ", "PCS", "PI", "PM", "PSE"))

nH72 <- 56

# Hathaway (1972) - First Grade; n = 56

lowerH72I <- '
1                                                                                                               
0.45    1                                                                                                           
0.54    0.19    1                                                                                                       
0.46    0.30    0.19    1                                                                                                   
0.49    0.42    0.29    0.35    1                                                                                               
0.33    0.09    0.23    0.29    0.18    1                                                                                           
0.59    0.26    0.39    0.39    0.33    0.24    1                                                                                       
0.39    0.24    0.27    0.20    0.04    0.23    0.23    1                                                                                   
0.20    0.00    0.08    0.27    0.17    0.44    0.28    0.37    1                                                                               
-0.02   0.04    -0.02   -0.06   0.21    -0.03   -0.13   -0.05   0.03    1                                                                           
0.33    0.23    0.15    0.38    0.45    0.22    0.16    0.16    0.16    0.11    1                                                                       
0.40    0.18    0.43    0.26    0.11    0.24    0.41    0.28    0.11    0.05    0.34    1                                                                   
0.41    0.30    0.29    0.15    0.11    0.01    0.36    0.18    0.10    -0.07   0.25    0.41    1                                                               
0.26    0.04    0.20    0.09    0.00    -0.15   0.27    0.08    0.18    0.19    0.14    0.20    0.08    1                                                           
0.50    0.30    0.33    0.36    0.13    0.23    0.44    0.40    0.41    -0.12   0.27    0.27    0.23    0.28    1                                                       
0.46    0.31    0.40    0.31    0.34    0.43    0.48    0.35    0.20    -0.10   0.46    0.41    0.20    0.06    0.54    1                                                   
0.35    0.03    0.33    0.20    0.14    0.36    0.29    0.19    0.13    -0.05   0.37    0.32    0.22    0.10    0.33    0.41    1                                               
0.33    0.24    0.22    0.11    0.21    0.04    0.23    0.30    0.24    -0.30   0.32    0.25    0.24    0.25    0.32    0.30    0.28    1                                           
0.31    0.02    0.23    0.20    0.00    0.16    0.27    0.15    0.20    -0.24   0.29    0.36    0.21    0.22    0.43    0.45    0.29    0.20    1                                       
0.32    0.02    0.28    0.12    -0.05   0.18    0.18    0.36    0.22    -0.30   0.06    0.09    0.06    0.00    0.39    0.40    0.27    0.21    0.47    1                                   
0.34    0.09    0.26    0.42    0.17    0.14    0.37    0.04    0.32    0.00    0.29    0.41    0.27    0.25    0.24    0.14    0.58    0.30    0.35    0.14    1                               
0.52    0.23    0.40    0.29    0.22    0.30    0.17    0.56    0.39    0.01    0.32    0.32    0.42    0.02    0.49    0.38    0.30    0.21    0.41    0.40    0.20    1                           
0.45    0.12    0.42    0.25    0.09    0.20    0.36    0.33    0.37    -0.29   0.23    0.29    0.29    0.23    0.50    0.26    0.42    0.32    0.56    0.26    0.45    0.52    1                       
0.37    0.56    0.56    0.24    0.15    0.11    0.45    0.30    0.30    0.33    0.16    0.42    0.22    0.35    0.38    0.36    0.37    0.38    0.31    0.37    0.36    0.38    0.46    1                   
0.34    0.29    0.39    0.35    0.24    0.17    0.45    0.25    0.39    0.45    0.11    0.30    0.18    0.22    0.24    0.19    0.28    0.24    0.10    0.13    0.33    0.32    0.26    0.63    1               
0.62    0.01    0.01    0.29    0.36    0.29    0.51    0.45    0.35    0.52    0.21    0.43    0.24    0.35    0.34    0.37    0.31    0.40    0.42    0.45    0.42    0.37    0.61    0.75    0.54    1           
0.32    0.01    0.01    0.04    -0.06   0.15    0.37    0.28    0.24    0.41    0.00    0.44    0.21    0.33    0.38    0.38    0.27    0.22    0.36    0.45    0.29    0.44    0.41    0.62    0.24    0.66    1       
0.51    0.55    0.55    0.43    0.34    0.21    0.44    0.35    0.34    0.45    0.26    0.33    0.25    0.28    0.40    0.40    0.36    0.41    0.35    0.31    0.45    0.41    0.42    0.66    0.57    0.63    0.45    1   
0.27    0.49    0.49    0.41    0.08    0.01    0.30    0.28    0.24    0.32    0.08    0.39    0.21    0.48    0.25    0.25    0.20    0.35    0.26    0.20    0.22    0.31    0.41    0.75    0.52    0.67    0.56    0.55    1'

lowerH72IPsy <- '
1                                                                           
0.45    1                                                                       
0.54    0.19    1                                                                   
0.46    0.30    0.19    1                                                               
0.49    0.42    0.29    0.35    1                                                           
0.33    0.09    0.23    0.29    0.18    1                                                       
0.59    0.26    0.39    0.39    0.33    0.24    1                                                   
0.39    0.24    0.27    0.20    0.04    0.23    0.23    1                                               
0.20    0.00    0.08    0.27    0.17    0.44    0.28    0.37    1                                           
-0.02   0.04    -0.02   -0.06   0.21    -0.03   -0.13   -0.05   0.03    1                                       
0.33    0.23    0.15    0.38    0.45    0.22    0.16    0.16    0.16    0.11    1                                   
0.40    0.18    0.43    0.26    0.11    0.24    0.41    0.28    0.11    0.05    0.34    1                               
0.41    0.30    0.29    0.15    0.11    0.01    0.36    0.18    0.10    -0.07   0.25    0.41    1                           
0.26    0.04    0.20    0.09    0.00    -0.15   0.27    0.08    0.18    0.19    0.14    0.20    0.08    1                       
0.37    0.56    0.56    0.24    0.15    0.11    0.45    0.30    0.30    0.33    0.16    0.42    0.22    0.35    1                   
0.34    0.29    0.39    0.35    0.24    0.17    0.45    0.25    0.39    0.45    0.11    0.30    0.18    0.22    0.63    1               
0.62    0.01    0.01    0.29    0.36    0.29    0.51    0.45    0.35    0.52    0.21    0.43    0.24    0.35    0.75    0.54    1           
0.32    0.01    0.01    0.04    -0.06   0.15    0.37    0.28    0.24    0.41    0.00    0.44    0.21    0.33    0.62    0.24    0.66    1       
0.51    0.55    0.55    0.43    0.34    0.21    0.44    0.35    0.34    0.45    0.26    0.33    0.25    0.28    0.66    0.57    0.63    0.45    1   
0.27    0.49    0.49    0.41    0.08    0.01    0.30    0.28    0.24    0.32    0.08    0.39    0.21    0.48    0.75    0.52    0.67    0.56    0.55    1'

lowerH72IPia <- '
1                               
0.54    1                           
0.33    0.41    1                       
0.32    0.30    0.28    1                   
0.43    0.45    0.29    0.20    1               
0.39    0.40    0.27    0.21    0.47    1           
0.24    0.14    0.58    0.30    0.35    0.14    1       
0.49    0.38    0.30    0.21    0.41    0.40    0.20    1   
0.50    0.26    0.42    0.32    0.56    0.26    0.45    0.52    1'

H72I.cor = getCov(lowerH72I, names = c("WINF", "WCOM", "WARI", "WSIM", "WVOC", "WPICCO", "WPICAR", "WBD", "WMAZ", "WOA", "LTOV", "LTCO", "LTP", "LOMD", "PS", "PT", "PN", "PD", "PCQ", "PCS", "PI", "PM", "PSE", "CATRV", "CATCOM", "CATAR", "CATAF", "CATME", "CATSP"))
H72IPsy.cor = getCov(lowerH72IPsy, names = c("WINF", "WCOM", "WARI", "WSIM", "WVOC", "WPICCO", "WPICAR", "WBD", "WMAZ", "WOA", "LTOV", "LTCO", "LTP", "LOMD", "CATRV", "CATCOM", "CATAR", "CATAF", "CATME", "CATSP"))
H72IPia.cor = getCov(lowerH72IPia, names = c("PS", "PT", "PN", "PD", "PCQ", "PCS", "PI", "PM", "PSE"))

# Hathaway (1972) - Second Grade; n = 56

lowerH72II <- '
1                                                                                                               
0.45    1                                                                                                           
0.35    0.12    1                                                                                                       
0.46    0.33    0.31    1                                                                                                   
0.52    0.47    0.36    0.35    1                                                                                               
0.47    0.23    0.27    0.39    0.31    1                                                                                           
0.5 0.27    0.06    0.25    0.44    0.45    1                                                                                       
0.44    0.17    0.34    0.34    0.24    0.36    0.39    1                                                                                   
0.25    0.06    0.39    0.08    0.27    0.22    0.08    0.49    1                                                                               
0.16    -0.04   -0.01   -0.01   -0.01   0.02    -0.16   0.14    0.32    1                                                                           
0.4 0.24    0.17    0.3 0.46    0.2 0.34    0.29    0.22    0.17    1                                                                       
0.49    0.26    0.25    0.12    0.39    0.35    0.49    0.18    0.16    0   0.44    1                                                                   
0.51    0.25    0.15    0.2 0.25    0.28    0.18    0.26    0.09    0.16    0.23    0.35    1                                                               
0.2 0.1 0.13    0   0.29    0.16    0   0.03    0.19    0.45    0.29    0.36    0.22    1                                                           
0.33    0.17    0.12    0.25    0.11    0.12    0.15    0.3 0.18    0.27    0.4 0.24    0.34    0.22    1                                                       
0.25    0.17    0   0.29    0.01    0.01    0.13    0.23    0.1 0.1 0.45    0.15    0.4 0.03    0.49    1                                                   
0.27    0.27    0.15    0.04    0.31    0.24    0.29    0.24    0.06    0.09    0.3 0.21    0.11    0.29    0.19    0.17    1                                               
0.32    0.17    0.24    0.2 0.18    0.37    0.27    0.2 0.17    0.09    0.27    0.43    0.29    0.33    0.38    0.24    0.49    1                                           
0.53    0.1 0.26    0.48    0.29    0.19    0.17    0.28    0.13    0.09    0.4 0.21    0.38    0.1 0.44    0.37    0.32    0.5 1                                       
0.39    0.02    0.41    0.21    0.2 0.31    0.17    0.33    0.18    0.17    0.14    0.01    0.26    0.01    0.21    0.289   0.35    0.24    0.58    1                                   
0.4 0.15    0.24    0.26    0.24    0.1 0.15    0.25    0.27    0.26    0.42    0.31    0.45    0.27    0.32    0.32    0.15    0.34    0.36    0.21    1                               
0.38    0.29    0.31    0.21    0.29    0.16    0.32    0.4 0.14    0.13    0.39    0.36    0.21    0.14    0.31    0.23    0.33    0.35    0.27    0.33    0.22    1                           
0.3 0.18    0.35    0.04    0.14    0.18    0.21    0.37    0.35    0.19    0.06    0.33    0.31    0.15    0.39    0.15    0.04    0.26    0.2 0.24    0.26    0.08    1                       
0.41    0.17    0.3 0.23    0.24    0.26    0.2 0.4 0.3 0.27    0.37    0.36    0.38    0.38    0.5 0.46    0.16    0.31    0.26    0.35    0.23    0.39    0.4 1                   
0.47    0.21    0.28    0.15    0.25    0.34    0.39    0.39    0.38    0.2 0.46    0.54    0.4 0.33    0.44    0.38    0.3 0.42    0.27    0.3 0.2 0.44    0.37    0.63    1               
0.55    0.28    0.32    0.27    0.31    0.38    0.39    0.51    0.43    0.25    0.47    0.49    0.48    0.3 0.55    0.52    0.34    0.43    0.37    0.44    0.35    0.54    0.4 0.75    0.8 1           
0.55    0.22    0.37    0.19    0.27    0.27    0.19    0.43    0.44    0.36    0.25    0.42    0.42    0.4 0.49    0.32    0.35    0.37    0.3 0.38    0.37    0.4 0.47    0.62    0.62    0.75    1       
0.39    0.12    0.26    0.19    0.22    0.29    0.25    0.28    0.28    0.15    0.32    0.56    0.39    0.2 0.37    0.28    0.12    0.32    0.25    0.24    0.23    0.29    0.4 0.66    0.57    0.58    0.56    1   
0.4 0.17    0.15    0.17    0.23    0.22    0.13    0.29    0.28    0.3 0.23    0.37    0.45    0.31    0.38    0.47    0.15    0.25    0.27    0.37    0.15    0.32    0.35    0.75    0.52    0.76    0.64    0.59    1'

lowerH72IIPsy <- '
1                                                                           
0.45    1                                                                       
0.35    0.12    1                                                                   
0.46    0.33    0.31    1                                                               
0.52    0.47    0.36    0.35    1                                                           
0.47    0.23    0.27    0.39    0.31    1                                                       
0.5 0.27    0.06    0.25    0.44    0.45    1                                                   
0.44    0.17    0.34    0.34    0.24    0.36    0.39    1                                               
0.25    0.06    0.39    0.08    0.27    0.22    0.08    0.49    1                                           
0.16    -0.04   -0.01   -0.01   -0.01   0.02    -0.16   0.14    0.32    1                                       
0.4 0.24    0.17    0.3 0.46    0.2 0.34    0.29    0.22    0.17    1                                   
0.49    0.26    0.25    0.12    0.39    0.35    0.49    0.18    0.16    0   0.44    1                               
0.51    0.25    0.15    0.2 0.25    0.28    0.18    0.26    0.09    0.16    0.23    0.35    1                           
0.2 0.1 0.13    0   0.29    0.16    0   0.03    0.19    0.45    0.29    0.36    0.22    1                       
0.41    0.17    0.3 0.23    0.24    0.26    0.2 0.4 0.3 0.27    0.37    0.36    0.38    0.38    1                   
0.47    0.21    0.28    0.15    0.25    0.34    0.39    0.39    0.38    0.2 0.46    0.54    0.4 0.33    0.63    1               
0.55    0.28    0.32    0.27    0.31    0.38    0.39    0.51    0.43    0.25    0.47    0.49    0.48    0.3 0.75    0.8 1           
0.55    0.22    0.37    0.19    0.27    0.27    0.19    0.43    0.44    0.36    0.25    0.42    0.42    0.4 0.62    0.62    0.75    1       
0.39    0.12    0.26    0.19    0.22    0.29    0.25    0.28    0.28    0.15    0.32    0.56    0.39    0.2 0.66    0.57    0.58    0.56    1   
0.4 0.17    0.15    0.17    0.23    0.22    0.13    0.29    0.28    0.3 0.23    0.37    0.45    0.31    0.75    0.52    0.76    0.64    0.59    1'

lowerH72IIPia <- '
1                               
0.49    1                           
0.19    0.17    1                       
0.38    0.24    0.49    1                   
0.44    0.37    0.32    0.5 1               
0.21    0.289   0.35    0.24    0.58    1           
0.32    0.32    0.15    0.34    0.36    0.21    1       
0.31    0.23    0.33    0.35    0.27    0.33    0.22    1   
0.39    0.15    0.04    0.26    0.2 0.24    0.26    0.08    1'

H72II.cor = getCov(lowerH72II, names = c("WINF", "WCOM", "WARI", "WSIM", "WVOC", "WPICCO", "WPICAR", "WBD", "WMAZ", "WOA", "LTOV", "LTCO", "LTP", "LOMD", "PS", "PT", "PN", "PD", "PCQ", "PCS", "PI", "PM", "PSE", "CATRV", "CATCOM", "CATAR", "CATAF", "CATME", "CATSP"))
H72IIPsy.cor = getCov(lowerH72IIPsy, names = c("WINF", "WCOM", "WARI", "WSIM", "WVOC", "WPICCO", "WPICAR", "WBD", "WMAZ", "WOA", "LTOV", "LTCO", "LTP", "LOMD", "CATRV", "CATCOM", "CATAR", "CATAF", "CATME", "CATSP"))
H72IIPia.cor = getCov(lowerH72IIPia, names = c("PS", "PT", "PN", "PD", "PCQ", "PCS", "PI", "PM", "PSE"))

Analysis

DeVries & Kohlberg (1977); n = 47

fa.parallel(DK77.cor, n.obs =  nDK77)

## Parallel analysis suggests that the number of factors =  1  and the number of components =  1
fa77 <- fa(DK77.cor, n.obs = nDK77, nfactors = 1)
faPsy77 <- fa(DK77Psy.cor, n.obs = nDK77, nfactors = 1)
faPia77 <- fa(DK77Pia.cor, n.obs = nDK77, nfactors = 1)

print(fa77$loadings, cutoff = 0.15)
## 
## Loadings:
##    MR1  
## QT 0.398
## IN 0.632
## MC 0.232
## DS 0.536
## CO 0.375
## S1 0.683
## MT 0.511
## LD 0.343
## PS 0.637
## LO 0.246
## AS 0.532
## LC 0.574
## NC 0.362
## S2 0.519
## AI 0.632
## 
##                  MR1
## SS loadings    3.765
## Proportion Var 0.251
print(faPsy77$loadings, cutoff = 0.15)
## 
## Loadings:
##    MR1  
## QT 0.473
## IN 0.664
## MC 0.195
## DS 0.558
## CO 0.262
## S1 0.638
## MT 0.467
## LD 0.419
## PS 0.734
## 
##                  MR1
## SS loadings    2.422
## Proportion Var 0.269
print(faPia77$loadings, cutoff = 0.15)
## 
## Loadings:
##    MR1  
## LO 0.259
## AS 0.592
## LC 0.438
## NC 0.493
## S2 0.654
## AI 0.618
## 
##                  MR1
## SS loadings    1.662
## Proportion Var 0.277
EFATOG77 <- c(0.398, 0.632, 0.232, 0.536, 0.375, 0.683, 0.511, 0.343, 0.637, 0.246, 0.532, 0.574, 0.362, 0.519, 0.632)
EFATOG77Psy <- c(0.398, 0.632, 0.232, 0.536, 0.375, 0.683, 0.511, 0.343, 0.637); EFATOG77Pia <- c(0.246, 0.532, 0.574, 0.362, 0.519, 0.632)
EFASEP77 <- c(0.473, 0.664, 0.195, 0.558, 0.262, 0.638, 0.467, 0.419, 0.734, 0.259, 0.592, 0.438, 0.493, 0.654, 0.618)
EFASEP77Psy <- c(0.473, 0.664, 0.195, 0.558, 0.262, 0.638, 0.467, 0.419, 0.734); EFASEP77Pia <- c(0.259, 0.592, 0.438, 0.493, 0.654, 0.618)

cor(EFATOG77, EFASEP77, method = "pearson"); cor(EFATOG77, EFASEP77, method = "spearman"); CONGO(EFATOG77, EFASEP77)
## [1] 0.8641941
## [1] 0.8203757
## [1] 0.9883043
cor(EFATOG77Psy, EFASEP77Psy, method = "pearson"); cor(EFATOG77Psy, EFASEP77Psy, method = "spearman"); CONGO(EFATOG77Psy, EFASEP77Psy)
## [1] 0.9227877
## [1] 0.9166667
## [1] 0.9918101
cor(EFATOG77Pia, EFASEP77Pia, method = "pearson"); cor(EFATOG77Pia, EFASEP77Pia, method = "spearman"); CONGO(EFATOG77Pia, EFASEP77Pia)
## [1] 0.7557447
## [1] 0.4285714
## [1] 0.9833407
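
As a cross-check on CONGO, psych's factor.congruence() should return the same Tucker congruence when the loading vectors are passed as one-column matrices (a minimal sketch):

factor.congruence(as.matrix(EFATOG77), as.matrix(EFASEP77))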
DK771FNo <- '
gPsy =~ QT + IN + MC + DS + CO + S1 + MT + LD + PS
gPia =~ LO + AS + LC + NC + S2 + AI

gPsy ~~ 0*gPia'

DK771F <- '
gPsy =~ QT + IN + MC + DS + CO + S1 + MT + LD + PS
gPia =~ LO + AS + LC + NC + S2 + AI

gPsy ~~ gPia'

DK771FID <- '
gPsy =~ QT + IN + MC + DS + CO + S1 + MT + LD + PS
gPia =~ LO + AS + LC + NC + S2 + AI

gPsy ~~ 1*gPia'

DK77No.fit <- cfa(DK771FNo, sample.cov = DK77.cor, sample.nobs = nDK77, std.lv = T)
DK77.fit <- cfa(DK771F, sample.cov = DK77.cor, sample.nobs = nDK77, std.lv = T)
DK77ID.fit <- cfa(DK771FID, sample.cov = DK77.cor, sample.nobs = nDK77, std.lv = T)

round(cbind("No Relationship"   = fitMeasures(DK77No.fit, FITM),
            "Free Relationship" = fitMeasures(DK77.fit, FITM),
            "Identical"         = fitMeasures(DK77ID.fit, FITM)),3)
##                No Relationship Free Relationship Identical
## chisq                  111.578            88.259    90.026
## df                      90.000            89.000    90.000
## npar                    30.000            31.000    30.000
## cfi                      0.805             1.000     1.000
## rmsea                    0.071             0.000     0.002
## rmsea.ci.lower           0.000             0.000     0.000
## rmsea.ci.upper           0.111             0.078     0.079
## aic                   1941.475          1920.157  1919.923
## bic                   1996.980          1977.511  1975.427
## srmr                     0.179             0.089     0.089
#suppressWarnings(cfaHB(DK77.fit)) # does not work

1 - pchisq(90.026 - 88.259, 1, lower.tail = T) # LRT: identical vs. freely estimated factor correlation
## [1] 0.1837538
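
The same nested comparison can be run directly in lavaan via its likelihood-ratio test (a minimal sketch; anova() on lavaan fits dispatches to lavTestLRT()):

anova(DK77.fit, DK77ID.fit)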
summary(DK77.fit, stand = T, fit = T)
## lavaan 0.6-12 ended normally after 16 iterations
## 
##   Estimator                                         ML
##   Optimization method                           NLMINB
##   Number of model parameters                        31
## 
##   Number of observations                            47
## 
## Model Test User Model:
##                                                       
##   Test statistic                                88.259
##   Degrees of freedom                                89
##   P-value (Chi-square)                           0.502
## 
## Model Test Baseline Model:
## 
##   Test statistic                               215.644
##   Degrees of freedom                               105
##   P-value                                        0.000
## 
## User Model versus Baseline Model:
## 
##   Comparative Fit Index (CFI)                    1.000
##   Tucker-Lewis Index (TLI)                       1.008
## 
## Loglikelihood and Information Criteria:
## 
##   Loglikelihood user model (H0)               -929.078
##   Loglikelihood unrestricted model (H1)       -884.949
##                                                       
##   Akaike (AIC)                                1920.157
##   Bayesian (BIC)                              1977.511
##   Sample-size adjusted Bayesian (BIC)         1880.283
## 
## Root Mean Square Error of Approximation:
## 
##   RMSEA                                          0.000
##   90 Percent confidence interval - lower         0.000
##   90 Percent confidence interval - upper         0.078
##   P-value RMSEA <= 0.05                          0.769
## 
## Standardized Root Mean Square Residual:
## 
##   SRMR                                           0.089
## 
## Parameter Estimates:
## 
##   Standard errors                             Standard
##   Information                                 Expected
##   Information saturated (h1) model          Structured
## 
## Latent Variables:
##                    Estimate  Std.Err  z-value  P(>|z|)   Std.lv  Std.all
##   gPsy =~                                                               
##     QT                0.417    0.150    2.773    0.006    0.417    0.422
##     IN                0.643    0.140    4.603    0.000    0.643    0.650
##     MC                0.224    0.156    1.439    0.150    0.224    0.227
##     DS                0.545    0.145    3.756    0.000    0.545    0.550
##     CO                0.340    0.153    2.225    0.026    0.340    0.344
##     S1                0.679    0.138    4.937    0.000    0.679    0.687
##     MT                0.500    0.147    3.398    0.001    0.500    0.505
##     LD                0.384    0.152    2.536    0.011    0.384    0.389
##     PS                0.670    0.138    4.849    0.000    0.670    0.677
##   gPia =~                                                               
##     LO                0.250    0.159    1.573    0.116    0.250    0.252
##     AS                0.558    0.148    3.765    0.000    0.558    0.564
##     LC                0.576    0.147    3.909    0.000    0.576    0.583
##     NC                0.403    0.155    2.606    0.009    0.403    0.407
##     S2                0.541    0.149    3.629    0.000    0.541    0.547
##     AI                0.647    0.144    4.486    0.000    0.647    0.654
## 
## Covariances:
##                    Estimate  Std.Err  z-value  P(>|z|)   Std.lv  Std.all
##   gPsy ~~                                                               
##     gPia              0.876    0.096    9.101    0.000    0.876    0.876
## 
## Variances:
##                    Estimate  Std.Err  z-value  P(>|z|)   Std.lv  Std.all
##    .QT                0.805    0.173    4.638    0.000    0.805    0.822
##    .IN                0.565    0.137    4.130    0.000    0.565    0.577
##    .MC                0.928    0.194    4.795    0.000    0.928    0.949
##    .DS                0.682    0.154    4.424    0.000    0.682    0.697
##    .CO                0.863    0.183    4.718    0.000    0.863    0.882
##    .S1                0.517    0.130    3.970    0.000    0.517    0.528
##    .MT                0.729    0.161    4.514    0.000    0.729    0.745
##    .LD                0.831    0.178    4.675    0.000    0.831    0.849
##    .PS                0.530    0.132    4.015    0.000    0.530    0.541
##    .LO                0.916    0.192    4.764    0.000    0.916    0.936
##    .AS                0.667    0.157    4.261    0.000    0.667    0.682
##    .LC                0.647    0.154    4.200    0.000    0.647    0.661
##    .NC                0.817    0.177    4.603    0.000    0.817    0.834
##    .S2                0.686    0.159    4.314    0.000    0.686    0.701
##    .AI                0.561    0.144    3.892    0.000    0.561    0.573
##     gPsy              1.000                               1.000    1.000
##     gPia              1.000                               1.000    1.000
resid(DK77.fit, "cor"); sum(abs(resid(DK77.fit, "cor")$cov) > CRITR(47)); sum(abs(resid(DK77.fit, "cor")$cov) > CRITR(47, NP(47)))
## $type
## [1] "cor.bollen"
## 
## $cov
##    QT     IN     MC     DS     CO     S1     MT     LD     PS     LO     AS    
## QT  0.000                                                                      
## IN  0.176  0.000                                                               
## MC  0.004 -0.037  0.000                                                        
## DS -0.052 -0.038 -0.145  0.000                                                 
## CO  0.075  0.026  0.032  0.011  0.000                                          
## S1 -0.120 -0.046  0.064  0.002 -0.026  0.000                                   
## MT  0.087  0.022  0.055 -0.018 -0.024 -0.007  0.000                            
## LD  0.026 -0.043 -0.078  0.236 -0.284 -0.037 -0.196  0.000                     
## PS  0.034  0.010 -0.014  0.007 -0.133  0.045 -0.042  0.217  0.000              
## LO  0.037 -0.084  0.000 -0.072 -0.016  0.168  0.048 -0.036 -0.020  0.000       
## AS -0.098  0.029  0.178 -0.022  0.030  0.001 -0.030 -0.062 -0.015 -0.052  0.000
## LC -0.115  0.098 -0.066 -0.031 -0.016  0.100  0.142  0.032  0.104 -0.097 -0.059
## NC -0.030 -0.112 -0.121 -0.086  0.107  0.005  0.030 -0.109 -0.071  0.157  0.020
## S2 -0.002 -0.161  0.201  0.156  0.155 -0.019 -0.012 -0.056 -0.214  0.032  0.172
## AI -0.061  0.068 -0.050 -0.025  0.253  0.007  0.041 -0.062 -0.058 -0.005 -0.049
##    LC     NC     S2     AI    
## QT                            
## IN                            
## MC                            
## DS                            
## CO                            
## S1                            
## MT                            
## LD                            
## PS                            
## LO                            
## AS                            
## LC  0.000                     
## NC -0.117  0.000              
## S2 -0.038  0.087  0.000       
## AI -0.011  0.104 -0.027  0.000
## [1] 0
## [1] 0

With a critical r of 0.29 (0.40 at the sample-size-adjusted alpha), there were no violations of local independence.

semPaths(DK77.fit, "std", title = F, residuals = F, bifactor = c("gPsy", "gPia"), pastel = T, mar = c(2, 1, 3, 1), posCol = c("skyblue4"), layout = "tree2", exoCov = T)

DeVries (1974) - 1; n = 50

fa.parallel(D74A.cor, n.obs =  nD74A)

## Parallel analysis suggests that the number of factors =  2  and the number of components =  1
fa.parallel(D74PsyA.cor, n.obs =  nD74A)
## Warning in fa.stats(r = r, f = f, phi = phi, n.obs = n.obs, np.obs = np.obs, :
## The estimated weights for the factor scores are probably incorrect. Try a
## different factor score estimation method.
## Warning in fac(r = r, nfactors = nfactors, n.obs = n.obs, rotate = rotate, : An
## ultra-Heywood case was detected. Examine the results carefully
## (Both warnings were repeated several times during the parallel analysis.)

## Parallel analysis suggests that the number of factors =  2  and the number of components =  1
fa.parallel(D74PiaA.cor, n.obs =  nD74A)
## Warning in fa.stats(r = r, f = f, phi = phi, n.obs = n.obs, np.obs = np.obs, :
## The estimated weights for the factor scores are probably incorrect. Try a
## different factor score estimation method.
## Warning in fac(r = r, nfactors = nfactors, n.obs = n.obs, rotate = rotate, : An
## ultra-Heywood case was detected. Examine the results carefully
## (Both warnings were repeated several times during the parallel analysis.)

## Parallel analysis suggests that the number of factors =  1  and the number of components =  1
fa74A1 <- fa(D74A.cor, n.obs = nD74A, nfactors = 1)
fa74A2 <- fa(D74A.cor, n.obs = nD74A, nfactors = 2)
## Loading required namespace: GPArotation
faPsy74A1 <- fa(D74PsyA.cor, n.obs = nD74A, nfactors = 1)
faPsy74A2 <- fa(D74PsyA.cor, n.obs = nD74A, nfactors = 2)

faPia74A1 <- fa(D74PiaA.cor, n.obs = nD74A, nfactors = 1)
#faPia74A2 <- fa(D74PiaA.cor, n.obs = nD74A, nfactors = 2) #Model not called for and returns practically useless results - run if you want!

print(fa74A1$loadings, cutoff = 0.10)
## 
## Loadings:
##     MR1  
## SB  0.722
## CL  0.598
## CN  0.610
## PM  0.591
## PC  0.395
## PD  0.591
## PL  0.331
## PN  0.687
## PI  0.548
## PE  0.608
## PA  0.523
## PR  0.697
## PG  0.688
## PS  0.657
## PSI 0.506
## PT  0.349
## PGG 0.228
## POS 0.121
## MW  0.647
## MK  0.637
## MA  0.508
## MR  0.375
## 
##                  MR1
## SS loadings    6.690
## Proportion Var 0.304
print(fa74A2$loadings, cutoff = 0.10)
## 
## Loadings:
##     MR1    MR2   
## SB   0.329  0.523
## CL   0.331  0.369
## CN   0.292  0.426
## PM   0.414  0.270
## PC   0.140  0.328
## PD   0.144  0.571
## PL   0.203  0.183
## PN   0.949 -0.141
## PI   0.607       
## PE   0.656       
## PA   0.605       
## PR   0.705       
## PG   0.571  0.225
## PS   0.643  0.115
## PSI  0.389  0.197
## PT          0.349
## PGG  0.376 -0.122
## POS         0.191
## MW          0.826
## MK          0.727
## MA          0.701
## MR  -0.197  0.683
## 
##                  MR1   MR2
## SS loadings    4.201 3.606
## Proportion Var 0.191 0.164
## Cumulative Var 0.191 0.355
print(faPsy74A1$loadings, cutoff = 0.10)
## 
## Loadings:
##    MR1  
## SB 0.702
## CL 0.532
## CN 0.592
## MW 0.786
## MK 0.751
## MA 0.625
## MR 0.611
## 
##                  MR1
## SS loadings    3.072
## Proportion Var 0.439
print(faPsy74A2$loadings, cutoff = 0.10)
## 
## Loadings:
##    MR1    MR2   
## SB         0.821
## CL         0.698
## CN         0.715
## MW  0.750  0.112
## MK  0.909       
## MA  0.477  0.202
## MR  0.706       
## 
##                  MR1   MR2
## SS loadings    2.120 1.730
## Proportion Var 0.303 0.247
## Cumulative Var 0.303 0.550
print(faPia74A1$loadings, cutoff = 0.10)
## 
## Loadings:
##     MR1  
## PM  0.527
## PC  0.316
## PD  0.463
## PL  0.319
## PN  0.843
## PI  0.622
## PE  0.664
## PA  0.594
## PR  0.748
## PG  0.725
## PS  0.708
## PSI 0.499
## PT  0.267
## PGG 0.313
## POS 0.116
## 
##                  MR1
## SS loadings    4.601
## Proportion Var 0.307
#print(faPia74A2$loadings, cutoff = 0.10)
EFATOG74A <- c(0.722, 0.598, 0.610, 0.591, 0.395, 0.591, 0.331, 0.687, 0.548, 0.608, 0.523, 0.697, 0.688, 0.657, 0.506, 0.349, 0.228, 0.121, 0.647, 0.637, 0.508, 0.375)
EFATOG74APsy <- c(0.722, 0.598, 0.610, 0.647, 0.637, 0.508, 0.375); EFATOG74APia <- c(0.591, 0.395, 0.591, 0.331, 0.687, 0.548, 0.608, 0.523, 0.697, 0.688, 0.657, 0.506, 0.349, 0.228, 0.121)
EFASEP74A <- c(0.702, 0.532, 0.592, 0.527, 0.316, 0.463, 0.319, 0.843, 0.622, 0.664, 0.594, 0.748, 0.725, 0.708, 0.499, 0.267, 0.313, 0.116, 0.786, 0.751, 0.625, 0.611)
EFASEP74APsy <- c(0.702, 0.532, 0.592, 0.786, 0.751, 0.625, 0.611); EFASEP74APia <- c(0.527, 0.316, 0.463, 0.319, 0.843, 0.622, 0.664, 0.594, 0.748, 0.725, 0.708, 0.499, 0.267, 0.313, 0.116)

cor(EFATOG74A, EFASEP74A, method = "pearson"); cor(EFATOG74A, EFASEP74A, method = "spearman"); CONGO(EFATOG74A, EFASEP74A)
## [1] 0.8842337
## [1] 0.8410054
## [1] 0.9889509
cor(EFATOG74APsy, EFASEP74APsy, method = "pearson"); cor(EFATOG74APsy, EFASEP74APsy, method = "spearman"); CONGO(EFATOG74APsy, EFASEP74APsy)
## [1] 0.4503779
## [1] 0.6071429
## [1] 0.9864973
cor(EFATOG74APia, EFASEP74APia, method = "pearson"); cor(EFATOG74APia, EFASEP74APia, method = "spearman"); CONGO(EFATOG74APia, EFASEP74APia)
## [1] 0.9362388
## [1] 0.9294016
## [1] 0.9912709
D74A1FNo <- '
gPsy =~ SB + CL + CN + MW + MK + MA + MR
gPia =~ PM + PC + PD + PL + PN + PI + PE + PA + PR + PG + PS + PSI + PT + PGG + POS

gPsy ~~ 0*gPia'

D74A1F <- '
gPsy =~ SB + CL + CN + MW + MK + MA + MR
gPia =~ PM + PC + PD + PL + PN + PI + PE + PA + PR + PG + PS + PSI + PT + PGG + POS

gPsy ~~ gPia'

D74A1FID <- '
gPsy =~ SB + CL + CN + MW + MK + MA + MR
gPia =~ PM + PC + PD + PL + PN + PI + PE + PA + PR + PG + PS + PSI + PT + PGG + POS

gPsy ~~ 1*gPia'

D74A2FNo <- '
SBF =~ SB + CL + CN + MW
MAT =~ MW + MK + MA + MR

gPsy =~ SBF + MAT
gPia =~ PM + PC + PD + PL + PN + PI + PE + PA + PR + PG + PS + PSI + PT + PGG + POS

gPsy ~~ 0*gPia'

D74A2F <- '
SBF =~ SB + CL + CN + MW
MAT =~ MW + MK + MA + MR

gPsy =~ SBF + MAT
gPia =~ PM + PC + PD + PL + PN + PI + PE + PA + PR + PG + PS + PSI + PT + PGG + POS

gPsy ~~ gPia'

D74A2FID <- '
SBF =~ SB + CL + CN + MW
MAT =~ MW + MK + MA + MR

gPsy =~ SBF + MAT
gPia =~ PM + PC + PD + PL + PN + PI + PE + PA + PR + PG + PS + PSI + PT + PGG + POS

gPsy ~~ 1*gPia'

D74ANo1.fit <- cfa(D74A1FNo, sample.cov = D74A.cor, sample.nobs = nD74A, std.lv = T)
D74A1.fit <- cfa(D74A1F, sample.cov = D74A.cor, sample.nobs = nD74A, std.lv = T)
D74AID1.fit <- cfa(D74A1FID, sample.cov = D74A.cor, sample.nobs = nD74A, std.lv = T)

D74ANo2.fit <- cfa(D74A2FNo, sample.cov = D74A.cor, sample.nobs = nD74A, std.lv = T); "\n" #Leads to severe estimation issues
## Warning in lav_model_vcov(lavmodel = lavmodel, lavsamplestats = lavsamplestats, : lavaan WARNING:
##     Could not compute standard errors! The information matrix could
##     not be inverted. This may be a symptom that the model is not
##     identified.
## [1] "\n"
D74A2.fit <- cfa(D74A2F, sample.cov = D74A.cor, sample.nobs = nD74A, std.lv = T)
D74AID2.fit <- cfa(D74A2FID, sample.cov = D74A.cor, sample.nobs = nD74A, std.lv = T)

round(cbind("No Rel - 1"   = fitMeasures(D74ANo1.fit, FITM),
            "Free Rel- 1" = fitMeasures(D74A1.fit, FITM),
            "Identical - 1"         = fitMeasures(D74AID1.fit, FITM),
            "No Rel - 2"   = fitMeasures(D74ANo2.fit, FITM),
            "Free Rel - 2" = fitMeasures(D74A2.fit, FITM),
            "Identical - 2"         = fitMeasures(D74AID2.fit, FITM)), 3)
##                No Rel - 1 Free Rel- 1 Identical - 1 No Rel - 2 Free Rel - 2
## chisq             403.237     383.919       416.131    377.211      351.978
## df                209.000     208.000       209.000    206.000      205.000
## npar               44.000      45.000        44.000     47.000       48.000
## cfi                 0.598       0.636         0.572      0.646        0.696
## rmsea               0.136       0.130         0.141      0.129        0.120
## rmsea.ci.lower      0.116       0.110         0.121      0.108        0.098
## rmsea.ci.upper      0.156       0.150         0.160      0.149        0.141
## aic              2876.084    2858.766      2888.978   2856.058     2832.825
## bic              2960.213    2944.807      2973.107   2945.923     2924.602
## srmr                0.206       0.116         0.112      0.204        0.105
##                Identical - 2
## chisq                354.482
## df                   206.000
## npar                  47.000
## cfi                    0.693
## rmsea                  0.120
## rmsea.ci.lower         0.099
## rmsea.ci.upper         0.141
## aic                 2833.329
## bic                 2923.194
## srmr                   0.106
#cfaHB(D74A1.fit) # does not work

1 - pchisq(354.482 - 351.978, 1, lower.tail = T) # LRT: identical vs. freely estimated factor correlation (higher-order model)
## [1] 0.1135575
summary(D74A2.fit, stand = T, fit = T)
## lavaan 0.6-12 ended normally after 32 iterations
## 
##   Estimator                                         ML
##   Optimization method                           NLMINB
##   Number of model parameters                        48
## 
##   Number of observations                            50
## 
## Model Test User Model:
##                                                       
##   Test statistic                               351.978
##   Degrees of freedom                               205
##   P-value (Chi-square)                           0.000
## 
## Model Test Baseline Model:
## 
##   Test statistic                               714.595
##   Degrees of freedom                               231
##   P-value                                        0.000
## 
## User Model versus Baseline Model:
## 
##   Comparative Fit Index (CFI)                    0.696
##   Tucker-Lewis Index (TLI)                       0.658
## 
## Loglikelihood and Information Criteria:
## 
##   Loglikelihood user model (H0)              -1368.412
##   Loglikelihood unrestricted model (H1)      -1192.424
##                                                       
##   Akaike (AIC)                                2832.825
##   Bayesian (BIC)                              2924.602
##   Sample-size adjusted Bayesian (BIC)         2773.938
## 
## Root Mean Square Error of Approximation:
## 
##   RMSEA                                          0.120
##   90 Percent confidence interval - lower         0.098
##   90 Percent confidence interval - upper         0.141
##   P-value RMSEA <= 0.05                          0.000
## 
## Standardized Root Mean Square Residual:
## 
##   SRMR                                           0.105
## 
## Parameter Estimates:
## 
##   Standard errors                             Standard
##   Information                                 Expected
##   Information saturated (h1) model          Structured
## 
## Latent Variables:
##                    Estimate  Std.Err  z-value  P(>|z|)   Std.lv  Std.all
##   SBF =~                                                                
##     SB                0.413    0.195    2.121    0.034    0.867    0.876
##     CL                0.305    0.147    2.081    0.037    0.641    0.647
##     CN                0.333    0.157    2.123    0.034    0.701    0.708
##     MW                0.084    0.076    1.105    0.269    0.176    0.178
##   MAT =~                                                                
##     MW                0.537    0.117    4.598    0.000    0.697    0.704
##     MK                0.670    0.120    5.595    0.000    0.870    0.879
##     MA                0.458    0.114    4.030    0.000    0.595    0.601
##     MR                0.517    0.113    4.556    0.000    0.671    0.678
##   gPsy =~                                                               
##     SBF               1.848    1.078    1.714    0.087    0.879    0.879
##     MAT               0.829    0.274    3.020    0.003    0.638    0.638
##   gPia =~                                                               
##     PM                0.550    0.134    4.090    0.000    0.550    0.556
##     PC                0.337    0.142    2.366    0.018    0.337    0.340
##     PD                0.483    0.137    3.511    0.000    0.483    0.488
##     PL                0.315    0.143    2.202    0.028    0.315    0.318
##     PN                0.796    0.119    6.668    0.000    0.796    0.804
##     PI                0.614    0.131    4.681    0.000    0.614    0.620
##     PE                0.678    0.127    5.322    0.000    0.678    0.685
##     PA                0.578    0.133    4.344    0.000    0.578    0.584
##     PR                0.743    0.123    6.024    0.000    0.743    0.750
##     PG                0.711    0.125    5.669    0.000    0.711    0.718
##     PS                0.704    0.126    5.595    0.000    0.704    0.711
##     PSI               0.499    0.137    3.646    0.000    0.499    0.504
##     PT                0.293    0.144    2.041    0.041    0.293    0.296
##     PGG               0.305    0.143    2.132    0.033    0.305    0.309
##     POS               0.087    0.147    0.594    0.553    0.087    0.088
## 
## Covariances:
##                    Estimate  Std.Err  z-value  P(>|z|)   Std.lv  Std.all
##   gPsy ~~                                                               
##     gPia              0.812    0.109    7.440    0.000    0.812    0.812
## 
## Variances:
##                    Estimate  Std.Err  z-value  P(>|z|)   Std.lv  Std.all
##    .SB                0.229    0.104    2.200    0.028    0.229    0.233
##    .CL                0.570    0.130    4.372    0.000    0.570    0.581
##    .CN                0.489    0.120    4.092    0.000    0.489    0.499
##    .MW                0.325    0.091    3.577    0.000    0.325    0.332
##    .MK                0.222    0.098    2.263    0.024    0.222    0.227
##    .MA                0.626    0.137    4.566    0.000    0.626    0.638
##    .MR                0.530    0.122    4.332    0.000    0.530    0.541
##    .PM                0.677    0.142    4.768    0.000    0.677    0.691
##    .PC                0.866    0.176    4.932    0.000    0.866    0.884
##    .PD                0.747    0.154    4.838    0.000    0.747    0.762
##    .PL                0.881    0.178    4.942    0.000    0.881    0.899
##    .PN                0.346    0.086    4.043    0.000    0.346    0.353
##    .PI                0.603    0.129    4.675    0.000    0.603    0.615
##    .PE                0.520    0.115    4.539    0.000    0.520    0.530
##    .PA                0.646    0.136    4.731    0.000    0.646    0.659
##    .PR                0.429    0.099    4.329    0.000    0.429    0.437
##    .PG                0.475    0.107    4.445    0.000    0.475    0.484
##    .PS                0.484    0.108    4.466    0.000    0.484    0.494
##    .PSI               0.731    0.152    4.824    0.000    0.731    0.746
##    .PT                0.894    0.181    4.950    0.000    0.894    0.912
##    .PGG               0.887    0.179    4.946    0.000    0.887    0.905
##    .POS               0.972    0.195    4.996    0.000    0.972    0.992
##    .SBF               1.000                               0.226    0.226
##    .MAT               1.000                               0.593    0.593
##     gPsy              1.000                               1.000    1.000
##     gPia              1.000                               1.000    1.000
resid(D74A2.fit, "cor"); sum(abs(resid(D74A2.fit, "cor")$cov) > CRITR(50)); sum(abs(resid(D74A2.fit, "cor")$cov) > CRITR(50, NP(50)))
## $type
## [1] "cor.bollen"
## 
## $cov
##     SB     CL     CN     MW     MK     MA     MR     PM     PC     PD    
## SB   0.000                                                               
## CL  -0.007  0.000                                                        
## CN   0.000  0.012  0.000                                                 
## MW   0.048 -0.091 -0.046  0.000                                          
## MK  -0.002 -0.049 -0.019 -0.007  0.000                                   
## MA   0.044  0.172  0.101  0.027 -0.019  0.000                            
## MR  -0.043 -0.006  0.031  0.005  0.014 -0.008  0.000                     
## PM   0.193  0.313  0.299 -0.013 -0.033  0.047 -0.035  0.000              
## PC   0.107  0.203  0.208  0.063  0.075  0.184  0.030  0.191  0.000       
## PD   0.115  0.095  0.154  0.310  0.348  0.388  0.079  0.049 -0.066  0.000
## PL   0.031  0.123 -0.141  0.043  0.015  0.261 -0.042  0.013  0.042  0.085
## PN  -0.173 -0.022 -0.077 -0.186 -0.026 -0.051 -0.203 -0.117 -0.134 -0.042
## PI  -0.028 -0.147 -0.114 -0.015  0.037  0.147 -0.148 -0.085 -0.151 -0.022
## PE  -0.009 -0.007  0.014  0.053  0.018 -0.144 -0.171 -0.151  0.007  0.016
## PA  -0.155  0.030 -0.045 -0.007  0.004  0.028 -0.045 -0.035 -0.149 -0.075
## PR   0.111  0.133  0.021 -0.089 -0.032 -0.164 -0.163  0.053  0.025  0.024
## PG  -0.049 -0.022  0.027  0.017  0.083  0.056 -0.032  0.101  0.086  0.020
## PS  -0.015  0.001  0.000  0.020  0.076 -0.052 -0.110  0.055  0.088 -0.207
## PSI -0.005  0.067  0.015  0.172  0.090  0.103 -0.127 -0.050  0.028 -0.066
## PT   0.265  0.133 -0.030  0.194  0.095  0.128  0.096 -0.144  0.069  0.196
## PGG -0.143 -0.013 -0.096 -0.212 -0.081  0.004 -0.088 -0.101 -0.265  0.070
## POS -0.175 -0.071 -0.064  0.177  0.060  0.253  0.109  0.041  0.040  0.127
##     PL     PN     PI     PE     PA     PR     PG     PS     PSI    PT    
## SB                                                                       
## CL                                                                       
## CN                                                                       
## MW                                                                       
## MK                                                                       
## MA                                                                       
## MR                                                                       
## PM                                                                       
## PC                                                                       
## PD                                                                       
## PL   0.000                                                               
## PN  -0.016  0.000                                                        
## PI  -0.127  0.111  0.000                                                 
## PE  -0.048  0.129  0.045  0.000                                          
## PA   0.034  0.130  0.208  0.000  0.000                                   
## PR   0.091  0.017 -0.025  0.026 -0.118  0.000                            
## PG  -0.059 -0.048 -0.056 -0.022 -0.039 -0.079  0.000                     
## PS  -0.026 -0.012 -0.071 -0.087 -0.025 -0.034  0.239  0.000              
## PSI  0.040 -0.025  0.037 -0.015  0.086  0.072 -0.102  0.002  0.000       
## PT   0.086 -0.008 -0.194 -0.103 -0.153  0.108 -0.023 -0.061  0.071  0.000
## PGG  0.022  0.212  0.049 -0.121  0.060 -0.051 -0.032  0.051 -0.135  0.109
## POS -0.108 -0.101  0.175 -0.070 -0.041 -0.066  0.107  0.037  0.056 -0.026
##     PGG    POS   
## SB               
## CL               
## CN               
## MW               
## MK               
## MA               
## MR               
## PM               
## PC               
## PD               
## PL               
## PN               
## PI               
## PE               
## PA               
## PR               
## PG               
## PS               
## PSI              
## PT               
## PGG  0.000       
## POS  0.123  0.000
## [1] 10
## [1] 0

With a critical r of 0.28, there were ten violations of local independence; the count dropped to zero when the critical value was recomputed with the sample-size-scaled alpha.
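
Because resid()$cov is returned as a full symmetric matrix, each violating pair is counted twice in the sums above. A minimal helper (not part of the original code, built on the CRITR() and NP() functions from the setup) counts each pair once:

#Count unique off-diagonal residual correlations exceeding the critical r;
#only the upper triangle is used, so each pair enters the sum once.
countViolations <- function(fit, n, alpha = .05) {
  res <- resid(fit, "cor")$cov
  sum(abs(res[upper.tri(res)]) > CRITR(n, alpha))
}
countViolations(D74A2.fit, 50)         #unique pairs at alpha = .05
countViolations(D74A2.fit, 50, NP(50)) #with the sample-size-scaled alpha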

semPaths(D74A2.fit, "std", title = F, residuals = F, bifactor = c("gPsy", "gPia"), pastel = T, mar = c(2, 1, 3, 1), posCol = c("skyblue4"), layout = "circle", exoCov = T)
## Warning in semPaths(D74A2.fit, "std", title = F, residuals = F, bifactor =
## c("gPsy", : 'bifactor' argument only supported in layouts 'tree2', 'tree3',
## 'circle2' and 'circle3'
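
As the warning indicates, the bifactor layout hint is ignored under layout = "circle"; rerunning semPaths() with one of the supported layouts (e.g. "tree2", used for the models below) would apply it.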

DeVries, 1974 (2); n = 79

fa.parallel(D74B.cor, n.obs =  nD74B)

## Parallel analysis suggests that the number of factors =  2  and the number of components =  2
fa.parallel(D74PsyB.cor, n.obs =  nD74B)
## Warning in fa.stats(r = r, f = f, phi = phi, n.obs = n.obs, np.obs = np.obs, :
## The estimated weights for the factor scores are probably incorrect. Try a
## different factor score estimation method.
## Warning in fac(r = r, nfactors = nfactors, n.obs = n.obs, rotate = rotate, : An
## ultra-Heywood case was detected. Examine the results carefully

## Parallel analysis suggests that the number of factors =  1  and the number of components =  1
fa.parallel(D74PiaB.cor, n.obs =  nD74B)

## Parallel analysis suggests that the number of factors =  2  and the number of components =  1
fa74B1 <- fa(D74B.cor, n.obs = nD74B, nfactors = 1)
fa74B2 <- fa(D74B.cor, n.obs = nD74B, nfactors = 2)

faPsy74B1 <- fa(D74PsyB.cor, n.obs = nD74B, nfactors = 1)

faPia74B1 <- fa(D74PiaB.cor, n.obs = nD74B, nfactors = 1)
faPia74B2 <- fa(D74PiaB.cor, n.obs = nD74B, nfactors = 2)

print(fa74B1$loadings, cutoff = 0.05)
## 
## Loadings:
##     MR1  
## SB  0.716
## CL  0.628
## CN  0.639
## PM  0.534
## PC  0.350
## PD  0.532
## PL  0.184
## PN  0.681
## PI  0.563
## PE  0.595
## PA  0.514
## PR  0.767
## PG  0.687
## PS  0.636
## PSI 0.387
## PT  0.262
## PGG 0.274
## POS      
## 
##                  MR1
## SS loadings    5.197
## Proportion Var 0.289
print(fa74B2$loadings, cutoff = 0.10)
## 
## Loadings:
##     MR1    MR2   
## SB   0.832       
## CL   0.695       
## CN   0.684       
## PM   0.632       
## PC   0.620 -0.236
## PD   0.453  0.153
## PL   0.300 -0.102
## PN          0.780
## PI          0.820
## PE          0.750
## PA          0.666
## PR   0.400  0.496
## PG   0.459  0.331
## PS   0.460  0.268
## PSI  0.202  0.250
## PT   0.319       
## PGG  0.146  0.173
## POS         0.107
## 
##                  MR1   MR2
## SS loadings    3.489 2.910
## Proportion Var 0.194 0.162
## Cumulative Var 0.194 0.355
print(faPsy74B1$loadings, cutoff = 0.10)
## 
## Loadings:
##    MR1  
## SB 0.828
## CL 0.701
## CN 0.785
## 
##                  MR1
## SS loadings    1.793
## Proportion Var 0.598
print(faPia74B1$loadings, cutoff = 0.10)
## 
## Loadings:
##     MR1  
## PM  0.444
## PC  0.255
## PD  0.453
## PL  0.140
## PN  0.771
## PI  0.654
## PE  0.675
## PA  0.568
## PR  0.757
## PG  0.704
## PS  0.648
## PSI 0.381
## PT  0.225
## PGG 0.287
## POS      
## 
##                  MR1
## SS loadings    4.058
## Proportion Var 0.271
print(faPia74B2$loadings, cutoff = 0.22)
## 
## Loadings:
##     MR1    MR2   
## PM          0.607
## PC  -0.254  0.602
## PD          0.314
## PL          0.265
## PN   0.762       
## PI   0.821       
## PE   0.752       
## PA   0.678       
## PR   0.519  0.337
## PG          0.700
## PS          0.731
## PSI  0.269       
## PT          0.332
## PGG         0.226
## POS              
## 
##                  MR1   MR2
## SS loadings    2.788 2.246
## Proportion Var 0.186 0.150
## Cumulative Var 0.186 0.336
EFATOG74B <- c(0.716, 0.628, 0.639, 0.534, 0.350, 0.53, 0.184, 0.681, 0.563, 0.595, 0.514, 0.767, 0.687, 0.636, 0.387, 0.262, 0.274)
EFATOG74BPsy <- c(0.716, 0.628, 0.639); EFATOG74BPia <- c(0.534, 0.350, 0.53, 0.184, 0.681, 0.563, 0.595, 0.514, 0.767, 0.687, 0.636, 0.387, 0.262, 0.274)
EFASEP74B <- c(0.828, 0.701, 0.785, 0.444, 0.255, 0.453, 0.140, 0.771, 0.654, 0.675, 0.568, 0.757, 0.704, 0.648, 0.381, 0.225, 0.287)
EFASEP74BPsy <- c(0.828, 0.701, 0.785); EFASEP74BPia <- c(0.444, 0.255, 0.453, 0.140, 0.771, 0.654, 0.675, 0.568, 0.757, 0.704, 0.648, 0.381, 0.225, 0.287)

cor(EFATOG74B, EFASEP74B, method = "pearson"); cor(EFATOG74B, EFASEP74B, method = "spearman"); CONGO(EFATOG74B, EFASEP74B)
## [1] 0.9597069
## [1] 0.9436275
## [1] 0.993405
cor(EFATOG74BPsy, EFASEP74BPsy, method = "pearson"); cor(EFATOG74BPsy, EFASEP74BPsy, method = "spearman"); CONGO(EFATOG74BPsy, EFASEP74BPsy)
## [1] 0.829317
## [1] 1
## [1] 0.9992701
cor(EFATOG74BPia, EFASEP74BPia, method = "pearson"); cor(EFATOG74BPia, EFASEP74BPia, method = "spearman"); CONGO(EFATOG74BPia, EFASEP74BPia)
## [1] 0.9624812
## [1] 0.9516484
## [1] 0.993586
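As a cross-check on the hand-rolled CONGO() function (this call is not part of the original output), psych's factor.congruence() computes the same Tucker congruence coefficient; cbind() is used because it expects loading matrices rather than bare vectors.

#Tucker congruence via psych; should match CONGO(EFATOG74B, EFASEP74B) up to rounding
factor.congruence(cbind(EFATOG74B), cbind(EFASEP74B))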

POS is removed from the CFAs for DeVries (1974 - 2) because it fails to load on anything in this sample; sampling error, range restriction, and low power are more than enough to explain this.

D74B1FNo <- '
gPsy =~ SB + CL + CN
gPia =~ PM + PC + PD + PL + PN + PI + PE + PA + PR + PG + PS + PSI + PT + PGG

gPsy ~~ 0*gPia'

D74B1F <- '
gPsy =~ SB + CL + CN
gPia =~ PM + PC + PD + PL + PN + PI + PE + PA + PR + PG + PS + PSI + PT + PGG 

gPsy ~~ gPia'

D74B1FID <- '
gPsy =~ SB + CL + CN
gPia =~ PM + PC + PD + PL + PN + PI + PE + PA + PR + PG + PS + PSI + PT + PGG

gPsy ~~ 1*gPia'

D74B2FNo <- '
Pia1 =~ PN + PI + PE + PA + PR + PSI
Pia2 =~ PM + PC + PD + PL + PG + PS + PT + PGG + PR

gPsy =~ SB + CL + CN
gPia =~ Pia1 + Pia2

gPsy ~~ 0*gPia'

D74B2F <- '
Pia1 =~ PN + PI + PE + PA + PR + PSI
Pia2 =~ PM + PC + PD + PL + PG + PS + PT + PGG + PR

gPsy =~ SB + CL + CN
gPia =~ Pia1 + Pia2

gPsy ~~ gPia'

D74B2FID <- '
Pia1 =~ PN + PI + PE + PA + PR + PSI
Pia2 =~ PM + PC + PD + PL + PG + PS + PT + PGG + PR

gPsy =~ SB + CL + CN
gPia =~ Pia1 + Pia2

gPsy ~~ 1*gPia'

D74BNo1.fit <- cfa(D74B1FNo, sample.cov = D74B.cor, sample.nobs = nD74B, std.lv = T)
D74B1.fit <- cfa(D74B1F, sample.cov = D74B.cor, sample.nobs = nD74B, std.lv = T)
D74BID1.fit <- cfa(D74B1FID, sample.cov = D74B.cor, sample.nobs = nD74B, std.lv = T)

D74BNo2.fit <- cfa(D74B2FNo, sample.cov = D74B.cor, sample.nobs = nD74B, std.lv = T); "\n" #Leads to severe estimation issues
## Warning in lav_model_vcov(lavmodel = lavmodel, lavsamplestats = lavsamplestats, : lavaan WARNING:
##     Could not compute standard errors! The information matrix could
##     not be inverted. This may be a symptom that the model is not
##     identified.
## [1] "\n"
D74B2.fit <- cfa(D74B2F, sample.cov = D74B.cor, sample.nobs = nD74B, std.lv = T)
D74BID2.fit <- cfa(D74B2FID, sample.cov = D74B.cor, sample.nobs = nD74B, std.lv = T)

round(cbind("No Rel - 1"   = fitMeasures(D74BNo1.fit, FITM),
            "Free Rel- 1" = fitMeasures(D74B1.fit, FITM),
            "Identical - 1"         = fitMeasures(D74BID1.fit, FITM),
            "No Rel - 2"   = fitMeasures(D74BNo2.fit, FITM),
            "Free Rel - 2" = fitMeasures(D74B2.fit, FITM),
            "Identical - 2"         = fitMeasures(D74BID2.fit, FITM)),3)
##                No Rel - 1 Free Rel- 1 Identical - 1 No Rel - 2 Free Rel - 2
## chisq             278.164     242.787       271.287    208.949      175.633
## df                119.000     118.000       119.000    116.000      115.000
## npar               34.000      35.000        34.000     37.000       38.000
## cfi                 0.669       0.741         0.684      0.807        0.874
## rmsea               0.130       0.116         0.127      0.101        0.082
## rmsea.ci.lower      0.110       0.095         0.107      0.078        0.056
## rmsea.ci.upper      0.150       0.136         0.147      0.122        0.105
## aic              3522.862    3489.485      3515.984   3459.647     3428.330
## bic              3603.423    3572.415      3596.545   3547.316     3518.369
## srmr                0.194       0.103         0.104      0.188        0.091
##                Identical - 2
## chisq                180.338
## df                   116.000
## npar                  37.000
## cfi                    0.866
## rmsea                  0.084
## rmsea.ci.lower         0.059
## rmsea.ci.upper         0.107
## aic                 3431.036
## bic                 3518.705
## srmr                   0.095
#cfaHB(D74B1.fit) <- does not work

1 - pchisq(180.338 - 175.633, 1, lower.tail = T) #Identical - 2 vs. Free Rel - 2
## [1] 0.030075
summary(D74B2.fit, stand = T, fit = T)
## lavaan 0.6-12 ended normally after 35 iterations
## 
##   Estimator                                         ML
##   Optimization method                           NLMINB
##   Number of model parameters                        38
## 
##   Number of observations                            79
## 
## Model Test User Model:
##                                                       
##   Test statistic                               175.633
##   Degrees of freedom                               115
##   P-value (Chi-square)                           0.000
## 
## Model Test Baseline Model:
## 
##   Test statistic                               617.463
##   Degrees of freedom                               136
##   P-value                                        0.000
## 
## User Model versus Baseline Model:
## 
##   Comparative Fit Index (CFI)                    0.874
##   Tucker-Lewis Index (TLI)                       0.851
## 
## Loglikelihood and Information Criteria:
## 
##   Loglikelihood user model (H0)              -1676.165
##   Loglikelihood unrestricted model (H1)      -1588.349
##                                                       
##   Akaike (AIC)                                3428.330
##   Bayesian (BIC)                              3518.369
##   Sample-size adjusted Bayesian (BIC)         3398.553
## 
## Root Mean Square Error of Approximation:
## 
##   RMSEA                                          0.082
##   90 Percent confidence interval - lower         0.056
##   90 Percent confidence interval - upper         0.105
##   P-value RMSEA <= 0.05                          0.023
## 
## Standardized Root Mean Square Residual:
## 
##   SRMR                                           0.091
## 
## Parameter Estimates:
## 
##   Standard errors                             Standard
##   Information                                 Expected
##   Information saturated (h1) model          Structured
## 
## Latent Variables:
##                    Estimate  Std.Err  z-value  P(>|z|)   Std.lv  Std.all
##   Pia1 =~                                                               
##     PN                0.617    0.092    6.706    0.000    0.820    0.825
##     PI                0.553    0.091    6.074    0.000    0.734    0.739
##     PE                0.573    0.091    6.286    0.000    0.762    0.767
##     PA                0.492    0.091    5.427    0.000    0.654    0.658
##     PR                0.434    0.093    4.664    0.000    0.577    0.581
##     PSI               0.280    0.090    3.105    0.002    0.372    0.374
##   Pia2 =~                                                               
##     PM                0.284    0.094    3.018    0.003    0.527    0.530
##     PC                0.202    0.081    2.507    0.012    0.375    0.377
##     PD                0.232    0.085    2.719    0.007    0.430    0.432
##     PL                0.072    0.066    1.083    0.279    0.133    0.134
##     PG                0.471    0.133    3.544    0.000    0.873    0.879
##     PS                0.455    0.128    3.541    0.000    0.844    0.849
##     PT                0.131    0.071    1.831    0.067    0.243    0.244
##     PGG               0.145    0.073    1.981    0.048    0.268    0.270
##     PR                0.130    0.069    1.890    0.059    0.242    0.243
##   gPsy =~                                                               
##     SB                0.839    0.099    8.430    0.000    0.839    0.844
##     CL                0.693    0.105    6.576    0.000    0.693    0.697
##     CN                0.765    0.102    7.473    0.000    0.765    0.770
##   gPia =~                                                               
##     Pia1              0.876    0.233    3.758    0.000    0.659    0.659
##     Pia2              1.562    0.604    2.585    0.010    0.842    0.842
## 
## Covariances:
##                    Estimate  Std.Err  z-value  P(>|z|)   Std.lv  Std.all
##   gPsy ~~                                                               
##     gPia              0.788    0.096    8.178    0.000    0.788    0.788
## 
## Variances:
##                    Estimate  Std.Err  z-value  P(>|z|)   Std.lv  Std.all
##    .PN                0.315    0.073    4.284    0.000    0.315    0.319
##    .PI                0.448    0.087    5.164    0.000    0.448    0.454
##    .PE                0.407    0.082    4.950    0.000    0.407    0.412
##    .PA                0.560    0.100    5.583    0.000    0.560    0.567
##    .PR                0.441    0.081    5.466    0.000    0.441    0.447
##    .PSI               0.849    0.138    6.137    0.000    0.849    0.860
##    .PM                0.710    0.119    5.968    0.000    0.710    0.719
##    .PC                0.847    0.138    6.152    0.000    0.847    0.858
##    .PD                0.803    0.132    6.100    0.000    0.803    0.813
##    .PL                0.970    0.155    6.270    0.000    0.970    0.982
##    .PG                0.225    0.068    3.319    0.001    0.225    0.228
##    .PS                0.275    0.070    3.929    0.000    0.275    0.279
##    .PT                0.929    0.149    6.234    0.000    0.929    0.940
##    .PGG               0.915    0.147    6.222    0.000    0.915    0.927
##    .SB                0.284    0.085    3.331    0.001    0.284    0.287
##    .CL                0.507    0.098    5.162    0.000    0.507    0.514
##    .CN                0.402    0.090    4.481    0.000    0.402    0.407
##    .Pia1              1.000                               0.566    0.566
##    .Pia2              1.000                               0.291    0.291
##     gPsy              1.000                               1.000    1.000
##     gPia              1.000                               1.000    1.000
resid(D74B2.fit, "cor"); sum(abs(resid(D74B2.fit, "cor")$cov) > CRITR(79)); sum(abs(resid(D74B2.fit, "cor")$cov) > CRITR(79, NP(79)))
## $type
## [1] "cor.bollen"
## 
## $cov
##     PN     PI     PE     PA     PR     PSI    PM     PC     PD     PL    
## PN   0.000                                                               
## PI  -0.020  0.000                                                        
## PE   0.037 -0.017  0.000                                                 
## PA   0.017  0.094 -0.044  0.000                                          
## PR  -0.011 -0.019  0.031 -0.101  0.000                                   
## PSI -0.089  0.063 -0.107  0.064  0.152  0.000                            
## PM  -0.013 -0.027 -0.056 -0.013  0.050  0.080  0.000                     
## PC  -0.073 -0.155 -0.111 -0.138 -0.013  0.092  0.210  0.000              
## PD   0.182  0.063  0.146 -0.038  0.135  0.010  0.051 -0.043  0.000       
## PL  -0.001 -0.065 -0.117  0.021  0.104  0.142  0.049  0.099  0.042  0.000
## PG   0.018 -0.010  0.046  0.009 -0.027  0.017 -0.066 -0.052  0.010 -0.058
## PS  -0.019 -0.048 -0.011 -0.010 -0.050  0.044 -0.010 -0.021 -0.147 -0.044
## PT   0.128 -0.090 -0.054 -0.039  0.032 -0.031 -0.069  0.128  0.054  0.187
## PGG  0.206  0.089 -0.025 -0.038  0.017 -0.066 -0.033 -0.122  0.083  0.144
## SB  -0.052 -0.014 -0.046 -0.078  0.149  0.096  0.123  0.169  0.268  0.175
## CL   0.011 -0.077 -0.007  0.082  0.147  0.105  0.265  0.185  0.230  0.098
## CN   0.000 -0.075 -0.046  0.017  0.104  0.100  0.179  0.147  0.219  0.041
##     PG     PS     PT     PGG    SB     CL     CN    
## PN                                                  
## PI                                                  
## PE                                                  
## PA                                                  
## PR                                                  
## PSI                                                 
## PM                                                  
## PC                                                  
## PD                                                  
## PL                                                  
## PG   0.000                                          
## PS   0.054  0.000                                   
## PT  -0.045 -0.027  0.000                            
## PGG -0.037  0.021  0.214  0.000                     
## SB  -0.042 -0.056  0.253 -0.001  0.000              
## CL  -0.096 -0.083  0.067  0.025 -0.009  0.000       
## CN  -0.059 -0.094 -0.055  0.002  0.000  0.013  0.000
## [1] 8
## [1] 0

With a critical r of 0.22, there were eight violations of local independence; the count dropped to zero when the critical value was recomputed with the sample-size-scaled alpha.
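
Using the unique-pair helper sketched earlier, the same counts can be obtained without the doubling from the symmetric residual matrix:

countViolations(D74B2.fit, 79)         #unique pairs at alpha = .05
countViolations(D74B2.fit, 79, NP(79)) #with the sample-size-scaled alpha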

semPaths(D74B2.fit, "std", title = F, residuals = F, bifactor = c("gPsy", "gPia"), pastel = T, mar = c(2, 1, 3, 1), posCol = c("skyblue4"), layout = "tree2", exoCov = T)

DeVries, 1974 (3); n = 126

fa.parallel(D74C.cor, n.obs =  nD74C)
## Warning in fa.stats(r = r, f = f, phi = phi, n.obs = n.obs, np.obs = np.obs, :
## The estimated weights for the factor scores are probably incorrect. Try a
## different factor score estimation method.

## Warning in fa.stats(r = r, f = f, phi = phi, n.obs = n.obs, np.obs = np.obs, :
## The estimated weights for the factor scores are probably incorrect. Try a
## different factor score estimation method.

## Parallel analysis suggests that the number of factors =  3  and the number of components =  2
fa74C1 <- fa(D74C.cor, n.obs = nD74C, nfactors = 1)

fa74C2 <- fa(D74C.cor, n.obs = nD74C, nfactors = 2)
fa74C3 <- fa(D74C.cor, n.obs = nD74C, nfactors = 3)

print(fa74C1$loadings)
## 
## Loadings:
##     MR1  
## SB  0.756
## PM  0.565
## PC  0.462
## PD  0.643
## PL  0.322
## PN  0.698
## PI  0.669
## PE  0.631
## PA  0.499
## PR  0.786
## PG  0.576
## PS  0.537
## PSI 0.291
## PT  0.284
## PGG 0.464
## POS      
## 
##                  MR1
## SS loadings    4.828
## Proportion Var 0.302
print(fa74C2$loadings, cutoff = 0.30)
## 
## Loadings:
##     MR2    MR1   
## SB   0.846       
## PM   0.572       
## PC   0.666       
## PD   0.485       
## PL   0.577       
## PN          0.795
## PI          0.797
## PE          0.698
## PA          0.724
## PR   0.409  0.489
## PG   0.361       
## PS   0.361       
## PSI              
## PT   0.423       
## PGG  0.445       
## POS              
## 
##                  MR2   MR1
## SS loadings    2.914 2.837
## Proportion Var 0.182 0.177
## Cumulative Var 0.182 0.359
print(fa74C3$loadings, cutoff = 0.30)
## 
## Loadings:
##     MR1    MR2    MR3   
## SB          0.848       
## PM          0.457       
## PC          0.623       
## PD          0.439       
## PL          0.635       
## PN   0.804              
## PI   0.808              
## PE   0.672              
## PA   0.743              
## PR   0.448  0.331       
## PG                 0.830
## PS                 0.842
## PSI                     
## PT          0.408       
## PGG         0.495       
## POS                     
## 
##                  MR1   MR2   MR3
## SS loadings    2.647 2.462 1.536
## Proportion Var 0.165 0.154 0.096
## Cumulative Var 0.165 0.319 0.415
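#One-factor loadings on the tests shared by all three DeVries (1974) samples
#(SB plus the Piagetian tasks, POS excluded); note that this overwrites the
#EFATOG74B vector defined earlier with the subset used for this comparison.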
EFATOG74A <- c(0.722, 0.591, 0.395, 0.591, 0.331, 0.687, 0.548, 0.608, 0.523, 0.697, 0.688, 0.657, 0.506, 0.349, 0.228)
EFATOG74B <- c(0.716, 0.534, 0.350, 0.532, 0.184, 0.681, 0.563, 0.595, 0.514, 0.767, 0.687, 0.636, 0.387, 0.262, 0.274)
EFATOG74C <- c(0.756, 0.565, 0.462, 0.643, 0.322, 0.698, 0.669, 0.631, 0.499, 0.786, 0.576, 0.537, 0.291, 0.284, 0.464)

cor(EFATOG74A, EFATOG74B, method = "pearson"); cor(EFATOG74A, EFATOG74B, method = "spearman"); CONGO(EFATOG74A, EFATOG74B)
## [1] 0.9540999
## [1] 0.9740844
## [1] 0.9940475
cor(EFATOG74A, EFATOG74C, method = "pearson"); cor(EFATOG74A, EFATOG74C, method = "spearman"); CONGO(EFATOG74A, EFATOG74C)
## [1] 0.7580764
## [1] 0.8275249
## [1] 0.9826617
cor(EFATOG74C, EFATOG74B, method = "pearson"); cor(EFATOG74C, EFATOG74B, method = "spearman"); CONGO(EFATOG74C, EFATOG74B)
## [1] 0.8710679
## [1] 0.8642857
## [1] 0.9871956

Hathaway (1972) - Kindergarten; n = 56

fa.parallel(H72K.cor, n.obs = nH72)

## Parallel analysis suggests that the number of factors =  1  and the number of components =  1
fa72K1 <- fa(H72K.cor, n.obs = nH72, nfactors = 1)
fa72K2 <- fa(H72K.cor, n.obs = nH72, nfactors = 2)
fa72K3 <- fa(H72K.cor, n.obs = nH72, nfactors = 3)
faPsy72K1 <- fa(H72KPsy.cor, n.obs = nH72, nfactors = 1)
faPsy72K2 <- fa(H72KPsy.cor, n.obs = nH72, nfactors = 2)
faPia72K1 <- fa(H72KPia.cor, n.obs = nH72, nfactors = 1)
#faPia72K2 <- fa(H72KPia.cor, n.obs = nH72, nfactors = 2) #produces an ultra-Heywood

print(fa72K1$loadings)
## 
## Loadings:
##        MR1  
## WINF   0.457
## WCOM   0.174
## WARI   0.168
## WSIM   0.569
## WVOC   0.492
## WPICCO 0.301
## WPICAR 0.553
## WBD    0.649
## WMAZ   0.506
## WOA    0.377
## LTOV   0.675
## LTCO   0.578
## LTP    0.527
## LOMD   0.381
## PS     0.559
## PT     0.572
## PN     0.551
## PD     0.441
## PCQ    0.349
## PCS    0.203
## PI     0.421
## PM     0.255
## PSE    0.587
## 
##                  MR1
## SS loadings    5.148
## Proportion Var 0.224
print(fa72K2$loadings, cutoff = 0.25)
## 
## Loadings:
##        MR1    MR2   
## WINF    0.279  0.292
## WCOM           0.274
## WARI                
## WSIM    0.276  0.469
## WVOC    0.449       
## WPICCO  0.266       
## WPICAR  0.440       
## WBD     0.676       
## WMAZ    0.524       
## WOA     0.535       
## LTOV    0.797       
## LTCO    0.569       
## LTP     0.689       
## LOMD    0.297       
## PS      0.332  0.373
## PT      0.324  0.406
## PN      0.392  0.279
## PD             0.394
## PCQ            0.666
## PCS            0.624
## PI      0.374       
## PM             0.273
## PSE     0.279  0.495
## 
##                  MR1   MR2
## SS loadings    3.850 2.274
## Proportion Var 0.167 0.099
## Cumulative Var 0.167 0.266
print(fa72K3$loadings, cutoff = 0.25)
## 
## Loadings:
##        MR1    MR2    MR3   
## WINF    0.271  0.310       
## WCOM                  0.264
## WARI                       
## WSIM           0.448       
## WVOC    0.397              
## WPICCO  0.271              
## WPICAR  0.507  0.264       
## WBD     0.604              
## WMAZ    0.370         0.372
## WOA     0.677              
## LTOV    0.749              
## LTCO    0.497              
## LTP     0.569         0.276
## LOMD    0.322              
## PS      0.259  0.365       
## PT      0.429  0.490       
## PN                    0.385
## PD             0.374       
## PCQ            0.672       
## PCS    -0.268  0.600       
## PI                    0.743
## PM             0.348 -0.290
## PSE            0.456  0.415
## 
##                  MR1   MR2   MR3
## SS loadings    3.271 2.296 1.580
## Proportion Var 0.142 0.100 0.069
## Cumulative Var 0.142 0.242 0.311
print(faPsy72K1$loadings)
## 
## Loadings:
##        MR1  
## WINF   0.419
## WCOM   0.110
## WARI   0.169
## WSIM   0.505
## WVOC   0.511
## WPICCO 0.313
## WPICAR 0.580
## WBD    0.706
## WMAZ   0.498
## WOA    0.458
## LTOV   0.750
## LTCO   0.602
## LTP    0.570
## LOMD   0.364
## 
##                  MR1
## SS loadings    3.504
## Proportion Var 0.250
print(faPsy72K2$loadings, cutoff = 0.2)
## 
## Loadings:
##        MR1    MR2   
## WINF           0.577
## WCOM           0.326
## WARI           0.225
## WSIM           0.686
## WVOC           0.505
## WPICCO         0.388
## WPICAR         0.614
## WBD     0.480  0.325
## WMAZ    0.455       
## WOA     0.440       
## LTOV    0.772       
## LTCO    0.541       
## LTP     0.797       
## LOMD    0.463       
## 
##                  MR1   MR2
## SS loadings    2.427 1.916
## Proportion Var 0.173 0.137
## Cumulative Var 0.173 0.310
print(faPia72K1$loadings)
## 
## Loadings:
##     MR1  
## PS  0.580
## PT  0.512
## PN  0.519
## PD  0.500
## PCQ 0.569
## PCS 0.421
## PI  0.382
## PM  0.321
## PSE 0.690
## 
##                  MR1
## SS loadings    2.343
## Proportion Var 0.260
#print(faPia72K2$loadings, cutoff = 0.2)
EFATOG72HK <- c(.457, .174, .168, .569, .492, .301, .553, .649, .506, .377, .675, .578, .527, .381, .559, .572, .551, .441, .349, .203, .421, .255, .587)
EFATOG72HKPsy <- c(.457, .174, .168, .569, .492, .301, .553, .649, .506, .377, .675, .578, .527, .381); EFATOG72HKPia <- c(.559, .572, .551, .441, .349, .203, .421, .255, .587)
EFASEP72HK <- c(.419, .110, .169, .505, .511, .313, .580, .706, .498, .458, .750, .602, .570, .364, .580, .512, .519, .500, .569, .421, .382, .321, .690)
EFASEP72HKPsy <- c(.419, .110, .169, .505, .511, .313, .580, .706, .498, .458, .750, .602, .570, .364); EFASEP72HKPia <- c(.580, .512, .519, .500, .569, .421, .382, .321, .690)

cor(EFATOG72HK, EFASEP72HK, method = "pearson"); cor(EFATOG72HK, EFASEP72HK, method = "spearman"); CONGO(EFATOG72HK, EFASEP72HK)
## [1] 0.8769877
## [1] 0.8722511
## [1] 0.9883622
cor(EFATOG72HKPsy, EFASEP72HKPsy, method = "pearson"); cor(EFATOG72HKPsy, EFASEP72HKPsy, method = "spearman"); CONGO(EFATOG72HKPsy, EFASEP72HKPsy)
## [1] 0.9735484
## [1] 0.9472527
## [1] 0.9963483
cor(EFATOG72HKPia, EFASEP72HKPia, method = "pearson"); cor(EFATOG72HKPia, EFASEP72HKPia, method = "spearman"); CONGO(EFATOG72HKPia, EFASEP72HKPia)
## [1] 0.69348
## [1] 0.7
## [1] 0.9771153
H72K1FNo <- '
gPsy =~ WINF + WCOM + WARI + WSIM + WVOC + WPICCO + WPICAR + WBD + WMAZ + WOA + LTOV + LTCO + LTP + LOMD 
gPia =~ PS + PT + PN + PD + PCQ + PCS + PI + PM + PSE

gPsy ~~ 0*gPia'

H72K1F <- '
gPsy =~ WINF + WCOM + WARI + WSIM + WVOC + WPICCO + WPICAR + WBD + WMAZ + WOA + LTOV + LTCO + LTP + LOMD 
gPia =~ PS + PT + PN + PD + PCQ + PCS + PI + PM + PSE

gPsy ~~ gPia'

H72K1FID <- '
gPsy =~ WINF + WCOM + WARI + WSIM + WVOC + WPICCO + WPICAR + WBD + WMAZ + WOA + LTOV + LTCO + LTP + LOMD 
gPia =~ PS + PT + PN + PD + PCQ + PCS + PI + PM + PSE

gPsy ~~ 1*gPia'

H72K2FNo <- '
VER =~ WINF + WCOM + WARI + WSIM + WVOC + WPICCO + WPICAR
PERF =~ WBD + WMAZ + WOA + LTOV + LTCO + LTP + LOMD 

gPsy =~ VER + PERF
gPia =~ PS + PT + PN + PD + PCQ + PCS + PI + PM + PSE

gPsy ~~ 0*gPia'

H72K2F <- '
VER =~ WINF + WCOM + WARI + WSIM + WVOC + WPICCO + WPICAR
PERF =~ WBD + WMAZ + WOA + LTOV + LTCO + LTP + LOMD 

gPsy =~ VER + PERF
gPia =~ PS + PT + PN + PD + PCQ + PCS + PI + PM + PSE

gPsy ~~ gPia'

H72K2FID <- '
VER =~ WINF + WCOM + WARI + WSIM + WVOC + WPICCO + WPICAR
PERF =~ WBD + WMAZ + WOA + LTOV + LTCO + LTP + LOMD 

gPsy =~ VER + PERF
gPia =~ PS + PT + PN + PD + PCQ + PCS + PI + PM + PSE

gPsy ~~ 1*gPia'

H72KNo1.fit <- cfa(H72K1FNo, sample.cov = H72K.cor, sample.nobs = nH72, std.lv = T)
H72K1.fit <- cfa(H72K1F, sample.cov = H72K.cor, sample.nobs = nH72, std.lv = T)
H72KID1.fit <- cfa(H72K1FID, sample.cov = H72K.cor, sample.nobs = nH72, std.lv = T)

H72KNo2.fit <- cfa(H72K2FNo, sample.cov = H72K.cor, sample.nobs = nH72, std.lv = T); "\n"
## Warning in lav_model_vcov(lavmodel = lavmodel, lavsamplestats = lavsamplestats, : lavaan WARNING:
##     Could not compute standard errors! The information matrix could
##     not be inverted. This may be a symptom that the model is not
##     identified.
## [1] "\n"
H72K2.fit <- cfa(H72K2F, sample.cov = H72K.cor, sample.nobs = nH72, std.lv = T)
H72KID2.fit <- cfa(H72K2FID, sample.cov = H72K.cor, sample.nobs = nH72, std.lv = T)

round(cbind("No Rel - 1"   = fitMeasures(H72KNo1.fit, FITM),
            "Free Rel - 1" = fitMeasures(H72K1.fit, FITM),
            "Identical - 1"         = fitMeasures(H72KID1.fit, FITM),
            "No Rel - 2"   = fitMeasures(H72KNo2.fit, FITM),
            "Free Rel - 2" = fitMeasures(H72K2.fit, FITM),
            "Identical - 2"         = fitMeasures(H72KID2.fit, FITM)), 3)
##                No Rel - 1 Free Rel - 1 Identical - 1 No Rel - 2 Free Rel - 2
## chisq             318.306      294.545       307.481    297.623      271.312
## df                230.000      229.000       230.000    228.000      227.000
## npar               46.000       47.000        46.000     48.000       49.000
## cfi                 0.683        0.765         0.722      0.750        0.841
## rmsea               0.083        0.071         0.078      0.074        0.059
## rmsea.ci.lower      0.059        0.045         0.053      0.048        0.024
## rmsea.ci.upper      0.104        0.094         0.099      0.096        0.084
## aic              3510.852     3489.091      3500.027   3494.169     3469.858
## bic              3604.018     3584.282      3593.193   3591.386     3569.100
## srmr                0.165        0.103         0.104      0.162        0.096
##                Identical - 2
## chisq                272.179
## df                   228.000
## npar                  48.000
## cfi                    0.841
## rmsea                  0.059
## rmsea.ci.lower         0.023
## rmsea.ci.upper         0.084
## aic                 3468.725
## bic                 3565.942
## srmr                   0.096
#cfaHB(H72K1.fit) <- does not work

1 - pchisq(294.545 - 271.312, 2, lower.tail = T) #Free Rel - 1 vs. Free Rel - 2
## [1] 9.016088e-06
1 - pchisq(272.179 - 271.312, 1, lower.tail = T) #Identical - 2 vs. Free Rel - 2
## [1] 0.3517872
summary(H72K2.fit, stand = T, fit = T)
## lavaan 0.6-12 ended normally after 30 iterations
## 
##   Estimator                                         ML
##   Optimization method                           NLMINB
##   Number of model parameters                        49
## 
##   Number of observations                            56
## 
## Model Test User Model:
##                                                       
##   Test statistic                               271.312
##   Degrees of freedom                               227
##   P-value (Chi-square)                           0.023
## 
## Model Test Baseline Model:
## 
##   Test statistic                               531.432
##   Degrees of freedom                               253
##   P-value                                        0.000
## 
## User Model versus Baseline Model:
## 
##   Comparative Fit Index (CFI)                    0.841
##   Tucker-Lewis Index (TLI)                       0.823
## 
## Loglikelihood and Information Criteria:
## 
##   Loglikelihood user model (H0)              -1685.929
##   Loglikelihood unrestricted model (H1)      -1550.273
##                                                       
##   Akaike (AIC)                                3469.858
##   Bayesian (BIC)                              3569.100
##   Sample-size adjusted Bayesian (BIC)         3415.095
## 
## Root Mean Square Error of Approximation:
## 
##   RMSEA                                          0.059
##   90 Percent confidence interval - lower         0.024
##   90 Percent confidence interval - upper         0.084
##   P-value RMSEA <= 0.05                          0.294
## 
## Standardized Root Mean Square Residual:
## 
##   SRMR                                           0.096
## 
## Parameter Estimates:
## 
##   Standard errors                             Standard
##   Information                                 Expected
##   Information saturated (h1) model          Structured
## 
## Latent Variables:
##                    Estimate  Std.Err  z-value  P(>|z|)   Std.lv  Std.all
##   VER =~                                                                
##     WINF              0.308    0.116    2.662    0.008    0.539    0.544
##     WCOM              0.132    0.092    1.439    0.150    0.230    0.233
##     WARI              0.130    0.091    1.419    0.156    0.227    0.229
##     WSIM              0.380    0.130    2.919    0.004    0.665    0.671
##     WVOC              0.317    0.117    2.702    0.007    0.555    0.560
##     WPICCO            0.229    0.103    2.221    0.026    0.400    0.404
##     WPICAR            0.372    0.128    2.897    0.004    0.650    0.656
##   PERF =~                                                               
##     WBD               0.467    0.109    4.266    0.000    0.689    0.695
##     WMAZ              0.360    0.104    3.449    0.001    0.532    0.537
##     WOA               0.300    0.102    2.930    0.003    0.442    0.446
##     LTOV              0.532    0.114    4.674    0.000    0.786    0.793
##     LTCO              0.425    0.107    3.962    0.000    0.627    0.633
##     LTP               0.446    0.108    4.118    0.000    0.658    0.664
##     LOMD              0.301    0.102    2.946    0.003    0.445    0.449
##   gPsy =~                                                               
##     VER               1.436    0.611    2.349    0.019    0.821    0.821
##     PERF              1.086    0.353    3.074    0.002    0.736    0.736
##   gPia =~                                                               
##     PS                0.599    0.132    4.536    0.000    0.599    0.605
##     PT                0.543    0.135    4.030    0.000    0.543    0.548
##     PN                0.555    0.134    4.135    0.000    0.555    0.560
##     PD                0.477    0.137    3.475    0.001    0.477    0.482
##     PCQ               0.479    0.137    3.487    0.000    0.479    0.483
##     PCS               0.328    0.142    2.304    0.021    0.328    0.331
##     PI                0.455    0.138    3.290    0.001    0.455    0.459
##     PM                0.291    0.143    2.029    0.042    0.291    0.293
##     PSE               0.675    0.128    5.256    0.000    0.675    0.681
## 
## Covariances:
##                    Estimate  Std.Err  z-value  P(>|z|)   Std.lv  Std.all
##   gPsy ~~                                                               
##     gPia              0.892    0.109    8.155    0.000    0.892    0.892
## 
## Variances:
##                    Estimate  Std.Err  z-value  P(>|z|)   Std.lv  Std.all
##    .WINF              0.692    0.148    4.677    0.000    0.692    0.705
##    .WCOM              0.929    0.178    5.209    0.000    0.929    0.946
##    .WARI              0.931    0.179    5.212    0.000    0.931    0.948
##    .WSIM              0.540    0.132    4.078    0.000    0.540    0.550
##    .WVOC              0.675    0.146    4.623    0.000    0.675    0.687
##    .WPICCO            0.822    0.164    5.009    0.000    0.822    0.837
##    .WPICAR            0.559    0.134    4.172    0.000    0.559    0.570
##    .WBD               0.508    0.116    4.393    0.000    0.508    0.517
##    .WMAZ              0.699    0.142    4.908    0.000    0.699    0.712
##    .WOA               0.786    0.156    5.057    0.000    0.786    0.801
##    .LTOV              0.364    0.099    3.662    0.000    0.364    0.371
##    .LTCO              0.589    0.127    4.654    0.000    0.589    0.600
##    .LTP               0.549    0.121    4.536    0.000    0.549    0.559
##    .LOMD              0.784    0.155    5.053    0.000    0.784    0.798
##    .PS                0.623    0.135    4.599    0.000    0.623    0.634
##    .PT                0.687    0.144    4.778    0.000    0.687    0.700
##    .PN                0.674    0.142    4.744    0.000    0.674    0.687
##    .PD                0.754    0.153    4.930    0.000    0.754    0.768
##    .PCQ               0.753    0.153    4.927    0.000    0.753    0.766
##    .PCS               0.875    0.170    5.145    0.000    0.875    0.891
##    .PI                0.775    0.156    4.973    0.000    0.775    0.789
##    .PM                0.898    0.173    5.180    0.000    0.898    0.914
##    .PSE               0.527    0.124    4.249    0.000    0.527    0.537
##    .VER               1.000                               0.327    0.327
##    .PERF              1.000                               0.459    0.459
##     gPsy              1.000                               1.000    1.000
##     gPia              1.000                               1.000    1.000
resid(H72K2.fit, "cor"); sum(abs(resid(H72K2.fit, "cor")$cov) > CRITR(56)); sum(abs(resid(H72K2.fit, "cor")$cov) > CRITR(56, NP(56)))
## $type
## [1] "cor.bollen"
## 
## $cov
##        WINF   WCOM   WARI   WSIM   WVOC   WPICCO WPICAR WBD    WMAZ   WOA   
## WINF    0.000                                                               
## WCOM   -0.016  0.000                                                        
## WARI    0.076 -0.133  0.000                                                 
## WSIM   -0.015  0.044  0.126  0.000                                          
## WVOC    0.056  0.140 -0.158 -0.095  0.000                                   
## WPICCO -0.019 -0.044 -0.082 -0.001  0.074  0.000                            
## WPICAR -0.037 -0.083  0.020  0.000  0.033  0.065  0.000                     
## WBD     0.092  0.042 -0.006  0.128  0.045 -0.059  0.125  0.000              
## WMAZ   -0.056  0.035 -0.024  0.043  0.059  0.009 -0.033 -0.023  0.000       
## WOA    -0.096 -0.283  0.038 -0.041  0.129  0.141  0.173 -0.070  0.001  0.000
## LTOV    0.060 -0.081 -0.060 -0.081  0.132 -0.093 -0.004 -0.001  0.014  0.076
## LTCO    0.012 -0.059  0.033  0.014 -0.034 -0.084  0.119  0.090 -0.080 -0.052
## LTP    -0.218 -0.153 -0.072 -0.129  0.026  0.038 -0.063 -0.062  0.044  0.004
## LOMD    0.013 -0.073  0.018 -0.082 -0.102 -0.109 -0.078 -0.042 -0.031  0.020
## PS      0.080  0.007  0.099 -0.047 -0.008 -0.079  0.090  0.144  0.067 -0.067
## PT      0.092 -0.013  0.028  0.071  0.066 -0.032  0.077  0.160 -0.033  0.160
## PN      0.027 -0.065 -0.114  0.085  0.121  0.055 -0.139  0.095  0.123 -0.034
## PD     -0.042  0.288 -0.191  0.024  0.083  0.118 -0.061 -0.120  0.180  0.009
## PCQ    -0.102 -0.002  0.089  0.083 -0.118 -0.243 -0.012 -0.150 -0.180 -0.122
## PCS    -0.072  0.064  0.045  0.078 -0.115 -0.058 -0.199 -0.181 -0.016 -0.267
## PI      0.027  0.112 -0.157  0.015  0.002 -0.106 -0.140  0.061  0.268 -0.114
## PM      0.143 -0.050  0.081 -0.214 -0.100  0.083  0.089 -0.064 -0.013  0.044
## PSE    -0.031  0.004  0.016  0.126 -0.159 -0.041 -0.017 -0.070 -0.020 -0.099
##        LTOV   LTCO   LTP    LOMD   PS     PT     PN     PD     PCQ    PCS   
## WINF                                                                        
## WCOM                                                                        
## WARI                                                                        
## WSIM                                                                        
## WVOC                                                                        
## WPICCO                                                                      
## WPICAR                                                                      
## WBD                                                                         
## WMAZ                                                                        
## WOA                                                                         
## LTOV    0.000                                                               
## LTCO   -0.082  0.000                                                        
## LTP     0.013  0.110  0.000                                                 
## LOMD    0.084 -0.054 -0.038  0.000                                          
## PS     -0.045 -0.081 -0.033  0.082  0.000                                   
## PT      0.135  0.113 -0.099  0.189 -0.031  0.000                            
## PN      0.069  0.138  0.106 -0.075  0.001 -0.117  0.000                     
## PD     -0.141  0.000  0.060  0.008 -0.041  0.046  0.040  0.000              
## PCQ    -0.132 -0.081 -0.101  0.108  0.028  0.055 -0.041  0.097  0.000       
## PCS    -0.182 -0.037 -0.244  0.093 -0.120  0.099  0.105  0.011  0.240  0.000
## PI      0.081  0.039  0.060 -0.075  0.092 -0.201  0.063 -0.071 -0.222 -0.142
## PM      0.037 -0.022 -0.088  0.004  0.103  0.209 -0.134  0.109  0.078  0.053
## PSE    -0.054  0.077  0.043 -0.001 -0.032 -0.083 -0.031 -0.058  0.071  0.075
##        PI     PM     PSE   
## WINF                       
## WCOM                       
## WARI                       
## WSIM                       
## WVOC                       
## WPICCO                     
## WPICAR                     
## WBD                        
## WMAZ                       
## WOA                        
## LTOV                       
## LTCO                       
## LTP                        
## LOMD                       
## PS                         
## PT                         
## PN                         
## PD                         
## PCQ                        
## PCS                        
## PI      0.000              
## PM     -0.235  0.000       
## PSE     0.248 -0.090  0.000
## [1] 4
## [1] 0

With a critical r of 0.26, there were four violations of local independence; the count dropped to zero when the critical value was recomputed with the sample-size-scaled alpha.

semPaths(H72K2.fit, "std", title = F, residuals = F, pastel = T, mar = c(2, 1, 3, 1), posCol = c("skyblue4"), layout = "circle", exoCov = T)

Hathaway (1972) - First Grade; n = 56

fa.parallel(H72I.cor, n.obs = nH72)
## Warning in cor.smooth(R): Matrix was not positive definite, smoothing was done
## In smc, smcs < 0 were set to .0
## Warning in cor.smooth(R): Matrix was not positive definite, smoothing was done
## In smc, smcs < 0 were set to .0
## Warning in cor.smooth(R): Matrix was not positive definite, smoothing was done
## In smc, smcs < 0 were set to .0
## Warning in cor.smooth(r): Matrix was not positive definite, smoothing was done
## Warning in fa.stats(r = r, f = f, phi = phi, n.obs = n.obs, np.obs = np.obs, :
## The estimated weights for the factor scores are probably incorrect. Try a
## different factor score estimation method.

## Parallel analysis suggests that the number of factors =  3  and the number of components =  1
fa.parallel(H72IPsy.cor, n.obs = nH72)
## Warning in cor.smooth(R): Matrix was not positive definite, smoothing was done
## Warning in cor.smooth(R): Matrix was not positive definite, smoothing was done

## Warning in cor.smooth(R): Matrix was not positive definite, smoothing was done
## Warning in cor.smooth(r): Matrix was not positive definite, smoothing was done
## Warning in fa.stats(r = r, f = f, phi = phi, n.obs = n.obs, np.obs = np.obs, :
## The estimated weights for the factor scores are probably incorrect. Try a
## different factor score estimation method.

## Parallel analysis suggests that the number of factors =  2  and the number of components =  2
fa.parallel(H72IPia.cor, n.obs = nH72)
## Warning in fa.stats(r = r, f = f, phi = phi, n.obs = n.obs, np.obs = np.obs, :
## The estimated weights for the factor scores are probably incorrect. Try a
## different factor score estimation method.
## Warning in fac(r = r, nfactors = nfactors, n.obs = n.obs, rotate = rotate, : An
## ultra-Heywood case was detected. Examine the results carefully
## Warning in fa.stats(r = r, f = f, phi = phi, n.obs = n.obs, np.obs = np.obs, :
## The estimated weights for the factor scores are probably incorrect. Try a
## different factor score estimation method.
## Warning in fac(r = r, nfactors = nfactors, n.obs = n.obs, rotate = rotate, : An
## ultra-Heywood case was detected. Examine the results carefully
## Warning in fa.stats(r = r, f = f, phi = phi, n.obs = n.obs, np.obs = np.obs, :
## The estimated weights for the factor scores are probably incorrect. Try a
## different factor score estimation method.

## Parallel analysis suggests that the number of factors =  1  and the number of components =  1
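#Note: the following EFAs are refit on the first-grade (H72I) matrices but reuse
#the fa72K*, faPsy72K*, and faPia72K* object names from the kindergarten sample.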
fa72K1 <- fa(H72I.cor, n.obs = nH72, nfactors = 1)
## Warning in cor.smooth(R): Matrix was not positive definite, smoothing was done
## In smc, smcs < 0 were set to .0
## Warning in cor.smooth(R): Matrix was not positive definite, smoothing was done
## In smc, smcs < 0 were set to .0
## Warning in cor.smooth(R): Matrix was not positive definite, smoothing was done
## In smc, smcs < 0 were set to .0
## Warning in cor.smooth(r): Matrix was not positive definite, smoothing was done
## Warning in fa.stats(r = r, f = f, phi = phi, n.obs = n.obs, np.obs = np.obs, :
## The estimated weights for the factor scores are probably incorrect. Try a
## different factor score estimation method.
fa72K2 <- fa(H72I.cor, n.obs = nH72, nfactors = 2)
## Warning in cor.smooth(R): Matrix was not positive definite, smoothing was done
## In smc, smcs < 0 were set to .0
## Warning in cor.smooth(R): Matrix was not positive definite, smoothing was done
## In smc, smcs < 0 were set to .0
## Warning in cor.smooth(R): Matrix was not positive definite, smoothing was done
## In smc, smcs < 0 were set to .0
## Warning in cor.smooth(r): Matrix was not positive definite, smoothing was done

## Warning in cor.smooth(r): The estimated weights for the factor scores are
## probably incorrect. Try a different factor score estimation method.
fa72K3 <- fa(H72I.cor, n.obs = nH72, nfactors = 3)
## Warning in cor.smooth(R): Matrix was not positive definite, smoothing was done
## In smc, smcs < 0 were set to .0
## Warning in cor.smooth(R): Matrix was not positive definite, smoothing was done
## In smc, smcs < 0 were set to .0
## Warning in cor.smooth(R): Matrix was not positive definite, smoothing was done
## In smc, smcs < 0 were set to .0
## Warning in cor.smooth(r): Matrix was not positive definite, smoothing was done

## Warning in cor.smooth(r): The estimated weights for the factor scores are
## probably incorrect. Try a different factor score estimation method.
faPsy72K1 <- fa(H72IPsy.cor, n.obs = nH72, nfactors = 1)
## Warning in cor.smooth(R): Matrix was not positive definite, smoothing was done
## Warning in cor.smooth(R): Matrix was not positive definite, smoothing was done

## Warning in cor.smooth(R): Matrix was not positive definite, smoothing was done
## Warning in cor.smooth(r): Matrix was not positive definite, smoothing was done
## Warning in fa.stats(r = r, f = f, phi = phi, n.obs = n.obs, np.obs = np.obs, :
## The estimated weights for the factor scores are probably incorrect. Try a
## different factor score estimation method.
faPsy72K2 <- fa(H72IPsy.cor, n.obs = nH72, nfactors = 2)
## Warning in cor.smooth(R): Matrix was not positive definite, smoothing was done
## Warning in cor.smooth(R): Matrix was not positive definite, smoothing was done

## Warning in cor.smooth(R): Matrix was not positive definite, smoothing was done
## Warning in cor.smooth(r): Matrix was not positive definite, smoothing was done
## Warning in fa.stats(r = r, f = f, phi = phi, n.obs = n.obs, np.obs = np.obs, :
## The estimated weights for the factor scores are probably incorrect. Try a
## different factor score estimation method.
faPia72K1 <- fa(H72IPia.cor, n.obs = nH72, nfactors = 1)
#faPia72K2 <- fa(H72IPia.cor, n.obs = nH72, nfactors = 2) #produces an ultra-Heywood

print(fa72K1$loadings)
## 
## Loadings:
##        MR1  
## WINF   0.727
## WCOM   0.409
## WARI   0.553
## WSIM   0.487
## WVOC   0.355
## WPICCO 0.354
## WPICAR 0.623
## WBD    0.493
## WMAZ   0.445
## WOA    0.116
## LTOV   0.408
## LTCO   0.567
## LTP    0.411
## LOMD   0.346
## PS     0.626
## PT     0.609
## PN     0.514
## PD     0.474
## PCQ    0.513
## PCS    0.437
## PI     0.525
## PM     0.622
## PSE    0.647
## CATRV  0.770
## CATCOM 0.578
## CATAR  0.780
## CATAF  0.579
## CATME  0.775
## CATSP  0.645
## 
##                  MR1
## SS loadings    8.784
## Proportion Var 0.303
print(fa72K2$loadings, cutoff = 0.2)
## 
## Loadings:
##        MR1    MR2   
## WINF    0.756       
## WCOM    0.351       
## WARI    0.555       
## WSIM    0.496       
## WVOC    0.328       
## WPICCO  0.428       
## WPICAR  0.618       
## WBD     0.514       
## WMAZ    0.437       
## WOA    -0.259  0.923
## LTOV    0.460       
## LTCO    0.542       
## LTP     0.436       
## LOMD    0.226  0.303
## PS      0.706       
## PT      0.699       
## PN      0.562       
## PD      0.496       
## PCQ     0.603       
## PCS     0.512       
## PI      0.509       
## PM      0.669       
## PSE     0.703       
## CATRV   0.574  0.526
## CATCOM  0.393  0.482
## CATAR   0.597  0.487
## CATAF   0.441  0.359
## CATME   0.618  0.415
## CATSP   0.438  0.549
## 
##                  MR1   MR2
## SS loadings    8.176 2.523
## Proportion Var 0.282 0.087
## Cumulative Var 0.282 0.369
print(fa72K3$loadings, cutoff = 0.2)
## 
## Loadings:
##        MR1    MR3    MR2   
## WINF           0.679       
## WCOM           0.634       
## WARI           0.511       
## WSIM           0.584       
## WVOC   -0.297  0.762       
## WPICCO         0.332  0.214
## WPICAR  0.253  0.474       
## WBD     0.345  0.215       
## WMAZ    0.326              
## WOA     0.242        -0.876
## LTOV           0.565       
## LTCO    0.333  0.325       
## LTP            0.340       
## LOMD    0.401              
## PS      0.369  0.336  0.342
## PT      0.202  0.493  0.305
## PN      0.274  0.308  0.233
## PD      0.255  0.286       
## PCQ     0.504         0.429
## PCS     0.522         0.397
## PI      0.303  0.305       
## PM      0.377  0.330  0.269
## PSE     0.553         0.361
## CATRV   0.677  0.266 -0.242
## CATCOM  0.339  0.389 -0.332
## CATAR   0.808              
## CATAF   0.921 -0.218       
## CATME   0.418  0.529 -0.221
## CATSP   0.610        -0.298
## 
##                  MR1   MR3   MR2
## SS loadings    4.802 4.379 2.206
## Proportion Var 0.166 0.151 0.076
## Cumulative Var 0.166 0.317 0.393
print(faPsy72K1$loadings, cutoff = 0.2)
## 
## Loadings:
##        MR1  
## WINF   0.697
## WCOM   0.465
## WARI   0.525
## WSIM   0.482
## WVOC   0.393
## WPICCO 0.304
## WPICAR 0.627
## WBD    0.447
## WMAZ   0.402
## WOA    0.303
## LTOV   0.340
## LTCO   0.554
## LTP    0.381
## LOMD   0.376
## CATRV  0.824
## CATCOM 0.663
## CATAR  0.801
## CATAF  0.549
## CATME  0.813
## CATSP  0.727
## 
##                  MR1
## SS loadings    6.262
## Proportion Var 0.313
print(faPsy72K2$loadings, cutoff = 0.2)
## 
## Loadings:
##        MR1    MR2   
## WINF           0.797
## WCOM           0.494
## WARI           0.529
## WSIM           0.601
## WVOC           0.592
## WPICCO         0.418
## WPICAR         0.557
## WBD     0.209  0.312
## WMAZ    0.227  0.238
## WOA     0.646 -0.310
## LTOV           0.513
## LTCO    0.275  0.371
## LTP            0.405
## LOMD    0.461       
## CATRV   0.800       
## CATCOM  0.522  0.237
## CATAR   0.815       
## CATAF   0.818       
## CATME   0.536  0.401
## CATSP   0.770       
## 
##                  MR1   MR2
## SS loadings    4.005 3.461
## Proportion Var 0.200 0.173
## Cumulative Var 0.200 0.373
print(faPia72K1$loadings, cutoff = 0.2)
## 
## Loadings:
##     MR1  
## PS  0.698
## PT  0.607
## PN  0.586
## PD  0.430
## PCQ 0.680
## PCS 0.533
## PI  0.489
## PM  0.626
## PSE 0.708
## 
##                  MR1
## SS loadings    3.262
## Proportion Var 0.362
#print(faPia72K2$loadings, cutoff = 0.2)
EFATOG72HI <- c(.727, .409, .553, .487, .355, .354, .623, .493, .445, .116, .408, .567, .411, .346, .626, .609, .514, .474, .513, .437, .525, .622, .647, .770, .578, .780, .579, .775, .645)
EFATOG72HIPsy <- c(.727, .409, .553, .487, .355, .354, .623, .493, .445, .116, .408, .567, .411, .346, .770, .578, .780, .579, .775, .645); EFATOG72HIPia <- c(.626, .609, .514, .474, .513, .437, .525, .622, .647)
EFASEP72HI <- c(.697, .465, .525, .482, .393, .304, .627, .447, .402, .303, .340, .554, .381, .376, .698, .607, .586, .430, .680, .533, .489, .626, .708, .824, .663, .801, .549, .813, .727)
EFASEP72HIPsy <- c(.697, .465, .525, .482, .393, .304, .627, .447, .402, .303, .340, .554, .381, .376, .824, .663, .801, .549, .813, .727); EFASEP72HIPia <- c(.698, .607, .586, .430, .680, .533, .489, .626, .708)

cor(EFATOG72HI, EFASEP72HI, method = "pearson"); cor(EFATOG72HI, EFASEP72HI, method = "spearman"); CONGO(EFATOG72HI, EFASEP72HI)
## [1] 0.9116304
## [1] 0.9374384
## [1] 0.9938031
cor(EFATOG72HIPsy, EFASEP72HIPsy, method = "pearson"); cor(EFATOG72HIPsy, EFASEP72HIPsy, method = "spearman"); CONGO(EFATOG72HIPsy, EFASEP72HIPsy)
## [1] 0.9375847
## [1] 0.9609023
## [1] 0.9942007
cor(EFATOG72HIPia, EFASEP72HIPia, method = "pearson"); cor(EFATOG72HIPia, EFASEP72HIPia, method = "spearman"); CONGO(EFATOG72HIPia, EFASEP72HIPia)
## [1] 0.7066751
## [1] 0.7333333
## [1] 0.9940796

NPD makes JCFA impossible with these data unless I selectively remove variables. Doing so always yields practically identical g factors, but removing so many variables, so selectively, makes the analysis hard to interpret legitimately.
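
One workaround I did not rely on here would be to smooth the NPD matrix before attempting the joint model, checking first how far smoothing moves the correlations away from the published values; a minimal sketch (the H72ISm.cor name is only for illustration):

#Sketch only: smooth the NPD matrix, then inspect how much smoothing changed it before
#passing the smoothed matrix to cfa() with the usual joint-model syntax
H72ISm.cor <- cor.smooth(H72I.cor)   #psych::cor.smooth (psych is already loaded)
max(abs(H72ISm.cor - H72I.cor))      #largest absolute change introduced by smoothing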

Hathaway (1972) - Second Grade; n = 56

fa.parallel(H72II.cor, n.obs = nH72)

## Parallel analysis suggests that the number of factors =  2  and the number of components =  1
fa.parallel(H72IIPsy.cor, n.obs = nH72)

## Parallel analysis suggests that the number of factors =  2  and the number of components =  2
fa.parallel(H72IIPia.cor, n.obs = nH72)
## Warning in fa.stats(r = r, f = f, phi = phi, n.obs = n.obs, np.obs = np.obs, :
## The estimated weights for the factor scores are probably incorrect. Try a
## different factor score estimation method.
## Warning in fac(r = r, nfactors = nfactors, n.obs = n.obs, rotate = rotate, : An
## ultra-Heywood case was detected. Examine the results carefully

## Parallel analysis suggests that the number of factors =  1  and the number of components =  1
fa72K1 <- fa(H72II.cor, n.obs = nH72, nfactors = 1)
fa72K2 <- fa(H72II.cor, n.obs = nH72, nfactors = 2)
fa72K3 <- fa(H72II.cor, n.obs = nH72, nfactors = 3)

faPsy72K1 <- fa(H72IIPsy.cor, n.obs = nH72, nfactors = 1)
faPsy72K2 <- fa(H72IIPsy.cor, n.obs = nH72, nfactors = 2)

faPia72K1 <- fa(H72IIPia.cor, n.obs = nH72, nfactors = 1)
faPia72K2 <- fa(H72IIPia.cor, n.obs = nH72, nfactors = 2)

print(fa72K1$loadings)
## 
## Loadings:
##        MR1  
## WINF   0.727
## WCOM   0.360
## WARI   0.431
## WSIM   0.402
## WVOC   0.478
## WPICCO 0.462
## WPICAR 0.444
## WBD    0.559
## WMAZ   0.427
## WOA    0.275
## LTOV   0.560
## LTCO   0.591
## LTP    0.560
## LOMD   0.382
## PS     0.578
## PT     0.479
## PN     0.405
## PD     0.545
## PCQ    0.538
## PCS    0.481
## PI     0.475
## PM     0.545
## PSE    0.478
## CATRV  0.736
## CATCOM 0.756
## CATAR  0.894
## CATAF  0.771
## CATME  0.644
## CATSP  0.668
## 
##                  MR1
## SS loadings    9.001
## Proportion Var 0.310
print(fa72K2$loadings, cutoff = 0.2)
## 
## Loadings:
##        MR1    MR2   
## WINF           0.687
## WCOM           0.515
## WARI           0.331
## WSIM           0.559
## WVOC           0.708
## WPICCO         0.537
## WPICAR         0.686
## WBD     0.302  0.344
## WMAZ    0.422       
## WOA     0.540 -0.278
## LTOV    0.228  0.434
## LTCO    0.283  0.408
## LTP     0.425  0.204
## LOMD    0.419       
## PS      0.589       
## PT      0.491       
## PN             0.393
## PD      0.282  0.349
## PCQ            0.439
## PCS     0.308  0.240
## PI      0.288  0.256
## PM      0.273  0.361
## PSE     0.499       
## CATRV   0.863       
## CATCOM  0.666       
## CATAR   0.817       
## CATAF   0.803       
## CATME   0.648       
## CATSP   0.848       
## 
##                  MR1   MR2
## SS loadings    6.000 3.824
## Proportion Var 0.207 0.132
## Cumulative Var 0.207 0.339
print(fa72K3$loadings, cutoff = 0.2)
## 
## Loadings:
##        MR1    MR2    MR3   
## WINF           0.521  0.323
## WCOM           0.465       
## WARI           0.254       
## WSIM           0.313  0.498
## WVOC           0.664       
## WPICCO         0.510       
## WPICAR         0.698       
## WBD     0.288  0.237  0.209
## WMAZ    0.439              
## WOA     0.494 -0.284       
## LTOV    0.213  0.300  0.259
## LTCO    0.399  0.547 -0.209
## LTP     0.382         0.237
## LOMD    0.459              
## PS      0.505         0.325
## PT      0.383 -0.200  0.425
## PN             0.285  0.207
## PD      0.256  0.229  0.244
## PCQ                   0.840
## PCS                   0.493
## PI      0.228         0.325
## PM      0.274  0.271       
## PSE     0.508              
## CATRV   0.849              
## CATCOM  0.715  0.213       
## CATAR   0.808              
## CATAF   0.798              
## CATME   0.685              
## CATSP   0.823              
## 
##                  MR1   MR2   MR3
## SS loadings    5.744 2.754 2.128
## Proportion Var 0.198 0.095 0.073
## Cumulative Var 0.198 0.293 0.366
print(faPsy72K1$loadings, cutoff = 0.2)
## 
## Loadings:
##        MR1  
## WINF   0.728
## WCOM   0.366
## WARI   0.418
## WSIM   0.381
## WVOC   0.507
## WPICCO 0.491
## WPICAR 0.455
## WBD    0.547
## WMAZ   0.451
## WOA    0.260
## LTOV   0.531
## LTCO   0.621
## LTP    0.536
## LOMD   0.395
## CATRV  0.749
## CATCOM 0.774
## CATAR  0.893
## CATAF  0.768
## CATME  0.671
## CATSP  0.687
## 
##                  MR1
## SS loadings    6.839
## Proportion Var 0.342
print(faPsy72K2$loadings, cutoff = 0.2)
## 
## Loadings:
##        MR1    MR2   
## WINF    0.237  0.668
## WCOM           0.533
## WARI    0.208  0.286
## WSIM           0.554
## WVOC           0.672
## WPICCO         0.533
## WPICAR         0.718
## WBD     0.298  0.345
## WMAZ    0.443       
## WOA     0.552 -0.322
## LTOV    0.244  0.390
## LTCO    0.323  0.410
## LTP     0.399  0.208
## LOMD    0.490       
## CATRV   0.845       
## CATCOM  0.685       
## CATAR   0.816       
## CATAF   0.820       
## CATME   0.649       
## CATSP   0.844       
## 
##                  MR1   MR2
## SS loadings    4.927 3.047
## Proportion Var 0.246 0.152
## Cumulative Var 0.246 0.399
print(faPia72K1$loadings, cutoff = 0.2)
## 
## Loadings:
##     MR1  
## PS  0.622
## PT  0.520
## PN  0.473
## PD  0.650
## PCQ 0.741
## PCS 0.562
## PI  0.491
## PM  0.480
## PSE 0.363
## 
##                  MR1
## SS loadings    2.772
## Proportion Var 0.308
print(faPia72K2$loadings, cutoff = 0.2)
## 
## Loadings:
##     MR1    MR2   
## PS   0.785       
## PT   0.538       
## PN          0.765
## PD   0.250  0.516
## PCQ  0.409  0.438
## PCS  0.201  0.456
## PI   0.453       
## PM          0.407
## PSE  0.479       
## 
##                  MR1   MR2
## SS loadings    1.656 1.437
## Proportion Var 0.184 0.160
## Cumulative Var 0.184 0.344
EFATOG72HII <- c(.727, .360, .431, .402, .478, .462, .444, .559, .427, .275, .560, .591, .560, .382, .578, .479, .405, .545, .538, .481, .475, .545, .478, .736, .756, .894, .771, .644, .668)
EFATOG72HIIPsy <- c(.727, .360, .431, .402, .478, .462, .444, .559, .427, .275, .560, .591, .560, .382, .736, .756, .894, .771, .644, .668); EFATOG72HIIPia <- c(.578, .479, .405, .545, .538, .481, .475, .545, .478)
EFASEP72HII <- c(.728, .366, .418, .381, .507, .491, .455, .547, .451, .260, .531, .621, .536, .395, .622, .520, .473, .650, .741, .562, .491, .480, .363, .749, .774, .893, .768, .671, .687)
EFASEP72HIIPsy <- c(.728, .366, .418, .381, .507, .491, .455, .547, .451, .260, .531, .621, .536, .395, .749, .774, .893, .768, .671, .687); EFASEP72HIIPia <- c(.622, .520, .473, .650, .741, .562, .491, .480, .363)

cor(EFATOG72HII, EFASEP72HII, method = "pearson"); cor(EFATOG72HII, EFASEP72HII, method = "spearman"); CONGO(EFATOG72HII, EFASEP72HII)
## [1] 0.931689
## [1] 0.9132578
## [1] 0.9955797
cor(EFATOG72HIIPsy, EFASEP72HIIPsy, method = "pearson"); cor(EFATOG72HIIPsy, EFASEP72HIIPsy, method = "spearman"); CONGO(EFATOG72HIIPsy, EFASEP72HIIPsy)
## [1] 0.9935642
## [1] 0.990598
## [1] 0.9994974
cor(EFATOG72HIIPia, EFASEP72HIIPia, method = "pearson"); cor(EFATOG72HIIPia, EFASEP72HIIPia, method = "spearman"); CONGO(EFATOG72HIIPia, EFASEP72HIIPia)
## [1] 0.5787404
## [1] 0.6276206
## [1] 0.98763
H72II1FNo <- '
gPsy =~ WINF + WCOM + WARI + WSIM + WVOC + WPICCO + WPICAR + WBD + WMAZ + WOA + LTOV + LTCO + LTP + LOMD + CATRV + CATCOM + CATAR + CATAF + CATME + CATSP
gPia =~ PS + PT + PN + PD + PCQ + PCS + PI + PM + PSE

gPsy ~~ 0*gPia'

H72II1F <- '
gPsy =~ WINF + WCOM + WARI + WSIM + WVOC + WPICCO + WPICAR + WBD + WMAZ + WOA + LTOV + LTCO + LTP + LOMD + CATRV + CATCOM + CATAR + CATAF + CATME + CATSP
gPia =~ PS + PT + PN + PD + PCQ + PCS + PI + PM + PSE

gPsy ~~ gPia'

H72II1FID <- '
gPsy =~ WINF + WCOM + WARI + WSIM + WVOC + WPICCO + WPICAR + WBD + WMAZ + WOA + LTOV + LTCO + LTP + LOMD + CATRV + CATCOM + CATAR + CATAF + CATME + CATSP
gPia =~ PS + PT + PN + PD + PCQ + PCS + PI + PM + PSE

gPsy ~~ 1*gPia'

H72II2FNo <- '
PERF =~ WMAZ + WOA + LTP + LOMD + CATRV + CATCOM + CATAR + CATAF + CATME + CATSP 
VER =~ WINF + WCOM + WARI + WSIM + WVOC + WPICCO + WPICAR + LTOV + LTCO

gPsy =~ VER + PERF
gPia =~ PS + PT + PN + PD + PCQ + PCS + PI + PM + PSE

gPsy ~~ 0*gPia'

H72II2F <- '
PERF =~ WMAZ + WOA + LTP + LOMD + CATRV + CATCOM + CATAR + CATAF + CATME + CATSP
VER =~ WINF + WCOM + WARI + WSIM + WVOC + WPICCO + WPICAR + LTOV + LTCO

gPsy =~ VER + PERF
gPia =~ PS + PT + PN + PD + PCQ + PCS + PI + PM + PSE

gPsy ~~ gPia'

H72II2FID <- '
PERF =~ WMAZ + WOA + LTP + LOMD + CATRV + CATCOM + CATAR + CATAF + CATME + CATSP
VER =~ WINF + WCOM + WARI + WSIM + WVOC + WPICCO + WPICAR + LTOV + LTCO

gPsy =~ VER + PERF
gPia =~ PS + PT + PN + PD + PCQ + PCS + PI + PM + PSE

gPsy ~~ 1*gPia'

H72IINo1.fit <- cfa(H72II1FNo, sample.cov = H72II.cor, sample.nobs = nH72, std.lv = T)
H72II1.fit <- cfa(H72II1F, sample.cov = H72II.cor, sample.nobs = nH72, std.lv = T)
H72IIID1.fit <- cfa(H72II1FID, sample.cov = H72II.cor, sample.nobs = nH72, std.lv = T)

H72IINo2.fit <- cfa(H72II2FNo, sample.cov = H72II.cor, sample.nobs = nH72, std.lv = T); "\n"
## Warning in lav_model_vcov(lavmodel = lavmodel, lavsamplestats = lavsamplestats, : lavaan WARNING:
##     Could not compute standard errors! The information matrix could
##     not be inverted. This may be a symptom that the model is not
##     identified.
## [1] "\n"
H72II2.fit <- cfa(H72II2F, sample.cov = H72II.cor, sample.nobs = nH72, std.lv = T)
H72IIID2.fit <- cfa(H72II2FID, sample.cov = H72II.cor, sample.nobs = nH72, std.lv = T)

round(cbind("No Rel - 1"   = fitMeasures(H72IINo1.fit, FITM),
            "Free Rel - 1" = fitMeasures(H72II1.fit, FITM),
            "Identical - 1"         = fitMeasures(H72IIID1.fit, FITM),
            "No Rel - 2"   = fitMeasures(H72IINo2.fit, FITM),
            "Free Rel - 2" = fitMeasures(H72II2.fit, FITM),
            "Identical - 2"         = fitMeasures(H72IIID2.fit, FITM)), 3)
##                No Rel - 1 Free Rel - 1 Identical - 1 No Rel - 2 Free Rel - 2
## chisq             593.095      553.396       566.337    506.032      465.462
## df                377.000      376.000       377.000    348.000      347.000
## npar               58.000       59.000        58.000     58.000       59.000
## cfi                 0.680        0.737         0.719      0.757        0.818
## rmsea               0.101        0.092         0.095      0.090        0.078
## rmsea.ci.lower      0.085        0.075         0.078      0.072        0.058
## rmsea.ci.upper      0.116        0.108         0.110      0.107        0.096
## aic              4208.212     4170.512      4181.453   4014.375     3975.805
## bic              4325.682     4290.008      4298.924   4131.845     4095.301
## srmr                0.204        0.102         0.103      0.201        0.095
##                Identical - 2
## chisq                466.577
## df                   348.000
## npar                  58.000
## cfi                    0.818
## rmsea                  0.078
## rmsea.ci.lower         0.058
## rmsea.ci.upper         0.096
## aic                 3974.920
## bic                 4092.390
## srmr                   0.095
#cfaHB(H72II1.fit) <- does not work

1 - pchisq(553.396 - 465.462, 376 - 347, lower.tail = T)
## [1] 7.544906e-08
1 - pchisq(466.577 - 465.462, 1, lower.tail = T)
## [1] 0.2909976
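
For the free-versus-identical comparison, the same 1-df likelihood-ratio test can be taken directly from the fitted lavaan objects instead of the rounded fit statistics; an equivalent call would be:

#Equivalent 1-df LRT for fixing the gPsy~~gPia correlation to 1 in the two-factor model
lavTestLRT(H72IIID2.fit, H72II2.fit)
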
summary(H72II2.fit, stand = T, fit = T)
## lavaan 0.6-12 ended normally after 43 iterations
## 
##   Estimator                                         ML
##   Optimization method                           NLMINB
##   Number of model parameters                        59
## 
##   Number of observations                            56
## 
## Model Test User Model:
##                                                       
##   Test statistic                               465.462
##   Degrees of freedom                               347
##   P-value (Chi-square)                           0.000
## 
## Model Test Baseline Model:
## 
##   Test statistic                              1029.195
##   Degrees of freedom                               378
##   P-value                                        0.000
## 
## User Model versus Baseline Model:
## 
##   Comparative Fit Index (CFI)                    0.818
##   Tucker-Lewis Index (TLI)                       0.802
## 
## Loglikelihood and Information Criteria:
## 
##   Loglikelihood user model (H0)              -1928.902
##   Loglikelihood unrestricted model (H1)      -1696.172
##                                                       
##   Akaike (AIC)                                3975.805
##   Bayesian (BIC)                              4095.301
##   Sample-size adjusted Bayesian (BIC)         3909.866
## 
## Root Mean Square Error of Approximation:
## 
##   RMSEA                                          0.078
##   90 Percent confidence interval - lower         0.058
##   90 Percent confidence interval - upper         0.096
##   P-value RMSEA <= 0.05                          0.012
## 
## Standardized Root Mean Square Residual:
## 
##   SRMR                                           0.095
## 
## Parameter Estimates:
## 
##   Standard errors                             Standard
##   Information                                 Expected
##   Information saturated (h1) model          Structured
## 
## Latent Variables:
##                    Estimate  Std.Err  z-value  P(>|z|)   Std.lv  Std.all
##   PERF =~                                                               
##     WMAZ              0.221    0.079    2.802    0.005    0.436    0.440
##     WOA               0.157    0.074    2.115    0.034    0.309    0.312
##     LTP               0.264    0.083    3.184    0.001    0.520    0.525
##     LOMD              0.193    0.077    2.510    0.012    0.379    0.383
##     CATRV             0.408    0.099    4.120    0.000    0.804    0.811
##     CATCOM            0.405    0.099    4.107    0.000    0.798    0.806
##     CATAR             0.475    0.108    4.386    0.000    0.935    0.943
##     CATAF             0.404    0.098    4.100    0.000    0.796    0.803
##     CATME             0.338    0.091    3.728    0.000    0.666    0.672
##     CATSP             0.398    0.098    4.073    0.000    0.785    0.792
##   VER =~                                                                
##     WINF              0.510    0.099    5.157    0.000    0.819    0.827
##     WCOM              0.308    0.092    3.365    0.001    0.495    0.499
##     WARI              0.259    0.091    2.861    0.004    0.416    0.420
##     WSIM              0.317    0.092    3.460    0.001    0.510    0.515
##     WVOC              0.401    0.094    4.266    0.000    0.644    0.650
##     WPICCO            0.343    0.092    3.711    0.000    0.551    0.556
##     WPICAR            0.372    0.093    3.990    0.000    0.597    0.603
##     LTOV              0.353    0.093    3.809    0.000    0.567    0.572
##     LTCO              0.385    0.093    4.114    0.000    0.618    0.624
##   gPsy =~                                                               
##     VER               1.258    0.341    3.692    0.000    0.783    0.783
##     PERF              1.698    0.528    3.217    0.001    0.862    0.862
##   gPia =~                                                               
##     PS                0.633    0.127    4.999    0.000    0.633    0.638
##     PT                0.538    0.131    4.099    0.000    0.538    0.543
##     PN                0.460    0.134    3.422    0.001    0.460    0.464
##     PD                0.614    0.128    4.811    0.000    0.614    0.619
##     PCQ               0.653    0.126    5.200    0.000    0.653    0.659
##     PCS               0.547    0.131    4.180    0.000    0.547    0.552
##     PI                0.493    0.133    3.708    0.000    0.493    0.498
##     PM                0.540    0.131    4.116    0.000    0.540    0.545
##     PSE               0.429    0.136    3.165    0.002    0.429    0.433
## 
## Covariances:
##                    Estimate  Std.Err  z-value  P(>|z|)   Std.lv  Std.all
##   gPsy ~~                                                               
##     gPia              0.924    0.073   12.747    0.000    0.924    0.924
## 
## Variances:
##                    Estimate  Std.Err  z-value  P(>|z|)   Std.lv  Std.all
##    .WMAZ              0.792    0.152    5.222    0.000    0.792    0.806
##    .WOA               0.887    0.169    5.260    0.000    0.887    0.903
##    .LTP               0.712    0.137    5.182    0.000    0.712    0.725
##    .LOMD              0.838    0.160    5.242    0.000    0.838    0.854
##    .CATRV             0.336    0.071    4.718    0.000    0.336    0.342
##    .CATCOM            0.345    0.073    4.741    0.000    0.345    0.351
##    .CATAR             0.108    0.037    2.917    0.004    0.108    0.110
##    .CATAF             0.349    0.073    4.753    0.000    0.349    0.356
##    .CATME             0.539    0.107    5.051    0.000    0.539    0.549
##    .CATSP             0.366    0.076    4.792    0.000    0.366    0.372
##    .WINF              0.311    0.086    3.618    0.000    0.311    0.316
##    .WCOM              0.737    0.146    5.037    0.000    0.737    0.751
##    .WARI              0.809    0.158    5.128    0.000    0.809    0.824
##    .WSIM              0.722    0.144    5.015    0.000    0.722    0.735
##    .WVOC              0.567    0.120    4.726    0.000    0.567    0.577
##    .WPICCO            0.679    0.137    4.949    0.000    0.679    0.691
##    .WPICAR            0.626    0.129    4.853    0.000    0.626    0.637
##    .LTOV              0.661    0.134    4.918    0.000    0.661    0.673
##    .LTCO              0.600    0.125    4.801    0.000    0.600    0.611
##    .PS                0.582    0.124    4.690    0.000    0.582    0.592
##    .PT                0.693    0.141    4.929    0.000    0.693    0.705
##    .PN                0.771    0.152    5.054    0.000    0.771    0.785
##    .PD                0.605    0.127    4.748    0.000    0.605    0.616
##    .PCQ               0.556    0.120    4.621    0.000    0.556    0.566
##    .PCS               0.683    0.139    4.911    0.000    0.683    0.696
##    .PI                0.739    0.148    5.006    0.000    0.739    0.752
##    .PM                0.691    0.140    4.925    0.000    0.691    0.703
##    .PSE               0.798    0.157    5.092    0.000    0.798    0.813
##    .PERF              1.000                               0.258    0.258
##    .VER               1.000                               0.387    0.387
##     gPsy              1.000                               1.000    1.000
##     gPia              1.000                               1.000    1.000
resid(H72II2.fit, "cor"); sum(abs(resid(H72II2.fit, "cor")$cov > CRITR(56))); sum(abs(resid(H72II2.fit, "cor")$cov > CRITR(56, NP(56))))
## $type
## [1] "cor.bollen"
## 
## $cov
##        WMAZ   WOA    LTP    LOMD   CATRV  CATCOM CATAR  CATAF  CATME  CATSP 
## WMAZ    0.000                                                               
## WOA     0.183  0.000                                                        
## LTP    -0.141 -0.004  0.000                                                 
## LOMD    0.022  0.331  0.019  0.000                                          
## CATRV  -0.057  0.017 -0.046  0.070  0.000                                   
## CATCOM  0.025 -0.051 -0.023  0.022 -0.024  0.000                            
## CATAR   0.015 -0.044 -0.015 -0.061 -0.015  0.040  0.000                     
## CATAF   0.087  0.110 -0.001  0.093 -0.031 -0.027 -0.007  0.000              
## CATME  -0.016 -0.060  0.038 -0.057  0.115  0.029 -0.054  0.021  0.000       
## CATSP  -0.069  0.053  0.034  0.007  0.107 -0.118  0.013  0.004  0.058  0.000
## WINF    0.004 -0.014  0.217 -0.013 -0.042  0.021  0.024  0.102  0.015 -0.042
## WCOM   -0.088 -0.145  0.073 -0.029 -0.103 -0.061 -0.038 -0.050 -0.106 -0.097
## WARI    0.265 -0.098  0.001  0.022  0.070  0.052  0.053  0.143  0.070 -0.074
## WSIM   -0.073 -0.118  0.018 -0.133 -0.052 -0.130 -0.058 -0.089 -0.043 -0.105
## WVOC    0.077 -0.147  0.020  0.122 -0.116 -0.103 -0.104 -0.082 -0.075 -0.117
## WPICCO  0.055 -0.097  0.083  0.017 -0.044  0.038  0.026 -0.031  0.038 -0.077
## WPICAR -0.099 -0.287 -0.033 -0.156 -0.130  0.063  0.007 -0.136 -0.023 -0.192
## LTOV    0.050  0.050  0.028  0.142  0.057  0.149  0.106 -0.060  0.061 -0.076
## LTCO   -0.025 -0.131  0.129  0.199  0.019  0.201  0.093  0.082  0.277  0.037
## PS     -0.044  0.111  0.073  0.025  0.087  0.030  0.070  0.082  0.028 -0.023
## PT     -0.090 -0.035  0.173 -0.135  0.109  0.032  0.112 -0.027 -0.010  0.128
## PN     -0.103 -0.025 -0.084  0.149 -0.140  0.002 -0.009  0.053 -0.128 -0.143
## PD     -0.047 -0.064  0.031  0.141 -0.090  0.023 -0.035 -0.026 -0.011 -0.141
## PCQ    -0.101 -0.074  0.105 -0.101 -0.165 -0.153 -0.125 -0.121 -0.102 -0.146
## PCS    -0.013  0.033  0.029 -0.158 -0.006 -0.054  0.025  0.027 -0.055  0.022
## PI      0.095  0.136  0.242  0.118 -0.092 -0.120 -0.024  0.052 -0.036 -0.164
## PM     -0.051 -0.005 -0.018 -0.026  0.038  0.091  0.131  0.052 -0.001 -0.024
## PSE     0.198  0.082  0.129  0.018  0.120  0.092  0.075  0.193  0.168  0.077
##        WINF   WCOM   WARI   WSIM   WVOC   WPICCO WPICAR LTOV   LTCO   PS    
## WMAZ                                                                        
## WOA                                                                         
## LTP                                                                         
## LOMD                                                                        
## CATRV                                                                       
## CATCOM                                                                      
## CATAR                                                                       
## CATAF                                                                       
## CATME                                                                       
## CATSP                                                                       
## WINF    0.000                                                               
## WCOM    0.037  0.000                                                        
## WARI    0.003 -0.090  0.000                                                 
## WSIM    0.034  0.073  0.094  0.000                                          
## WVOC   -0.018  0.145  0.087  0.015  0.000                                   
## WPICCO  0.010 -0.048  0.037  0.104 -0.051  0.000                            
## WPICAR  0.002 -0.031 -0.193 -0.060  0.048  0.115  0.000                     
## LTOV   -0.073 -0.046 -0.070  0.006  0.088 -0.118 -0.005  0.000              
## LTCO   -0.026 -0.051 -0.012 -0.201 -0.015  0.003  0.114  0.083  0.000       
## PS     -0.052 -0.061 -0.074  0.012 -0.190 -0.137 -0.128  0.136 -0.048  0.000
## PT     -0.075 -0.026 -0.165  0.088 -0.245 -0.208 -0.107  0.225 -0.095  0.144
## PN     -0.008  0.102  0.009 -0.133  0.092  0.053  0.088  0.108  0.001 -0.106
## PD     -0.051 -0.054  0.052 -0.031 -0.111  0.121  0.000  0.014  0.151 -0.015
## PCQ     0.136 -0.138  0.060  0.235 -0.020 -0.075 -0.117  0.127 -0.087  0.020
## PCS     0.060 -0.179  0.242  0.004 -0.060  0.088 -0.071 -0.088 -0.239 -0.142
## PI      0.102 -0.030  0.089  0.075  0.006 -0.100 -0.067  0.214  0.085  0.002
## PM      0.054  0.093  0.144  0.007  0.034 -0.059  0.083  0.165  0.114 -0.038
## PSE     0.041  0.024  0.218 -0.121 -0.064  0.006  0.021 -0.119  0.135  0.114
##        PT     PN     PD     PCQ    PCS    PI     PM     PSE   
## WMAZ                                                          
## WOA                                                           
## LTP                                                           
## LOMD                                                          
## CATRV                                                         
## CATCOM                                                        
## CATAR                                                         
## CATAF                                                         
## CATME                                                         
## CATSP                                                         
## WINF                                                          
## WCOM                                                          
## WARI                                                          
## WSIM                                                          
## WVOC                                                          
## WPICCO                                                        
## WPICAR                                                        
## LTOV                                                          
## LTCO                                                          
## PS                                                            
## PT      0.000                                                 
## PN     -0.082  0.000                                          
## PD     -0.096  0.203  0.000                                   
## PCQ     0.013  0.014  0.092  0.000                            
## PCS    -0.010  0.094 -0.102  0.217  0.000                     
## PI      0.050 -0.081  0.032  0.032 -0.065  0.000              
## PM     -0.066  0.077  0.013 -0.089  0.030 -0.051  0.000       
## PSE    -0.085 -0.161 -0.008 -0.085  0.001  0.045 -0.156  0.000
## [1] 6
## [1] 0

There were six local independence violations at the unadjusted threshold, but the count dropped to zero once the critical value was scaled to the sample size.
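
To make the "scaling" explicit: the second count uses the NP() adjustment from the setup, which shrinks alpha as n grows and therefore raises the critical residual correlation; a minimal illustration for this sample size:

#Unadjusted versus sample-size-adjusted critical values for the residual correlations
CRITR(56)          #critical |r| at alpha = .05
CRITR(56, NP(56))  #stricter critical |r| at the NP-scaled alpha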

semPaths(H72II2.fit, "std", title = F, residuals = F, pastel = T, mar = c(2, 1, 3, 1), posCol = c("skyblue4"), layout = "circle", exoCov = T)

Kiga (4-to-6-Year Olds); n = 39

fa.parallel(Kiga46[, 3:10])
## Warning in fa.stats(r = r, f = f, phi = phi, n.obs = n.obs, np.obs = np.obs, :
## The estimated weights for the factor scores are probably incorrect. Try a
## different factor score estimation method.
## Warning in fac(r = r, nfactors = nfactors, n.obs = n.obs, rotate = rotate, : An
## ultra-Heywood case was detected. Examine the results carefully

## Parallel analysis suggests that the number of factors =  3  and the number of components =  1
fa.parallel(Kiga46[, 3:6])
## Warning in fa.stats(r = r, f = f, phi = phi, n.obs = n.obs, np.obs = np.obs, :
## The estimated weights for the factor scores are probably incorrect. Try a
## different factor score estimation method.
## Warning in fa.stats(r = r, f = f, phi = phi, n.obs = n.obs, np.obs = np.obs, :
## An ultra-Heywood case was detected. Examine the results carefully
## Warning in fac(r = r, nfactors = nfactors, n.obs = n.obs, rotate = rotate, : An
## ultra-Heywood case was detected. Examine the results carefully

## Parallel analysis suggests that the number of factors =  1  and the number of components =  1
fa.parallel(Kiga46[, 7:10])
## Warning in fa.stats(r = r, f = f, phi = phi, n.obs = n.obs, np.obs = np.obs, :
## The estimated weights for the factor scores are probably incorrect. Try a
## different factor score estimation method.
## Warning in fa.stats(r = r, f = f, phi = phi, n.obs = n.obs, np.obs = np.obs, :
## An ultra-Heywood case was detected. Examine the results carefully
## Warning in fac(r = r, nfactors = nfactors, n.obs = n.obs, rotate = rotate, : An
## ultra-Heywood case was detected. Examine the results carefully

## Parallel analysis suggests that the number of factors =  0  and the number of components =  1
fak461 <- fa(Kiga46[, 3:10], nfactors = 1)
fak462 <- fa(Kiga46[, 3:10], nfactors = 2)
## Warning in fa.stats(r = r, f = f, phi = phi, n.obs = n.obs, np.obs = np.obs, :
## The estimated weights for the factor scores are probably incorrect. Try a
## different factor score estimation method.
## Warning in fac(r = r, nfactors = nfactors, n.obs = n.obs, rotate = rotate, : An
## ultra-Heywood case was detected. Examine the results carefully
fak463 <- fa(Kiga46[, 3:10], nfactors = 3)

faPsyk46 <- fa(Kiga46[, 3:6], nfactors = 1)

faPiak46 <- fa(Kiga46[, 7:10], nfactors = 1)

print(fak461$loadings)
## 
## Loadings:
##         MR1   
## RAVEN1   0.660
## RAVEN2   0.559
## RAVEN3   0.329
## RAVEN4   0.615
## PIAGET1  0.372
## PIAGET2  0.639
## PIAGET3       
## PIAGET4  0.372
## 
##                 MR1
## SS loadings    1.92
## Proportion Var 0.24
print(fak462$loadings)
## 
## Loadings:
##         MR1    MR2   
## RAVEN1   0.723  0.284
## RAVEN2   0.551 -0.122
## RAVEN3   0.349  0.274
## RAVEN4   0.608 -0.124
## PIAGET1  0.373 -0.267
## PIAGET2  0.630 -0.194
## PIAGET3         1.001
## PIAGET4  0.362       
## 
##                  MR1   MR2
## SS loadings    1.983 1.303
## Proportion Var 0.248 0.163
## Cumulative Var 0.248 0.411
print(fak463$loadings)
## 
## Loadings:
##         MR1    MR2    MR3   
## RAVEN1   0.862  0.227       
## RAVEN2   0.615 -0.238       
## RAVEN3          0.395  0.432
## RAVEN4   0.645 -0.230       
## PIAGET1        -0.216  0.425
## PIAGET2  0.285 -0.169  0.522
## PIAGET3         0.882       
## PIAGET4 -0.105         0.710
## 
##                  MR1   MR2   MR3
## SS loadings    1.642 1.171 1.145
## Proportion Var 0.205 0.146 0.143
## Cumulative Var 0.205 0.352 0.495
print(faPsyk46$loadings)
## 
## Loadings:
##        MR1  
## RAVEN1 0.845
## RAVEN2 0.565
## RAVEN3 0.265
## RAVEN4 0.642
## 
##                  MR1
## SS loadings    1.517
## Proportion Var 0.379
print(faPiak46$loadings)
## 
## Loadings:
##         MR1   
## PIAGET1  0.470
## PIAGET2  0.749
## PIAGET3 -0.224
## PIAGET4  0.558
## 
##                  MR1
## SS loadings    1.143
## Proportion Var 0.286
EFATOGK46 <- c(.660, .559, .329, .615, .372, .639, 0, .372)
EFATOGK46Psy <- c(.660, .559, .329, .615); EFATOGK46Pia <- c(.372, .639, 0, .372)
EFASEPK46 <- c(.845, .565, .265, .642, .470, .749, -.224, .558)
EFASEPK46Psy <- c(.845, .565, .265, .642); EFASEPK46Pia <- c(.470, .749, -.224, .558)

cor(EFATOGK46, EFASEPK46, method = "pearson"); cor(EFATOGK46, EFASEPK46, method = "spearman"); CONGO(EFATOGK46, EFASEPK46)
## [1] 0.9579058
## [1] 0.9940298
## [1] 0.9810874
cor(EFATOGK46Psy, EFASEPK46Psy, method = "pearson"); cor(EFATOGK46Psy, EFASEPK46Psy, method = "spearman"); CONGO(EFATOGK46Psy, EFASEPK46Psy)
## [1] 0.9671078
## [1] 1
## [1] 0.9909776
cor(EFATOGK46Pia, EFASEPK46Pia, method = "pearson"); cor(EFATOGK46Pia, EFASEPK46Pia, method = "spearman"); CONGO(EFATOGK46Pia, EFASEPK46Pia)
## [1] 0.969075
## [1] 0.9486833
## [1] 0.9728287
KigaModNo <- '
gRav =~ RAVEN1 + RAVEN2 + RAVEN3 + RAVEN4
gPia =~ PIAGET1 + PIAGET2 + PIAGET3 + PIAGET4

gRav ~~ 0*gPia

PIAGET3 ~~ RAVEN1 + RAVEN3'

KigaMod <- '
gRav =~ RAVEN1 + RAVEN2 + RAVEN3 + RAVEN4
gPia =~ PIAGET1 + PIAGET2 + PIAGET3 + PIAGET4

gRav ~~ gPia

PIAGET3 ~~ RAVEN1 + RAVEN3'

KigaModID <- '
gRav =~ RAVEN1 + RAVEN2 + RAVEN3 + RAVEN4
gPia =~ PIAGET1 + PIAGET2 + PIAGET3 + PIAGET4

gRav ~~ 1*gPia

PIAGET3 ~~ RAVEN1 + RAVEN3'

KigaNo.fit <- cfa(KigaModNo, data = Kiga46, std.lv = T)
Kiga.fit <- cfa(KigaMod, data = Kiga46, std.lv = T)
KigaID.fit <- cfa(KigaModID, data = Kiga46, std.lv = T)

round(cbind("No Relationship"   = fitMeasures(KigaNo.fit, FITM),
            "Free Relationship" = fitMeasures(Kiga.fit, FITM),
            "Identical"         = fitMeasures(KigaID.fit, FITM)),3)
##                No Relationship Free Relationship Identical
## chisq                   22.891            16.380    21.623
## df                      18.000            17.000    18.000
## npar                    18.000            19.000    18.000
## cfi                      0.894             1.000     0.921
## rmsea                    0.083             0.000     0.072
## rmsea.ci.lower           0.000             0.000     0.000
## rmsea.ci.upper           0.174             0.140     0.167
## aic                    825.545           821.034   824.276
## bic                    855.489           852.642   854.220
## srmr                     0.148             0.083     0.099
#cfaHB(Kiga.fit) #will not converge

1 - pchisq(21.623 - 16.380, 1, lower.tail = T)
## [1] 0.02203524
resid(Kiga.fit, "cor"); sum(abs(resid(Kiga.fit, "cor")$cov > CRITR(39))); sum(abs(resid(Kiga.fit, "cor")$cov > CRITR(39, NP(39))))
## $type
## [1] "cor.bollen"
## 
## $cov
##         RAVEN1 RAVEN2 RAVEN3 RAVEN4 PIAGET1 PIAGET2 PIAGET3 PIAGET4
## RAVEN1   0.000                                                     
## RAVEN2  -0.007  0.000                                              
## RAVEN3   0.099 -0.161  0.000                                       
## RAVEN4  -0.017  0.032 -0.098  0.000                                
## PIAGET1 -0.095 -0.034  0.160  0.057  0.000                         
## PIAGET2  0.007  0.084  0.005  0.004 -0.049   0.000                 
## PIAGET3  0.012 -0.065  0.067 -0.075 -0.208   0.012   0.000         
## PIAGET4 -0.110 -0.037  0.194 -0.060  0.077   0.034   0.171   0.000
## [1] 0
## [1] 0

There were two local independence violations that rendered model fit atrocious; they were modeled out with residual covariances. Doing so barely affected the g factor relationship.
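
As a cross-check on where those two violations came from, one could refit the free-correlation model without the residual covariances and look at the largest modification indices; the base-model syntax and object names below are sketch assumptions rather than part of the original fits:

#Sketch: drop the residual covariances and inspect the largest modification indices,
#which would be expected to flag the PIAGET3 pairs modeled above
KigaModBase <- '
gRav =~ RAVEN1 + RAVEN2 + RAVEN3 + RAVEN4
gPia =~ PIAGET1 + PIAGET2 + PIAGET3 + PIAGET4

gRav ~~ gPia'

KigaBase.fit <- cfa(KigaModBase, data = Kiga46, std.lv = T)
head(modindices(KigaBase.fit, sort. = TRUE), 10)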

summary(Kiga.fit, stand = T, fit = T)
## lavaan 0.6-12 ended normally after 21 iterations
## 
##   Estimator                                         ML
##   Optimization method                           NLMINB
##   Number of model parameters                        19
## 
##                                                   Used       Total
##   Number of observations                            39          40
## 
## Model Test User Model:
##                                                       
##   Test statistic                                16.380
##   Degrees of freedom                                17
##   P-value (Chi-square)                           0.497
## 
## Model Test Baseline Model:
## 
##   Test statistic                                74.074
##   Degrees of freedom                                28
##   P-value                                        0.000
## 
## User Model versus Baseline Model:
## 
##   Comparative Fit Index (CFI)                    1.000
##   Tucker-Lewis Index (TLI)                       1.022
## 
## Loglikelihood and Information Criteria:
## 
##   Loglikelihood user model (H0)               -391.517
##   Loglikelihood unrestricted model (H1)       -383.327
##                                                       
##   Akaike (AIC)                                 821.034
##   Bayesian (BIC)                               852.642
##   Sample-size adjusted Bayesian (BIC)          793.209
## 
## Root Mean Square Error of Approximation:
## 
##   RMSEA                                          0.000
##   90 Percent confidence interval - lower         0.000
##   90 Percent confidence interval - upper         0.140
##   P-value RMSEA <= 0.05                          0.602
## 
## Standardized Root Mean Square Residual:
## 
##   SRMR                                           0.083
## 
## Parameter Estimates:
## 
##   Standard errors                             Standard
##   Information                                 Expected
##   Information saturated (h1) model          Structured
## 
## Latent Variables:
##                    Estimate  Std.Err  z-value  P(>|z|)   Std.lv  Std.all
##   gRav =~                                                               
##     RAVEN1            0.773    0.159    4.864    0.000    0.773    0.778
##     RAVEN2            0.592    0.161    3.675    0.000    0.592    0.600
##     RAVEN3            0.313    0.174    1.802    0.072    0.313    0.316
##     RAVEN4            0.668    0.159    4.197    0.000    0.668    0.676
##   gPia =~                                                               
##     PIAGET1           0.456    0.185    2.468    0.014    0.456    0.458
##     PIAGET2           0.748    0.195    3.831    0.000    0.748    0.785
##     PIAGET3          -0.130    0.175   -0.745    0.456   -0.130   -0.135
##     PIAGET4           0.276    0.117    2.360    0.018    0.276    0.438
## 
## Covariances:
##                    Estimate  Std.Err  z-value  P(>|z|)   Std.lv  Std.all
##   gRav ~~                                                               
##     gPia              0.575    0.189    3.047    0.002    0.575    0.575
##  .RAVEN1 ~~                                                             
##    .PIAGET3           0.330    0.126    2.625    0.009    0.330    0.554
##  .RAVEN3 ~~                                                             
##    .PIAGET3           0.268    0.141    1.899    0.058    0.268    0.299
## 
## Variances:
##                    Estimate  Std.Err  z-value  P(>|z|)   Std.lv  Std.all
##    .RAVEN1            0.390    0.172    2.263    0.024    0.390    0.395
##    .RAVEN2            0.624    0.166    3.758    0.000    0.624    0.641
##    .RAVEN3            0.881    0.206    4.267    0.000    0.881    0.900
##    .RAVEN4            0.529    0.158    3.336    0.001    0.529    0.542
##    .PIAGET1           0.785    0.203    3.873    0.000    0.785    0.790
##    .PIAGET2           0.349    0.236    1.478    0.139    0.349    0.384
##    .PIAGET3           0.911    0.206    4.427    0.000    0.911    0.982
##    .PIAGET4           0.321    0.082    3.939    0.000    0.321    0.808
##     gRav              1.000                               1.000    1.000
##     gPia              1.000                               1.000    1.000
semPaths(Kiga.fit, "std", title = F, residuals = F, pastel = T, mar = c(2, 1, 3, 1), posCol = c("skyblue4"), layout = "circle", exoCov = T)

Shul (6-to-8-Year Olds); n = 35

fa.parallel(Shul68[, 3:14])
## Warning in fa.stats(r = r, f = f, phi = phi, n.obs = n.obs, np.obs = np.obs, :
## The estimated weights for the factor scores are probably incorrect. Try a
## different factor score estimation method.
## Warning in fac(r = r, nfactors = nfactors, n.obs = n.obs, rotate = rotate, : An
## ultra-Heywood case was detected. Examine the results carefully
## Warning in fa.stats(r = r, f = f, phi = phi, n.obs = n.obs, np.obs = np.obs, :
## The estimated weights for the factor scores are probably incorrect. Try a
## different factor score estimation method.

## Parallel analysis suggests that the number of factors =  1  and the number of components =  1
fa.parallel(Shul68[, 3:10])
## Warning in fa.stats(r = r, f = f, phi = phi, n.obs = n.obs, np.obs = np.obs, :
## The estimated weights for the factor scores are probably incorrect. Try a
## different factor score estimation method.
## Warning in fac(r = r, nfactors = nfactors, n.obs = n.obs, rotate = rotate, : An
## ultra-Heywood case was detected. Examine the results carefully

## Parallel analysis suggests that the number of factors =  2  and the number of components =  1
fa.parallel(Shul68[, 11:14])
## Warning in fa.stats(r = r, f = f, phi = phi, n.obs = n.obs, np.obs = np.obs, :
## The estimated weights for the factor scores are probably incorrect. Try a
## different factor score estimation method.
## Warning in fa.stats(r = r, f = f, phi = phi, n.obs = n.obs, np.obs = np.obs, :
## An ultra-Heywood case was detected. Examine the results carefully
## Warning in fac(r = r, nfactors = nfactors, n.obs = n.obs, rotate = rotate, : An
## ultra-Heywood case was detected. Examine the results carefully

## Parallel analysis suggests that the number of factors =  0  and the number of components =  1
fas68 <- fa(Shul68[, 3:14], nfactors = 1)

faPsys68 <- fa(Shul68[, 3:10], nfactors = 1)

faPias68 <- fa(Shul68[, 11:14], nfactors = 1)

print(fas68$loadings)
## 
## Loadings:
##         MR1  
## ZVT1    0.659
## ZVT2    0.678
## ZVT3    0.860
## ZVT4    0.785
## RAVEN1  0.719
## RAVEN2  0.619
## RAVEN3  0.608
## RAVEN4  0.769
## PIAGET1 0.430
## PIAGET2 0.512
## PIAGET3 0.231
## PIAGET4 0.450
## 
##                  MR1
## SS loadings    4.815
## Proportion Var 0.401
print(faPsys68$loadings)
## 
## Loadings:
##        MR1  
## ZVT1   0.668
## ZVT2   0.690
## ZVT3   0.893
## ZVT4   0.805
## RAVEN1 0.734
## RAVEN2 0.608
## RAVEN3 0.542
## RAVEN4 0.778
## 
##                  MR1
## SS loadings    4.177
## Proportion Var 0.522
print(faPias68$loadings)
## 
## Loadings:
##         MR1  
## PIAGET1 0.702
## PIAGET2 0.703
## PIAGET3 0.304
## PIAGET4 0.331
## 
##                  MR1
## SS loadings    1.189
## Proportion Var 0.297
EFATOGS68 <- c(.659, .678, .860, .785, .719, .619, .608, .769, .430, .512, .231, .450)
EFATOGS68Psy <- c(.659, .678, .860, .785, .719, .619, .608, .769); EFATOGS68Pia <- c(.430, .512, .231, .450)
EFASEPS68 <- c(.668, .690, .893, .805, .734, .608, .542, .778, .702, .703, .304, .331)
EFASEPS68Psy <- c(.668, .690, .893, .805, .734, .608, .542, .778); EFASEPS68Pia <- c(.702, .703, .304, .331)
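The loading vectors above are typed in from the printed output; as a small side sketch (not part of the original code; the *_auto names are only illustrative), the same vectors can be pulled straight from the fa objects, which keeps them in input-column order and avoids transcription slips:

#Extract the first-factor loadings programmatically instead of re-typing them
EFATOGS68_auto <- as.numeric(fas68$loadings[, 1])
EFASEPS68_auto <- c(as.numeric(faPsys68$loadings[, 1]), as.numeric(faPias68$loadings[, 1]))
round(EFATOGS68_auto, 3); round(EFASEPS68_auto, 3)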

cor(EFATOGS68, EFASEPS68, method = "pearson"); cor(EFATOGS68, EFASEPS68, method = "spearman"); CONGO(EFATOGS68, EFASEPS68)
## [1] 0.8272534
## [1] 0.7972028
## [1] 0.9880394
cor(EFATOGS68Psy, EFASEPS68Psy, method = "pearson"); cor(EFATOGS68Psy, EFASEPS68Psy, method = "spearman"); CONGO(EFATOGS68Psy, EFASEPS68Psy)
## [1] 0.9838514
## [1] 1
## [1] 0.999246
cor(EFATOGS68Pia, EFASEPS68Pia, method = "pearson"); cor(EFATOGS68Pia, EFASEPS68Pia, method = "spearman"); CONGO(EFATOGS68Pia, EFASEPS68Pia)
## [1] 0.6556697
## [1] 0.8
## [1] 0.9636597
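As a cross-check on the CONGO helper, psych's factor.congruence computes the same Tucker congruence coefficient; a sketch using the vectors above (it rounds its output by default):

#Tucker congruence via psych; should agree with CONGO up to rounding
factor.congruence(as.matrix(EFATOGS68), as.matrix(EFASEPS68))
factor.congruence(as.matrix(EFATOGS68Pia), as.matrix(EFASEPS68Pia))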
ShulModNo <- '
gRav =~ RAVEN1 + RAVEN2 + RAVEN3 + RAVEN4 
gZVT =~ ZVT1 + ZVT2 + ZVT3 + ZVT4
gPsy =~ gRav + gZVT
gPia =~ PIAGET1 + PIAGET2 + PIAGET3 + PIAGET4

gPsy ~~ 0*gPia'

ShulMod <- '
gRav =~ RAVEN1 + RAVEN2 + RAVEN3 + RAVEN4 
gZVT =~ ZVT1 + ZVT2 + ZVT3 + ZVT4
gPsy =~ gRav + gZVT
gPia =~ PIAGET1 + PIAGET2 + PIAGET3 + PIAGET4

gPsy ~~ gPia'

ShulModID <- '
gRav =~ RAVEN1 + RAVEN2 + RAVEN3 + RAVEN4 
gZVT =~ ZVT1 + ZVT2 + ZVT3 + ZVT4
gPsy =~ gRav + gZVT
gPia =~ PIAGET1 + PIAGET2 + PIAGET3 + PIAGET4

gPsy ~~ 1*gPia'

ShulNo.fit <- cfa(ShulModNo, data = Shul68, std.lv = T); "\n"
## Warning in lav_model_vcov(lavmodel = lavmodel, lavsamplestats = lavsamplestats, : lavaan WARNING:
##     Could not compute standard errors! The information matrix could
##     not be inverted. This may be a symptom that the model is not
##     identified.
## [1] "\n"
Shul.fit <- cfa(ShulMod, data = Shul68, std.lv = T)
ShulID.fit <- cfa(ShulModID, data = Shul68, std.lv = T)

round(cbind("No Relationship"   = fitMeasures(ShulNo.fit, FITM),
            "Free Relationship" = fitMeasures(Shul.fit, FITM),
            "Identical"         = fitMeasures(ShulID.fit, FITM)), 3)
##                No Relationship Free Relationship Identical
## chisq                   66.708            56.982    63.573
## df                      52.000            51.000    52.000
## npar                    26.000            27.000    26.000
## cfi                      0.916             0.966     0.934
## rmsea                    0.090             0.058     0.080
## rmsea.ci.lower           0.000             0.000     0.000
## rmsea.ci.upper           0.148             0.127     0.140
## aic                   1022.107          1014.381  1018.973
## bic                   1062.546          1056.376  1059.412
## srmr                     0.193             0.091     0.082
#cfaHB(Shul.fit) #does not converge

1 - pchisq(63.573 - 56.982, 1, lower.tail = T)
## [1] 0.01024956
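The difference test above is computed by hand from the fit table; the same nested comparisons can be run directly with lavaan's likelihood-ratio test, as a sketch using the fitted objects already in memory:

#LRTs of the constrained covariances against the freely estimated gPsy ~~ gPia
lavTestLRT(Shul.fit, ShulID.fit) #gPsy ~~ gPia fixed to 1 vs. free
lavTestLRT(Shul.fit, ShulNo.fit) #gPsy ~~ gPia fixed to 0 vs. free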
resid(Shul.fit, "cor"); sum(abs(resid(Shul.fit, "cor")$cov) > CRITR(35)); sum(abs(resid(Shul.fit, "cor")$cov) > CRITR(35, NP(35)))
## $type
## [1] "cor.bollen"
## 
## $cov
##         RAVEN1 RAVEN2 RAVEN3 RAVEN4 ZVT1   ZVT2   ZVT3   ZVT4   PIAGET1 PIAGET2
## RAVEN1   0.000                                                                 
## RAVEN2   0.023  0.000                                                          
## RAVEN3  -0.129 -0.031  0.000                                                   
## RAVEN4  -0.006  0.009  0.067  0.000                                            
## ZVT1     0.096 -0.139 -0.014  0.003  0.000                                     
## ZVT2     0.053 -0.132 -0.149 -0.094  0.256  0.000                              
## ZVT3     0.074  0.019 -0.051  0.018 -0.055 -0.012  0.000                       
## ZVT4    -0.009 -0.067  0.121 -0.034 -0.003 -0.020  0.010  0.000                
## PIAGET1 -0.041  0.015  0.219 -0.070  0.020 -0.078 -0.053 -0.079  0.000         
## PIAGET2  0.055  0.032  0.114 -0.137  0.025  0.001  0.048 -0.030  0.016   0.000 
## PIAGET3 -0.029  0.018  0.117  0.193  0.048 -0.077  0.036 -0.019  0.020  -0.049 
## PIAGET4  0.183  0.142  0.297  0.198  0.133  0.155  0.100  0.126 -0.043  -0.046 
##         PIAGET3 PIAGET4
## RAVEN1                 
## RAVEN2                 
## RAVEN3                 
## RAVEN4                 
## ZVT1                   
## ZVT2                   
## ZVT3                   
## ZVT4                   
## PIAGET1                
## PIAGET2                
## PIAGET3  0.000         
## PIAGET4  0.114   0.000
## [1] 0
## [1] 0

There were no violations of local independence.

summary(Shul.fit, stand = T, fit = T)
## lavaan 0.6-12 ended normally after 40 iterations
## 
##   Estimator                                         ML
##   Optimization method                           NLMINB
##   Number of model parameters                        27
## 
##                                                   Used       Total
##   Number of observations                            35          40
## 
## Model Test User Model:
##                                                       
##   Test statistic                                56.982
##   Degrees of freedom                                51
##   P-value (Chi-square)                           0.262
## 
## Model Test Baseline Model:
## 
##   Test statistic                               240.449
##   Degrees of freedom                                66
##   P-value                                        0.000
## 
## User Model versus Baseline Model:
## 
##   Comparative Fit Index (CFI)                    0.966
##   Tucker-Lewis Index (TLI)                       0.956
## 
## Loglikelihood and Information Criteria:
## 
##   Loglikelihood user model (H0)               -480.191
##   Loglikelihood unrestricted model (H1)       -451.700
##                                                       
##   Akaike (AIC)                                1014.381
##   Bayesian (BIC)                              1056.376
##   Sample-size adjusted Bayesian (BIC)          972.069
## 
## Root Mean Square Error of Approximation:
## 
##   RMSEA                                          0.058
##   90 Percent confidence interval - lower         0.000
##   90 Percent confidence interval - upper         0.127
##   P-value RMSEA <= 0.05                          0.420
## 
## Standardized Root Mean Square Residual:
## 
##   SRMR                                           0.091
## 
## Parameter Estimates:
## 
##   Standard errors                             Standard
##   Information                                 Expected
##   Information saturated (h1) model          Structured
## 
## Latent Variables:
##                    Estimate  Std.Err  z-value  P(>|z|)   Std.lv  Std.all
##   gRav =~                                                               
##     RAVEN1            0.298    0.218    1.363    0.173    0.751    0.768
##     RAVEN2            0.285    0.211    1.354    0.176    0.720    0.722
##     RAVEN3            0.240    0.181    1.327    0.184    0.607    0.616
##     RAVEN4            0.335    0.245    1.368    0.171    0.845    0.857
##   gZVT =~                                                               
##     ZVT1              0.335    0.142    2.363    0.018    0.645    0.652
##     ZVT2              0.378    0.153    2.479    0.013    0.728    0.741
##     ZVT3              0.472    0.180    2.625    0.009    0.909    0.936
##     ZVT4              0.445    0.171    2.601    0.009    0.858    0.867
##   gPsy =~                                                               
##     gRav              2.318    1.983    1.169    0.242    0.918    0.918
##     gZVT              1.647    0.855    1.926    0.054    0.855    0.855
##   gPia =~                                                               
##     PIAGET1           0.716    0.173    4.125    0.000    0.716    0.731
##     PIAGET2           0.763    0.166    4.605    0.000    0.763    0.822
##     PIAGET3           0.123    0.164    0.751    0.453    0.123    0.142
##     PIAGET4           0.199    0.135    1.477    0.140    0.199    0.275
## 
## Covariances:
##                    Estimate  Std.Err  z-value  P(>|z|)   Std.lv  Std.all
##   gPsy ~~                                                               
##     gPia              0.628    0.153    4.105    0.000    0.628    0.628
## 
## Variances:
##                    Estimate  Std.Err  z-value  P(>|z|)   Std.lv  Std.all
##    .RAVEN1            0.393    0.118    3.336    0.001    0.393    0.411
##    .RAVEN2            0.477    0.134    3.549    0.000    0.477    0.479
##    .RAVEN3            0.603    0.157    3.832    0.000    0.603    0.621
##    .RAVEN4            0.258    0.101    2.554    0.011    0.258    0.265
##    .ZVT1              0.563    0.143    3.942    0.000    0.563    0.575
##    .ZVT2              0.435    0.115    3.774    0.000    0.435    0.450
##    .ZVT3              0.116    0.064    1.812    0.070    0.116    0.124
##    .ZVT4              0.242    0.080    3.038    0.002    0.242    0.248
##    .PIAGET1           0.447    0.179    2.502    0.012    0.447    0.466
##    .PIAGET2           0.279    0.175    1.596    0.111    0.279    0.324
##    .PIAGET3           0.736    0.177    4.161    0.000    0.736    0.980
##    .PIAGET4           0.482    0.118    4.092    0.000    0.482    0.924
##    .gRav              1.000                               0.157    0.157
##    .gZVT              1.000                               0.269    0.269
##     gPsy              1.000                               1.000    1.000
##     gPia              1.000                               1.000    1.000
semPaths(Shul.fit, "std", title = F, residuals = F, pastel = T, mar = c(2, 1, 3, 1), posCol = c("skyblue4"), layout = "circle", exoCov = T)

Shul (9-to-10-Year Olds); n = 41

fa.parallel(Shul910[, 3:14])

## Parallel analysis suggests that the number of factors =  3  and the number of components =  2
fa.parallel(Shul910[, 3:10])
## [fa.parallel warnings condensed: "The estimated weights for the factor scores are probably incorrect" and "An ultra-Heywood case was detected. Examine the results carefully", repeated across simulated solutions]

## Parallel analysis suggests that the number of factors =  2  and the number of components =  2
fa.parallel(Shul910[, 11:14])
## [fa.parallel warnings condensed: "The estimated weights for the factor scores are probably incorrect" and "An ultra-Heywood case was detected. Examine the results carefully", repeated across simulated solutions]

## Parallel analysis suggests that the number of factors =  1  and the number of components =  1
fas9101 <- fa(Shul910[, 3:14], nfactors = 1)
fas9102 <- fa(Shul910[, 3:14], nfactors = 2)
fas9103 <- fa(Shul910[, 3:14], nfactors = 3)

faPsys9101 <- fa(Shul910[, 3:10], nfactors = 1)
faPsys9102 <- fa(Shul910[, 3:10], nfactors = 2)

faPias910 <- fa(Shul910[, 11:14], nfactors = 1)

print(fas9101$loadings)
## 
## Loadings:
##         MR1  
## ZVT1    0.614
## ZVT2    0.531
## ZVT3    0.639
## ZVT4    0.672
## RAVEN1  0.577
## RAVEN2  0.696
## RAVEN3  0.572
## RAVEN4  0.660
## PIAGET1 0.569
## PIAGET2 0.186
## PIAGET3 0.605
## PIAGET4 0.666
## 
##                  MR1
## SS loadings    4.266
## Proportion Var 0.355
print(fas9102$loadings)
## 
## Loadings:
##         MR1    MR2   
## ZVT1            0.900
## ZVT2            0.840
## ZVT3            0.799
## ZVT4     0.114  0.780
## RAVEN1   0.497  0.179
## RAVEN2   0.814       
## RAVEN3   0.665       
## RAVEN4   0.883       
## PIAGET1  0.511  0.149
## PIAGET2  0.362 -0.172
## PIAGET3  0.500  0.205
## PIAGET4  0.703       
## 
##                  MR1   MR2
## SS loadings    3.292 2.902
## Proportion Var 0.274 0.242
## Cumulative Var 0.274 0.516
print(fas9103$loadings)
## 
## Loadings:
##         MR2    MR1    MR3   
## ZVT1     0.879 -0.129  0.157
## ZVT2     0.827              
## ZVT3     0.785         0.158
## ZVT4     0.826  0.253 -0.181
## RAVEN1   0.216  0.595       
## RAVEN2          0.765  0.128
## RAVEN3          0.644       
## RAVEN4          0.963       
## PIAGET1  0.106         0.806
## PIAGET2 -0.220         0.557
## PIAGET3  0.200  0.256  0.379
## PIAGET4         0.312  0.632
## 
##                  MR2   MR1   MR3
## SS loadings    2.911 2.529 1.620
## Proportion Var 0.243 0.211 0.135
## Cumulative Var 0.243 0.453 0.588
print(faPsys9101$loadings)
## 
## Loadings:
##        MR1  
## ZVT1   0.704
## ZVT2   0.667
## ZVT3   0.705
## ZVT4   0.819
## RAVEN1 0.582
## RAVEN2 0.590
## RAVEN3 0.491
## RAVEN4 0.550
## 
##                  MR1
## SS loadings    3.340
## Proportion Var 0.417
print(faPsys9102$loadings)
## 
## Loadings:
##        MR1    MR2   
## ZVT1    0.908       
## ZVT2    0.833       
## ZVT3    0.811       
## ZVT4    0.759  0.191
## RAVEN1  0.214  0.548
## RAVEN2         0.797
## RAVEN3         0.684
## RAVEN4         0.986
## 
##                  MR1   MR2
## SS loadings    2.809 2.418
## Proportion Var 0.351 0.302
## Cumulative Var 0.351 0.653
print(faPias910$loadings)
## 
## Loadings:
##         MR1  
## PIAGET1 0.809
## PIAGET2 0.457
## PIAGET3 0.599
## PIAGET4 0.798
## 
##                  MR1
## SS loadings    1.860
## Proportion Var 0.465
EFATOGS910 <- c(.614, .531, .639, .672, .577, .696, .572, .660, .569, .186, .605, .666)
EFATOGS910Psy <- c(.614, .531, .639, .672, .577, .696, .572, .660); EFATOGS910Pia <- c(.569, .186, .605, .666)
EFASEPS910 <- c(.704, .667, .705, .719, .582, .590, .491, .550, .809, .457, .599, .798)
EFASEPS910Psy <- c(.704, .667, .705, .719, .582, .590, .491, .550); EFASEPS910Pia <- c(.809, .457, .599, .798)

cor(EFATOGS910, EFASEPS910, method = "pearson"); cor(EFATOGS910, EFASEPS910, method = "spearman"); CONGO(EFATOGS910, EFASEPS910)
## [1] 0.4982116
## [1] 0.3006993
## [1] 0.9806953
cor(EFATOGS910Psy, EFASEPS910Psy, method = "pearson"); cor(EFATOGS910Psy, EFASEPS910Psy, method = "spearman"); CONGO(EFATOGS910Psy, EFASEPS910Psy)
## [1] 0.1435842
## [1] 0.3095238
## [1] 0.9900537
cor(EFATOGS910Pia, EFASEPS910Pia, method = "pearson"); cor(EFATOGS910Pia, EFASEPS910Pia, method = "spearman"); CONGO(EFATOGS910Pia, EFASEPS910Pia)
## [1] 0.8182909
## [1] 0.4
## [1] 0.9767309
ShulNo.fit <- cfa(ShulModNo, data = Shul910, std.lv = T); "\n"
## Warning in lav_model_vcov(lavmodel = lavmodel, lavsamplestats = lavsamplestats, : lavaan WARNING:
##     Could not compute standard errors! The information matrix could
##     not be inverted. This may be a symptom that the model is not
##     identified.
## [1] "\n"
Shul.fit <- cfa(ShulMod, data = Shul910, std.lv = T)
ShulID.fit <- cfa(ShulModID, data = Shul910, std.lv = T)

round(cbind("No Relationship"   = fitMeasures(ShulNo.fit, FITM),
            "Free Relationship" = fitMeasures(Shul.fit, FITM),
            "Identical"         = fitMeasures(ShulID.fit, FITM)), 3)
##                No Relationship Free Relationship Identical
## chisq                   90.574            73.413    73.433
## df                      52.000            51.000    52.000
## npar                    26.000            27.000    26.000
## cfi                      0.846             0.911     0.915
## rmsea                    0.135             0.104     0.100
## rmsea.ci.lower           0.087             0.041     0.036
## rmsea.ci.upper           0.180             0.153     0.150
## aic                   1209.521          1194.360  1192.380
## bic                   1254.073          1240.626  1236.932
## srmr                     0.205             0.095     0.096
#cfaHB(Shul.fit) #does not converge

1 - pchisq(73.433 - 73.413, 1)
## [1] 0.8875371
resid(Shul.fit, "cor"); sum(abs(resid(Shul.fit, "cor")$cov > CRITR(41))); sum(abs(resid(Shul.fit, "cor")$cov > CRITR(41, NP(41))))
## $type
## [1] "cor.bollen"
## 
## $cov
##         RAVEN1 RAVEN2 RAVEN3 RAVEN4 ZVT1   ZVT2   ZVT3   ZVT4   PIAGET1 PIAGET2
## RAVEN1   0.000                                                                 
## RAVEN2   0.055  0.000                                                          
## RAVEN3  -0.054 -0.058  0.000                                                   
## RAVEN4  -0.012 -0.004  0.035  0.000                                            
## ZVT1     0.098 -0.035  0.015 -0.096  0.000                                     
## ZVT2     0.145  0.029 -0.113 -0.057  0.030  0.000                              
## ZVT3     0.169  0.117  0.041 -0.119 -0.003 -0.051  0.000                       
## ZVT4     0.280  0.108  0.181  0.151 -0.035  0.014  0.049  0.000                
## PIAGET1 -0.202 -0.047  0.040 -0.093  0.110 -0.079  0.094  0.004  0.000         
## PIAGET2 -0.035 -0.047 -0.144 -0.054 -0.150 -0.170 -0.153 -0.329  0.106   0.000 
## PIAGET3  0.095  0.034  0.041  0.042  0.070  0.061  0.114  0.178 -0.010   0.004 
## PIAGET4  0.025  0.125  0.005  0.001 -0.027 -0.148  0.116 -0.059  0.005  -0.009 
##         PIAGET3 PIAGET4
## RAVEN1                 
## RAVEN2                 
## RAVEN3                 
## RAVEN4                 
## ZVT1                   
## ZVT2                   
## ZVT3                   
## ZVT4                   
## PIAGET1                
## PIAGET2                
## PIAGET3  0.000         
## PIAGET4 -0.026   0.000
## [1] 0
## [1] 0

There were no violations of local independence.

summary(Shul.fit, stand = T, fit = T)
## lavaan 0.6-12 ended normally after 29 iterations
## 
##   Estimator                                         ML
##   Optimization method                           NLMINB
##   Number of model parameters                        27
## 
##   Number of observations                            41
## 
## Model Test User Model:
##                                                       
##   Test statistic                                73.413
##   Degrees of freedom                                51
##   P-value (Chi-square)                           0.022
## 
## Model Test Baseline Model:
## 
##   Test statistic                               317.140
##   Degrees of freedom                                66
##   P-value                                        0.000
## 
## User Model versus Baseline Model:
## 
##   Comparative Fit Index (CFI)                    0.911
##   Tucker-Lewis Index (TLI)                       0.885
## 
## Loglikelihood and Information Criteria:
## 
##   Loglikelihood user model (H0)               -570.180
##   Loglikelihood unrestricted model (H1)       -533.473
##                                                       
##   Akaike (AIC)                                1194.360
##   Bayesian (BIC)                              1240.626
##   Sample-size adjusted Bayesian (BIC)         1156.105
## 
## Root Mean Square Error of Approximation:
## 
##   RMSEA                                          0.104
##   90 Percent confidence interval - lower         0.041
##   90 Percent confidence interval - upper         0.153
##   P-value RMSEA <= 0.05                          0.069
## 
## Standardized Root Mean Square Residual:
## 
##   SRMR                                           0.095
## 
## Parameter Estimates:
## 
##   Standard errors                             Standard
##   Information                                 Expected
##   Information saturated (h1) model          Structured
## 
## Latent Variables:
##                    Estimate  Std.Err  z-value  P(>|z|)   Std.lv  Std.all
##   gRav =~                                                               
##     RAVEN1            0.450    0.140    3.213    0.001    0.600    0.608
##     RAVEN2            0.627    0.156    4.013    0.000    0.837    0.847
##     RAVEN3            0.530    0.147    3.612    0.000    0.707    0.716
##     RAVEN4            0.682    0.164    4.156    0.000    0.909    0.921
##   gZVT =~                                                               
##     ZVT1              0.803    0.122    6.577    0.000    0.887    0.898
##     ZVT2              0.736    0.126    5.858    0.000    0.813    0.823
##     ZVT3              0.728    0.126    5.772    0.000    0.803    0.813
##     ZVT4              0.708    0.127    5.566    0.000    0.781    0.791
##   gPsy =~                                                               
##     gRav              0.883    0.447    1.976    0.048    0.662    0.662
##     gZVT              0.468    0.230    2.036    0.042    0.424    0.424
##   gPia =~                                                               
##     PIAGET1           0.740    0.143    5.160    0.000    0.740    0.749
##     PIAGET2           0.401    0.161    2.495    0.013    0.401    0.405
##     PIAGET3           0.623    0.150    4.157    0.000    0.623    0.631
##     PIAGET4           0.837    0.138    6.056    0.000    0.837    0.848
## 
## Covariances:
##                    Estimate  Std.Err  z-value  P(>|z|)   Std.lv  Std.all
##   gPsy ~~                                                               
##     gPia              0.964    0.239    4.035    0.000    0.964    0.964
## 
## Variances:
##                    Estimate  Std.Err  z-value  P(>|z|)   Std.lv  Std.all
##    .RAVEN1            0.616    0.145    4.258    0.000    0.616    0.631
##    .RAVEN2            0.275    0.088    3.133    0.002    0.275    0.282
##    .RAVEN3            0.476    0.118    4.026    0.000    0.476    0.488
##    .RAVEN4            0.149    0.079    1.885    0.059    0.149    0.153
##    .ZVT1              0.188    0.074    2.547    0.011    0.188    0.193
##    .ZVT2              0.315    0.090    3.520    0.000    0.315    0.323
##    .ZVT3              0.330    0.092    3.592    0.000    0.330    0.338
##    .ZVT4              0.365    0.098    3.735    0.000    0.365    0.374
##    .PIAGET1           0.428    0.128    3.332    0.001    0.428    0.439
##    .PIAGET2           0.815    0.187    4.362    0.000    0.815    0.836
##    .PIAGET3           0.587    0.149    3.947    0.000    0.587    0.602
##    .PIAGET4           0.274    0.121    2.274    0.023    0.274    0.281
##    .gRav              1.000                               0.562    0.562
##    .gZVT              1.000                               0.820    0.820
##     gPsy              1.000                               1.000    1.000
##     gPia              1.000                               1.000    1.000
semPaths(Shul.fit, "std", title = F, residuals = F, pastel = T, mar = c(2, 1, 3, 1), posCol = c("skyblue4"), layout = "circle", exoCov = T)

6-to-8 versus 9-to-10-Year Olds

ShulAC.fit <- cfa(ShulMod, data = ShulAll, std.lv = T, group = "STUDIE", orthogonal = T)

ShulAM.fit <- cfa(ShulMod, data = ShulAll, std.lv = F, group = "STUDIE", orthogonal = T, group.equal = "loadings"); "\n" #Sampling error abounds in small multigroup models
## Warning in lav_object_post_check(object): lavaan WARNING: some estimated lv
## variances are negative
## [1] "\n"
ShulAS.fit <- cfa(ShulMod, data = ShulAll, std.lv = F, group = "STUDIE", orthogonal = T, group.equal = c("loadings", "intercepts")); "\n"
## Warning in lav_model_vcov(lavmodel = lavmodel, lavsamplestats = lavsamplestats, : lavaan WARNING:
##     The variance-covariance matrix of the estimated parameters (vcov)
##     does not appear to be positive definite! The smallest eigenvalue
##     (= 3.988161e-16) is close to zero. This may be a symptom that the
##     model is not identified.

## Warning in lav_model_vcov(lavmodel = lavmodel, lavsamplestats = lavsamplestats, : lavaan WARNING: some estimated lv variances are negative
## [1] "\n"
ShulAF.fit <- cfa(ShulMod, data = ShulAll, std.lv = F, group = "STUDIE", orthogonal = T, group.equal = c("loadings", "intercepts", "residuals")); "\n"
## Warning in lav_model_vcov(lavmodel = lavmodel, lavsamplestats = lavsamplestats, : lavaan WARNING:
##     The variance-covariance matrix of the estimated parameters (vcov)
##     does not appear to be positive definite! The smallest eigenvalue
##     (= 9.154988e-16) is close to zero. This may be a symptom that the
##     model is not identified.
## [1] "\n"
ShulAV.fit <- cfa(ShulMod, data = ShulAll, std.lv = T, group = "STUDIE", orthogonal = T, group.equal = c("loadings", "intercepts", "residuals", "lv.covariances"))

ShulAME.fit <- cfa(ShulMod, data = ShulAll, std.lv = T, group = "STUDIE", orthogonal = T, group.equal = c("loadings", "intercepts", "residuals", "lv.covariances", "means"))

round(cbind(CONFIGURAL = fitMeasures(ShulAC.fit, FITM),
            METRIC = fitMeasures(ShulAM.fit, FITM),
            SCALAR = fitMeasures(ShulAS.fit, FITM),
            STRICT = fitMeasures(ShulAF.fit, FITM),
            LVARS = fitMeasures(ShulAV.fit, FITM),
            MEANS = fitMeasures(ShulAME.fit, FITM)),3)
##                CONFIGURAL   METRIC   SCALAR   STRICT    LVARS    MEANS
## chisq             130.395  145.830  146.428  158.626  159.147  159.625
## df                102.000  112.000  120.000  132.000  133.000  137.000
## npar               78.000   68.000   60.000   48.000   47.000   43.000
## cfi                 0.933    0.921    0.938    0.937    0.939    0.947
## rmsea               0.086    0.089    0.076    0.073    0.072    0.066
## rmsea.ci.lower      0.028    0.040    0.000    0.000    0.000    0.000
## rmsea.ci.upper      0.126    0.127    0.116    0.112    0.111    0.106
## aic              2256.741 2252.176 2236.774 2224.972 2223.493 2215.971
## bic              2438.538 2410.666 2376.618 2336.847 2333.037 2316.192
## srmr                0.087    0.109    0.109    0.107    0.112    0.112
1 - pchisq(145.830 - 130.395, 10, lower.tail = T)
## [1] 0.116989
1 - pchisq(158.626 - 146.428, 12, lower.tail = T)
## [1] 0.429912
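The same invariance ladder can be tested in one call with lavaan's sequential likelihood-ratio test; a sketch using the fitted objects above (models are compared in order of their degrees of freedom):

#Sequential LRTs: configural -> metric -> scalar -> strict -> latent variances -> means
lavTestLRT(ShulAC.fit, ShulAM.fit, ShulAS.fit, ShulAF.fit, ShulAV.fit, ShulAME.fit)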
summary(ShulAME.fit, stand = T, fit = T)
## lavaan 0.6-12 ended normally after 91 iterations
## 
##   Estimator                                         ML
##   Optimization method                           NLMINB
##   Number of model parameters                        82
##   Number of equality constraints                    39
## 
##   Number of observations per group:               Used       Total
##     2Schul68                                        35          40
##     3Schul910                                       41          41
## 
## Model Test User Model:
##                                                       
##   Test statistic                               159.625
##   Degrees of freedom                               137
##   P-value (Chi-square)                           0.090
##   Test statistic for each group:
##     2Schul68                                    75.901
##     3Schul910                                   83.724
## 
## Model Test Baseline Model:
## 
##   Test statistic                               557.589
##   Degrees of freedom                               132
##   P-value                                        0.000
## 
## User Model versus Baseline Model:
## 
##   Comparative Fit Index (CFI)                    0.947
##   Tucker-Lewis Index (TLI)                       0.949
## 
## Loglikelihood and Information Criteria:
## 
##   Loglikelihood user model (H0)              -1064.985
##   Loglikelihood unrestricted model (H1)       -985.173
##                                                       
##   Akaike (AIC)                                2215.971
##   Bayesian (BIC)                              2316.192
##   Sample-size adjusted Bayesian (BIC)         2180.653
## 
## Root Mean Square Error of Approximation:
## 
##   RMSEA                                          0.066
##   90 Percent confidence interval - lower         0.000
##   90 Percent confidence interval - upper         0.106
##   P-value RMSEA <= 0.05                          0.287
## 
## Standardized Root Mean Square Residual:
## 
##   SRMR                                           0.112
## 
## Parameter Estimates:
## 
##   Standard errors                             Standard
##   Information                                 Expected
##   Information saturated (h1) model          Structured
## 
## 
## Group 1 [2Schul68]:
## 
## Latent Variables:
##                    Estimate  Std.Err  z-value  P(>|z|)   Std.lv  Std.all
##   gRav =~                                                               
##     RAVEN1  (.p1.)    0.128    0.383    0.335    0.738    0.684    0.685
##     RAVEN2  (.p2.)    0.151    0.450    0.335    0.737    0.803    0.793
##     RAVEN3  (.p3.)    0.132    0.393    0.335    0.738    0.701    0.699
##     RAVEN4  (.p4.)    0.172    0.514    0.335    0.738    0.916    0.904
##   gZVT =~                                                               
##     ZVT1    (.p5.)    0.522    0.117    4.483    0.000    0.796    0.800
##     ZVT2    (.p6.)    0.520    0.116    4.479    0.000    0.792    0.799
##     ZVT3    (.p7.)    0.558    0.121    4.619    0.000    0.850    0.862
##     ZVT4    (.p8.)    0.546    0.119    4.567    0.000    0.832    0.836
##   gPsy =~                                                               
##     gRav    (.p9.)    5.231   16.168    0.324    0.746    0.982    0.982
##     gZVT    (.10.)    1.150    0.375    3.067    0.002    0.755    0.755
##   gPia =~                                                               
##     PIAGET1 (.11.)    0.492    0.110    4.459    0.000    0.492    0.552
##     PIAGET2 (.12.)    0.338    0.102    3.326    0.001    0.338    0.369
##     PIAGET3 (.13.)    0.372    0.100    3.707    0.000    0.372    0.423
##     PIAGET4 (.14.)    0.488    0.103    4.750    0.000    0.488    0.629
## 
## Covariances:
##                    Estimate  Std.Err  z-value  P(>|z|)   Std.lv  Std.all
##   gPsy ~~                                                               
##     gPia    (.15.)    0.819    0.143    5.729    0.000    0.819    0.819
## 
## Intercepts:
##                    Estimate  Std.Err  z-value  P(>|z|)   Std.lv  Std.all
##    .RAVEN1  (.32.)    0.016    0.111    0.142    0.887    0.016    0.016
##    .RAVEN2  (.33.)    0.005    0.112    0.045    0.964    0.005    0.005
##    .RAVEN3  (.34.)    0.000    0.112    0.000    1.000    0.000    0.000
##    .RAVEN4  (.35.)    0.000    0.110    0.001    1.000    0.000    0.000
##    .ZVT1    (.36.)    0.031    0.111    0.281    0.779    0.031    0.031
##    .ZVT2    (.37.)    0.021    0.111    0.191    0.849    0.021    0.021
##    .ZVT3    (.38.)    0.040    0.110    0.368    0.713    0.040    0.041
##    .ZVT4    (.39.)    0.015    0.111    0.133    0.894    0.015    0.015
##    .PIAGET1 (.40.)    0.020    0.109    0.181    0.857    0.020    0.022
##    .PIAGET2 (.41.)    0.053    0.108    0.491    0.623    0.053    0.058
##    .PIAGET3 (.42.)    0.063    0.105    0.605    0.545    0.063    0.072
##    .PIAGET4 (.43.)    0.085    0.096    0.884    0.377    0.085    0.110
##    .gRav              0.000                               0.000    0.000
##    .gZVT              0.000                               0.000    0.000
##     gPsy              0.000                               0.000    0.000
##     gPia              0.000                               0.000    0.000
## 
## Variances:
##                    Estimate  Std.Err  z-value  P(>|z|)   Std.lv  Std.all
##    .RAVEN1  (.16.)    0.530    0.096    5.520    0.000    0.530    0.531
##    .RAVEN2  (.17.)    0.382    0.079    4.856    0.000    0.382    0.372
##    .RAVEN3  (.18.)    0.514    0.094    5.463    0.000    0.514    0.511
##    .RAVEN4  (.19.)    0.188    0.064    2.933    0.003    0.188    0.183
##    .ZVT1    (.20.)    0.356    0.072    4.938    0.000    0.356    0.360
##    .ZVT2    (.21.)    0.356    0.072    4.952    0.000    0.356    0.362
##    .ZVT3    (.22.)    0.251    0.060    4.165    0.000    0.251    0.258
##    .ZVT4    (.23.)    0.297    0.065    4.548    0.000    0.297    0.301
##    .PIAGET1 (.24.)    0.553    0.115    4.830    0.000    0.553    0.695
##    .PIAGET2 (.25.)    0.729    0.128    5.699    0.000    0.729    0.864
##    .PIAGET3 (.26.)    0.638    0.116    5.518    0.000    0.638    0.821
##    .PIAGET4 (.27.)    0.364    0.088    4.152    0.000    0.364    0.605
##    .gRav              1.000                               0.035    0.035
##    .gZVT              1.000                               0.431    0.431
##     gPsy              1.000                               1.000    1.000
##     gPia              1.000                               1.000    1.000
## 
## 
## Group 2 [3Schul910]:
## 
## Latent Variables:
##                    Estimate  Std.Err  z-value  P(>|z|)   Std.lv  Std.all
##   gRav =~                                                               
##     RAVEN1  (.p1.)    0.128    0.383    0.335    0.738    0.642    0.661
##     RAVEN2  (.p2.)    0.151    0.450    0.335    0.737    0.754    0.773
##     RAVEN3  (.p3.)    0.132    0.393    0.335    0.738    0.658    0.676
##     RAVEN4  (.p4.)    0.172    0.514    0.335    0.738    0.860    0.893
##   gZVT =~                                                               
##     ZVT1    (.p5.)    0.522    0.117    4.483    0.000    0.795    0.800
##     ZVT2    (.p6.)    0.520    0.116    4.479    0.000    0.791    0.798
##     ZVT3    (.p7.)    0.558    0.121    4.619    0.000    0.850    0.861
##     ZVT4    (.p8.)    0.546    0.119    4.567    0.000    0.831    0.836
##   gPsy =~                                                               
##     gRav    (.p9.)    5.231   16.168    0.324    0.746    0.650    0.650
##     gZVT    (.10.)    1.150    0.375    3.067    0.002    0.469    0.469
##   gPia =~                                                               
##     PIAGET1 (.11.)    0.492    0.110    4.459    0.000    0.736    0.703
##     PIAGET2 (.12.)    0.338    0.102    3.326    0.001    0.506    0.510
##     PIAGET3 (.13.)    0.372    0.100    3.707    0.000    0.556    0.572
##     PIAGET4 (.14.)    0.488    0.103    4.750    0.000    0.729    0.770
## 
## Covariances:
##                    Estimate  Std.Err  z-value  P(>|z|)   Std.lv  Std.all
##   gPsy ~~                                                               
##     gPia    (.15.)    0.819    0.143    5.729    0.000    0.882    0.882
## 
## Intercepts:
##                    Estimate  Std.Err  z-value  P(>|z|)   Std.lv  Std.all
##    .RAVEN1  (.32.)    0.016    0.111    0.142    0.887    0.016    0.016
##    .RAVEN2  (.33.)    0.005    0.112    0.045    0.964    0.005    0.005
##    .RAVEN3  (.34.)    0.000    0.112    0.000    1.000    0.000    0.000
##    .RAVEN4  (.35.)    0.000    0.110    0.001    1.000    0.000    0.000
##    .ZVT1    (.36.)    0.031    0.111    0.281    0.779    0.031    0.031
##    .ZVT2    (.37.)    0.021    0.111    0.191    0.849    0.021    0.021
##    .ZVT3    (.38.)    0.040    0.110    0.368    0.713    0.040    0.041
##    .ZVT4    (.39.)    0.015    0.111    0.133    0.894    0.015    0.015
##    .PIAGET1 (.40.)    0.020    0.109    0.181    0.857    0.020    0.019
##    .PIAGET2 (.41.)    0.053    0.108    0.491    0.623    0.053    0.054
##    .PIAGET3 (.42.)    0.063    0.105    0.605    0.545    0.063    0.065
##    .PIAGET4 (.43.)    0.085    0.096    0.884    0.377    0.085    0.090
##    .gRav              0.000                               0.000    0.000
##    .gZVT              0.000                               0.000    0.000
##     gPsy              0.000                               0.000    0.000
##     gPia              0.000                               0.000    0.000
## 
## Variances:
##                    Estimate  Std.Err  z-value  P(>|z|)   Std.lv  Std.all
##    .RAVEN1  (.16.)    0.530    0.096    5.520    0.000    0.530    0.563
##    .RAVEN2  (.17.)    0.382    0.079    4.856    0.000    0.382    0.402
##    .RAVEN3  (.18.)    0.514    0.094    5.463    0.000    0.514    0.543
##    .RAVEN4  (.19.)    0.188    0.064    2.933    0.003    0.188    0.203
##    .ZVT1    (.20.)    0.356    0.072    4.938    0.000    0.356    0.360
##    .ZVT2    (.21.)    0.356    0.072    4.952    0.000    0.356    0.362
##    .ZVT3    (.22.)    0.251    0.060    4.165    0.000    0.251    0.258
##    .ZVT4    (.23.)    0.297    0.065    4.548    0.000    0.297    0.301
##    .PIAGET1 (.24.)    0.553    0.115    4.830    0.000    0.553    0.505
##    .PIAGET2 (.25.)    0.729    0.128    5.699    0.000    0.729    0.740
##    .PIAGET3 (.26.)    0.638    0.116    5.518    0.000    0.638    0.673
##    .PIAGET4 (.27.)    0.364    0.088    4.152    0.000    0.364    0.407
##    .gRav             14.424   85.560    0.169    0.866    0.577    0.577
##    .gZVT              1.808    0.891    2.028    0.043    0.780    0.780
##     gPsy              0.386    0.211    1.828    0.068    1.000    1.000
##     gPia              2.233    0.817    2.732    0.006    1.000    1.000

Combined Groups

fa.parallel(ShulAll[, 3:14])
## [fa.parallel warnings condensed: "The estimated weights for the factor scores are probably incorrect" and "An ultra-Heywood case was detected. Examine the results carefully", repeated across simulated solutions]

## Parallel analysis suggests that the number of factors =  2  and the number of components =  2
fa.parallel(ShulAll[, 3:10])
## [fa.parallel warnings condensed: "The estimated weights for the factor scores are probably incorrect" and "An ultra-Heywood case was detected. Examine the results carefully", repeated across simulated solutions]

## Parallel analysis suggests that the number of factors =  2  and the number of components =  2
fa.parallel(ShulAll[, 11:14])
## [fa.parallel warnings condensed: "The estimated weights for the factor scores are probably incorrect" and "An ultra-Heywood case was detected. Examine the results carefully", repeated across simulated solutions]

## Parallel analysis suggests that the number of factors =  2  and the number of components =  1
fasAll1 <- fa(ShulAll[, 3:14], nfactors = 1)
fasAll2 <- fa(ShulAll[, 3:14], nfactors = 2)
fasAll3 <- fa(ShulAll[, 3:14], nfactors = 3)

faPsysAll1 <- fa(ShulAll[, 3:10], nfactors = 1)
faPsysAll2 <- fa(ShulAll[, 3:10], nfactors = 2)

faPiasAll1 <- fa(ShulAll[, 11:14], nfactors = 1)
faPiasAll2 <- fa(ShulAll[, 11:14], nfactors = 2)

print(fasAll1$loadings)
## 
## Loadings:
##         MR1  
## ZVT1    0.644
## ZVT2    0.609
## ZVT3    0.742
## ZVT4    0.731
## RAVEN1  0.643
## RAVEN2  0.657
## RAVEN3  0.587
## RAVEN4  0.709
## PIAGET1 0.490
## PIAGET2 0.323
## PIAGET3 0.426
## PIAGET4 0.564
## 
##                  MR1
## SS loadings    4.407
## Proportion Var 0.367
print(fasAll2$loadings)
## 
## Loadings:
##         MR1    MR2   
## ZVT1            0.847
## ZVT2    -0.115  0.905
## ZVT3     0.149  0.749
## ZVT4     0.158  0.724
## RAVEN1   0.488  0.244
## RAVEN2   0.767       
## RAVEN3   0.708       
## RAVEN4   0.841       
## PIAGET1  0.502       
## PIAGET2  0.400       
## PIAGET3  0.395       
## PIAGET4  0.579       
## 
##                  MR1   MR2
## SS loadings    3.001 2.698
## Proportion Var 0.250 0.225
## Cumulative Var 0.250 0.475
print(fasAll3$loadings)
## 
## Loadings:
##         MR2    MR1    MR3   
## ZVT1     0.852 -0.110  0.116
## ZVT2     0.888              
## ZVT3     0.747  0.131       
## ZVT4     0.728  0.228       
## RAVEN1   0.252  0.595 -0.101
## RAVEN2          0.762       
## RAVEN3          0.576  0.209
## RAVEN4          0.924       
## PIAGET1                0.849
## PIAGET2                0.488
## PIAGET3  0.101  0.194  0.298
## PIAGET4         0.332  0.369
## 
##                  MR2   MR1   MR3
## SS loadings    2.685 2.362 1.265
## Proportion Var 0.224 0.197 0.105
## Cumulative Var 0.224 0.421 0.526
print(faPsysAll1$loadings)
## 
## Loadings:
##        MR1  
## ZVT1   0.680
## ZVT2   0.673
## ZVT3   0.785
## ZVT4   0.809
## RAVEN1 0.661
## RAVEN2 0.607
## RAVEN3 0.523
## RAVEN4 0.662
## 
##                  MR1
## SS loadings    3.703
## Proportion Var 0.463
print(faPsysAll2$loadings)
## 
## Loadings:
##        MR1    MR2   
## ZVT1    0.862       
## ZVT2    0.892 -0.101
## ZVT3    0.760  0.135
## ZVT4    0.713  0.206
## RAVEN1  0.244  0.546
## RAVEN2         0.773
## RAVEN3         0.653
## RAVEN4         0.941
## 
##                  MR1   MR2
## SS loadings    2.684 2.284
## Proportion Var 0.335 0.285
## Cumulative Var 0.335 0.621
print(faPiasAll1$loadings)
## 
## Loadings:
##         MR1  
## PIAGET1 0.708
## PIAGET2 0.515
## PIAGET3 0.520
## PIAGET4 0.635
## 
##                 MR1
## SS loadings    1.44
## Proportion Var 0.36
print(faPiasAll2$loadings)
## 
## Loadings:
##         MR1    MR2   
## PIAGET1  0.134  0.650
## PIAGET2         0.729
## PIAGET3  0.382  0.179
## PIAGET4  0.999       
## 
##                  MR1   MR2
## SS loadings    1.170 0.986
## Proportion Var 0.292 0.246
## Cumulative Var 0.292 0.539
EFATOGSAll <- c(.644, .609, .742, .731, .643, .657, .587, .709, .490, .323, .426, .564)
EFATOGSAllPsy <- c(.644, .609, .742, .731, .643, .657, .587, .709); EFATOGSAllPia <- c(.490, .323, .426, .564)
EFASEPSAll <- c(.680, .673, .785, .809, .661, .607, .523, .662, .708, .515, .520, .635)
EFASEPSAllPsy <- c(.680, .673, .785, .809, .661, .607, .523, .662); EFASEPSAllPia <- c(.708, .515, .520, .635)

cor(EFATOGSAll, EFASEPSAll, method = "pearson"); cor(EFATOGSAll, EFASEPSAll, method = "spearman"); CONGO(EFATOGSAll, EFASEPSAll)
## [1] 0.7202227
## [1] 0.6643357
## [1] 0.990308
cor(EFATOGSAllPsy, EFASEPSAllPsy, method = "pearson"); cor(EFATOGSAllPsy, EFASEPSAllPsy, method = "spearman"); CONGO(EFATOGSAllPsy, EFASEPSAllPsy)
## [1] 0.8154537
## [1] 0.6666667
## [1] 0.9970956
cor(EFATOGSAllPia, EFASEPSAllPia, method = "pearson"); cor(EFATOGSAllPia, EFASEPSAllPia, method = "spearman"); CONGO(EFATOGSAllPia, EFASEPSAllPia)
## [1] 0.7320973
## [1] 0.8
## [1] 0.991335
ShulNo.fit <- cfa(ShulModNo, data = ShulAll, std.lv = T); "\n"
## Warning in lav_model_vcov(lavmodel = lavmodel, lavsamplestats = lavsamplestats, : lavaan WARNING:
##     Could not compute standard errors! The information matrix could
##     not be inverted. This may be a symptom that the model is not
##     identified.
## [1] "\n"
Shul.fit <- cfa(ShulMod, data = ShulAll, std.lv = T)
ShulID.fit <- cfa(ShulModID, data = ShulAll, std.lv = T)

round(cbind("No Relationship"   = fitMeasures(ShulNo.fit, FITM),
            "Free Relationship" = fitMeasures(Shul.fit, FITM),
            "Identical"         = fitMeasures(ShulID.fit, FITM)), 3)
##                No Relationship Free Relationship Identical
## chisq                  103.291            79.525    83.188
## df                      52.000            51.000    52.000
## npar                    26.000            27.000    26.000
## cfi                      0.874             0.930     0.923
## rmsea                    0.114             0.086     0.089
## rmsea.ci.lower           0.081             0.046     0.051
## rmsea.ci.upper           0.146             0.121     0.123
## aic                   2215.498          2193.732  2195.395
## bic                   2276.097          2256.662  2255.994
## srmr                     0.185             0.069     0.083
#cfaHB(Shul.fit) #does not converge

1 - pchisq(83.188 - 79.525, df = 1, lower.tail = T)
## [1] 0.05563338
resid(Shul.fit, "cor"); sum(abs(resid(Shul.fit, "cor")$cov) > CRITR(76)); sum(abs(resid(Shul.fit, "cor")$cov) > CRITR(76, NP(76)))
## $type
## [1] "cor.bollen"
## 
## $cov
##         RAVEN1 RAVEN2 RAVEN3 RAVEN4 ZVT1   ZVT2   ZVT3   ZVT4   PIAGET1 PIAGET2
## RAVEN1   0.000                                                                 
## RAVEN2   0.043  0.000                                                          
## RAVEN3  -0.095 -0.054  0.000                                                   
## RAVEN4  -0.006  0.000  0.037  0.000                                            
## ZVT1     0.089 -0.118 -0.033 -0.086  0.000                                     
## ZVT2     0.110 -0.068 -0.154 -0.094  0.108  0.000                              
## ZVT3     0.171  0.082  0.002 -0.038 -0.031 -0.036  0.000                       
## ZVT4     0.177  0.023  0.144  0.068 -0.047 -0.028  0.044  0.000                
## PIAGET1 -0.128 -0.019  0.115 -0.086  0.080 -0.064  0.061 -0.012  0.000         
## PIAGET2  0.031  0.000 -0.024 -0.075 -0.046 -0.059 -0.001 -0.145  0.143   0.000 
## PIAGET3 -0.008 -0.011  0.037  0.057  0.033 -0.034  0.041  0.049 -0.033  -0.072 
## PIAGET4  0.034  0.100  0.084  0.032  0.014 -0.065  0.074 -0.030 -0.028  -0.070 
##         PIAGET3 PIAGET4
## RAVEN1                 
## RAVEN2                 
## RAVEN3                 
## RAVEN4                 
## ZVT1                   
## ZVT2                   
## ZVT3                   
## ZVT4                   
## PIAGET1                
## PIAGET2                
## PIAGET3  0.000         
## PIAGET4  0.044   0.000
## [1] 0
## [1] 0

There were no violations of local independence, whether the unadjusted or the sample-size-scaled critical correlation was used.
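As a quick sanity check of that claim (a sketch), the largest absolute residual correlation can be compared against the two critical values used above:

#Largest absolute residual correlation vs. the unadjusted and alpha-scaled critical r
max(abs(resid(Shul.fit, "cor")$cov)); CRITR(76); CRITR(76, NP(76))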

summary(Shul.fit, stand = T, fit = T)
## lavaan 0.6-12 ended normally after 31 iterations
## 
##   Estimator                                         ML
##   Optimization method                           NLMINB
##   Number of model parameters                        27
## 
##                                                   Used       Total
##   Number of observations                            76          81
## 
## Model Test User Model:
##                                                       
##   Test statistic                                79.525
##   Degrees of freedom                                51
##   P-value (Chi-square)                           0.006
## 
## Model Test Baseline Model:
## 
##   Test statistic                               473.252
##   Degrees of freedom                                66
##   P-value                                        0.000
## 
## User Model versus Baseline Model:
## 
##   Comparative Fit Index (CFI)                    0.930
##   Tucker-Lewis Index (TLI)                       0.909
## 
## Loglikelihood and Information Criteria:
## 
##   Loglikelihood user model (H0)              -1069.866
##   Loglikelihood unrestricted model (H1)      -1030.104
##                                                       
##   Akaike (AIC)                                2193.732
##   Bayesian (BIC)                              2256.662
##   Sample-size adjusted Bayesian (BIC)         2171.556
## 
## Root Mean Square Error of Approximation:
## 
##   RMSEA                                          0.086
##   90 Percent confidence interval - lower         0.046
##   90 Percent confidence interval - upper         0.121
##   P-value RMSEA <= 0.05                          0.065
## 
## Standardized Root Mean Square Residual:
## 
##   SRMR                                           0.069
## 
## Parameter Estimates:
## 
##   Standard errors                             Standard
##   Information                                 Expected
##   Information saturated (h1) model          Structured
## 
## Latent Variables:
##                    Estimate  Std.Err  z-value  P(>|z|)   Std.lv  Std.all
##   gRav =~                                                               
##     RAVEN1            0.347    0.133    2.612    0.009    0.660    0.671
##     RAVEN2            0.412    0.153    2.696    0.007    0.784    0.790
##     RAVEN3            0.354    0.135    2.623    0.009    0.675    0.684
##     RAVEN4            0.464    0.171    2.719    0.007    0.883    0.895
##   gZVT =~                                                               
##     ZVT1              0.640    0.093    6.914    0.000    0.794    0.803
##     ZVT2              0.640    0.092    6.942    0.000    0.795    0.806
##     ZVT3              0.669    0.092    7.305    0.000    0.831    0.847
##     ZVT4              0.665    0.092    7.198    0.000    0.825    0.835
##   gPsy =~                                                               
##     gRav              1.620    0.809    2.001    0.045    0.851    0.851
##     gZVT              0.736    0.214    3.447    0.001    0.593    0.593
##   gPia =~                                                               
##     PIAGET1           0.689    0.116    5.938    0.000    0.689    0.700
##     PIAGET2           0.491    0.118    4.143    0.000    0.491    0.510
##     PIAGET3           0.467    0.116    4.044    0.000    0.467    0.500
##     PIAGET4           0.599    0.104    5.764    0.000    0.599    0.682
## 
## Covariances:
##                    Estimate  Std.Err  z-value  P(>|z|)   Std.lv  Std.all
##   gPsy ~~                                                               
##     gPia              0.764    0.123    6.217    0.000    0.764    0.764
## 
## Variances:
##                    Estimate  Std.Err  z-value  P(>|z|)   Std.lv  Std.all
##    .RAVEN1            0.532    0.097    5.497    0.000    0.532    0.549
##    .RAVEN2            0.370    0.078    4.714    0.000    0.370    0.375
##    .RAVEN3            0.519    0.095    5.447    0.000    0.519    0.533
##    .RAVEN4            0.194    0.067    2.924    0.003    0.194    0.200
##    .ZVT1              0.347    0.072    4.851    0.000    0.347    0.355
##    .ZVT2              0.340    0.070    4.821    0.000    0.340    0.350
##    .ZVT3              0.271    0.063    4.314    0.000    0.271    0.282
##    .ZVT4              0.296    0.066    4.489    0.000    0.296    0.303
##    .PIAGET1           0.494    0.117    4.220    0.000    0.494    0.510
##    .PIAGET2           0.684    0.125    5.469    0.000    0.684    0.739
##    .PIAGET3           0.657    0.119    5.509    0.000    0.657    0.750
##    .PIAGET4           0.413    0.094    4.406    0.000    0.413    0.535
##    .gRav              1.000                               0.276    0.276
##    .gZVT              1.000                               0.649    0.649
##     gPsy              1.000                               1.000    1.000
##     gPia              1.000                               1.000    1.000
semPaths(Shul.fit, "std", title = F, residuals = F, pastel = T, mar = c(2, 1, 3, 1), posCol = c("skyblue4"), layout = "circle", exoCov = T)

For Posterity: Lim (1988), Again

lowerLIM <-'
1                                                                                       
0.34    1                                                                                   
0.25    0.48    1                                                                               
0.34    0.58    0.68    1                                                                           
0.43    0.54    0.53    0.6 1                                                                       
0.32    0.55    0.51    0.56    0.54    1                                                                   
0.29    0.52    0.77    0.64    0.59    0.54    1                                                               
0.41    0.38    0.35    0.43    0.45    0.31    0.37    1                                                           
0.35    0.29    0.26    0.34    0.32    0.29    0.27    0.56    1                                                       
0.41    0.33    0.24    0.36    0.38    0.23    0.27    0.62    0.59    1                                                   
0.34    0.37    0.37    0.41    0.39    0.33    0.34    0.64    0.67    0.59    1                                               
0.44    0.37    0.38    0.42    0.43    0.32    0.38    0.63    0.58    0.61    0.67    1                                           
0.23    0.23    0.17    0.21    0.29    0.21    0.23    0.31    0.14    0.21    0.23    0.25    1                                       
0.26    0.3 0.24    0.28    0.27    0.29    0.25    0.29    0.21    0.25    0.28    0.31    0.24    1                                   
0.01    0.1 -0.01   0.04    0   0.1 0.02    -0.07   0   -0.01   0   -0.03   -0.09   0.04    1                               
0.29    0.29    0.27    0.27    0.35    0.27    0.28    0.27    0.18    0.21    0.27    0.27    0.24    0.29    0.01    1                           
0.35    0.36    0.28    0.37    0.36    0.3 0.32    0.4 0.27    0.39    0.34    0.34    0.28    0.31    -0.02   0.29    1                       
0.13    0.26    0.21    0.21    0.27    0.22    0.25    0.15    0.08    0.21    0.14    0.14    0.17    0.14    0.04    0.14    0.2 1                   
0.02    0.09    0.05    0.04    0.03    -0.01   0.03    0.07    -0.01   0.05    0.06    0.06    0.03    0.06    0.02    0   0.03    0.03    1               
0.28    0.24    0.21    0.22    0.33    0.24    0.21    0.26    0.16    0.2 0.27    0.27    0.24    0.3 0.03    0.28    0.29    0.09    0.12    1           
0.39    0.35    0.3 0.32    0.42    0.32    0.36    0.4 0.26    0.3 0.37    0.37    0.44    0.34    -0.01   0.32    0.41    0.2 0.05    0.33    1       
0.36    0.38    0.26    0.3 0.39    0.33    0.31    0.3 0.24    0.27    0.29    0.29    0.28    0.3 -0.04   0.26    0.37    0.18    0.04    0.3 0.48    1   
0.41    0.41    0.32    0.38    0.43    0.37    0.34    0.43    0.32    0.36    0.42    0.42    0.34    0.37    -0.05   0.31    0.46    0.24    0.03    0.28    0.53    0.45    1'

lowerLIMPSYC <- '
1                                           
0.34    1                                       
0.25    0.48    1                                   
0.34    0.58    0.68    1                               
0.43    0.54    0.53    0.6 1                           
0.32    0.55    0.51    0.56    0.54    1                       
0.29    0.52    0.77    0.64    0.59    0.54    1                   
0.41    0.38    0.35    0.43    0.45    0.31    0.37    1               
0.35    0.29    0.26    0.34    0.32    0.29    0.27    0.56    1           
0.41    0.33    0.24    0.36    0.38    0.23    0.27    0.62    0.59    1       
0.34    0.37    0.37    0.41    0.39    0.33    0.34    0.64    0.67    0.59    1   
0.44    0.37    0.38    0.42    0.43    0.32    0.38    0.63    0.58    0.61    0.67    1'

lowerLIMPIA <- '
1                                       
0.24    1                                   
-0.09   0.04    1                               
0.24    0.29    0.01    1                           
0.28    0.31    -0.02   0.29    1                       
0.17    0.14    0.04    0.14    0.2 1                   
0.03    0.06    0.02    0   0.03    0.03    1               
0.24    0.3 0.03    0.28    0.29    0.09    0.12    1           
0.44    0.34    -0.01   0.32    0.41    0.2 0.05    0.33    1       
0.28    0.3 -0.04   0.26    0.37    0.18    0.04    0.3 0.48    1   
0.34    0.37    -0.05   0.31    0.46    0.24    0.03    0.28    0.53    0.45    1'

nLIM <- 459

LIM.cor = getCov(lowerLIM, names = c("APM", "B1", "B2", "B3", "B4", "B5", "B6", "B7", "B8", "B9", "B10", "B11", "C1", "C2", "C3", "C4", "C5", "C6", "C7", "C8", "T1", "T2", "T3"))
LIMPSYC.cor = getCov(lowerLIMPSYC, names = c("APM", "B1", "B2", "B3", "B4", "B5", "B6", "B7", "B8", "B9", "B10", "B11"))
LIMPIA.cor = getCov(lowerLIMPIA, names = c("C1", "C2", "C3", "C4", "C5", "C6", "C7", "C8", "T1", "T2", "T3"))
fa.parallel(LIM.cor, n.obs = nLIM)

## Parallel analysis suggests that the number of factors =  3  and the number of components =  3
fa.parallel(LIMPSYC.cor, n.obs = nLIM)

## Parallel analysis suggests that the number of factors =  2  and the number of components =  2
fa.parallel(LIMPIA.cor, n.obs = nLIM)

## Parallel analysis suggests that the number of factors =  1  and the number of components =  1
FATOT1 <- fa(LIM.cor, n.obs = nLIM, nfactors = 1)
FATOT2 <- fa(LIM.cor, n.obs = nLIM, nfactors = 2)
FATOT3 <- fa(LIM.cor, n.obs = nLIM, nfactors = 3)

FAPSYC1 <- fa(LIMPSYC.cor, n.obs = nLIM, nfactors = 1)
FAPSYC2 <- fa(LIMPSYC.cor, n.obs = nLIM, nfactors = 2)

FAPIA <- fa(LIMPIA.cor, n.obs = nLIM, nfactors = 1)

print(FATOT1$loadings)
## 
## Loadings:
##     MR1  
## APM 0.568
## B1  0.658
## B2  0.624
## B3  0.699
## B4  0.723
## B5  0.611
## B6  0.659
## B7  0.698
## B8  0.564
## B9  0.612
## B10 0.678
## B11 0.692
## C1  0.411
## C2  0.462
## C3       
## C4  0.448
## C5  0.567
## C6  0.305
## C7       
## C8  0.418
## T1  0.608
## T2  0.535
## T3  0.646
## 
##                  MR1
## SS loadings    7.333
## Proportion Var 0.319
print(FATOT2$loadings)
## 
## Loadings:
##     MR1    MR2   
## APM  0.224  0.418
## B1   0.656       
## B2   0.789       
## B3   0.737       
## B4   0.672  0.141
## B5   0.709       
## B6   0.838       
## B7          0.751
## B8          0.749
## B9          0.795
## B10         0.781
## B11         0.749
## C1   0.226  0.236
## C2   0.273  0.246
## C3               
## C4   0.321  0.181
## C5   0.296  0.342
## C6   0.309       
## C7               
## C8   0.246  0.223
## T1   0.351  0.331
## T2   0.358  0.241
## T3   0.341  0.386
## 
##                  MR1   MR2
## SS loadings    4.172 3.791
## Proportion Var 0.181 0.165
## Cumulative Var 0.181 0.346
print(FATOT3$loadings)
## 
## Loadings:
##     MR1    MR2    MR3   
## APM         0.249  0.382
## B1   0.536         0.222
## B2   0.877        -0.102
## B3   0.766  0.110       
## B4   0.531         0.271
## B5   0.614         0.161
## B6   0.855              
## B7          0.686  0.139
## B8          0.841 -0.127
## B9          0.769       
## B10         0.795       
## B11         0.730       
## C1                 0.541
## C2                 0.443
## C3   0.113              
## C4   0.122         0.398
## C5          0.126  0.488
## C6   0.199         0.208
## C7                      
## C8                 0.462
## T1                 0.735
## T2                 0.606
## T3          0.115  0.615
## 
##                  MR1   MR2   MR3
## SS loadings    3.127 3.054 2.783
## Proportion Var 0.136 0.133 0.121
## Cumulative Var 0.136 0.269 0.390
print(FAPSYC1$loadings)
## 
## Loadings:
##     MR1  
## APM 0.526
## B1  0.649
## B2  0.667
## B3  0.742
## B4  0.715
## B5  0.613
## B6  0.687
## B7  0.707
## B8  0.614
## B9  0.629
## B10 0.703
## B11 0.717
## 
##                  MR1
## SS loadings    5.334
## Proportion Var 0.444
print(FAPSYC2$loadings)
## 
## Loadings:
##     MR1    MR2   
## APM  0.199  0.395
## B1   0.624  0.109
## B2   0.839       
## B3   0.773       
## B4   0.647  0.160
## B5   0.687       
## B6   0.868       
## B7          0.740
## B8          0.789
## B9          0.811
## B10         0.797
## B11         0.760
## 
##                  MR1   MR2
## SS loadings    3.395 3.249
## Proportion Var 0.283 0.271
## Cumulative Var 0.283 0.554
print(FAPIA$loadings)
## 
## Loadings:
##    MR1   
## C1  0.507
## C2  0.516
## C3       
## C4  0.471
## C5  0.602
## C6  0.296
## C7       
## C8  0.475
## T1  0.731
## T2  0.614
## T3  0.709
## 
##                  MR1
## SS loadings    2.840
## Proportion Var 0.258
EFATOGL88 <- c(.568, .658, .624, .699, .723, .611, .659, .698, .564, .612, .678, .692, .411, .462, .448, .567, .305, .418, .608, .535, .646)
EFATOGL88Psy <- c(.568, .658, .624, .699, .723, .611, .659, .698, .564, .612, .678, .692); EFATOGL88Pia <- c(.411, .462, .448, .567, .305, .418, .608, .535, .646)
EFASEPL88 <- c(.526, .649, .667, .742, .715, .613, .687, .707, .614, .629, .703, .717, .507, .516, .471, .602, .296, .475, .731, .614, .709)
EFASEPL88Psy <- c(.526, .649, .667, .742, .715, .613, .687, .707, .614, .629, .703, .717); EFASEPL88Pia <- c(.507, .516, .471, .602, .296, .475, .731, .614, .709)

cor(EFATOGL88, EFASEPL88, method = "pearson"); cor(EFATOGL88, EFASEPL88, method = "spearman"); CONGO(EFATOGL88, EFASEPL88)
## [1] 0.9424816
## [1] 0.8749594
## [1] 0.998043
cor(EFATOGL88Psy, EFASEPL88Psy, method = "pearson"); cor(EFATOGL88Psy, EFASEPL88Psy, method = "spearman"); CONGO(EFATOGL88Psy, EFASEPL88Psy)
## [1] 0.9017536
## [1] 0.9370629
## [1] 0.9992802
cor(EFATOGL88Pia, EFASEPL88Pia, method = "pearson"); cor(EFATOGL88Pia, EFASEPL88Pia, method = "spearman"); CONGO(EFATOGL88Pia, EFASEPL88Pia)
## [1] 0.9693414
## [1] 0.9
## [1] 0.9983633
LIMTOG.model <- '
F1 =~ APM + B7 + B8 + B9 + B10 + B11
F2 =~ B1 + B2 + B3 + B4 + B5 + B6
F3 =~ C1 + C5 + C6 + T1 + T2 + T3
F4 =~ C2 + C4 + C8

g =~ F1 + F2 + F3 + F4'

LIMCGF.model <- '
F1 =~ APM + B7 + B8 + B9 + B10 + B11
F2 =~ B1 + B2 + B3 + B4 + B5 + B6
F3 =~ C1 + C5 + C6 + T1 + T2 + T3
F4 =~ C2 + C4 + C8'

LIMBF.model <- '
F1 =~ APM + B7 + B8 + B9 + B10 + B11
F2 =~ B1 + B2 + B3 + B4 + B5 + B6
F3 =~ C1 + C5 + C6 + T1 + T2 + T3
F4 =~ C2 + C4 + C8

g =~ APM + B7 + B8 + B9 + B10 + B11 + B1 + B2 + B3 + B4 + B5 + B6 + C1 + C5 + C6 + T1 + T2 + T3 + C2 + C4 + C8'

LIMTOG.fit <- cfa(LIMTOG.model, sample.cov = LIM.cor, sample.nobs = nLIM, std.lv = T)
LIMCGF.fit <- cfa(LIMCGF.model, sample.cov = LIM.cor, sample.nobs = nLIM, std.lv = T)
LIMBF.fit <- cfa(LIMBF.model, sample.cov = LIM.cor, sample.nobs = nLIM, std.lv = T, orthogonal = T)

round(cbind("HOF" = fitMeasures(LIMTOG.fit, FITM),
            "CGF" = fitMeasures(LIMCGF.fit, FITM),
            "BF"  = fitMeasures(LIMBF.fit, FITM)), 3)
##                      HOF       CGF        BF
## chisq            412.428   405.420   236.523
## df               185.000   183.000   168.000
## npar              46.000    48.000    63.000
## cfi                0.945     0.947     0.984
## rmsea              0.052     0.051     0.030
## rmsea.ci.lower     0.045     0.045     0.020
## rmsea.ci.upper     0.058     0.058     0.038
## aic            23460.726 23457.717 23318.821
## bic            23650.662 23655.912 23578.951
## srmr               0.053     0.052     0.031
1 - pchisq(c((412.428-405.420), (412.428-236.523), (405.420-236.523)), c(2, 17, 15), lower.tail = T)
## [1] 0.03007684 0.00000000 0.00000000

The HOF-versus-CGF decrement is not significant with scaling, though p falls just below 0.05 without it. BIC favors the HOF while AIC slightly favors the CGF, and the other fit indices are indeterminate; on balance, the evidence favors the higher-order model. The bifactor model fits staggeringly better than both, but it is theoretically inadmissible for a number of reasons, ranging from how invariance is tested to the fact that it seems to model away local independence violations that are better interpreted as part of the specificities rather than as group factors, alongside modeling out misfit (i.e., overfitting).
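To make the "with scaling" decision explicit, a brief sketch using the NP() helper from the Setup: the HOF-versus-CGF p-value is compared to the sample-size-scaled alpha rather than to 0.05.

alphaLIM <- NP(459)                          #sample-size-scaled alpha, roughly 0.00032
pHOFvCGF <- 1 - pchisq(412.428 - 405.420, 2) #chi-square decrement for HOF vs. CGF, about 0.030
pHOFvCGF < alphaLIM                          #FALSE: not significant with scaling
pHOFvCGF < 0.05                              #TRUE: just significant without it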

LIMTOIINo.model <- '
F1 =~ APM + B7 + B8 + B9 + B10 + B11
F2 =~ B1 + B2 + B3 + B4 + B5 + B6

gPSY =~ F1 + F2

F3 =~ C1 + C5 + C6 + T1 + T2 + T3
F4 =~ C2 + C4 + C8

gPIA =~ F3 + F4

gPSY ~~ 0*gPIA'

LIMTOII.model <- '
F1 =~ APM + B7 + B8 + B9 + B10 + B11
F2 =~ B1 + B2 + B3 + B4 + B5 + B6

gPSY =~ F1 + F2

F3 =~ C1 + C5 + C6 + T1 + T2 + T3
F4 =~ C2 + C4 + C8

gPIA =~ F3 + F4

gPSY ~~ gPIA'

LIMTOIIID.model <- '
F1 =~ APM + B7 + B8 + B9 + B10 + B11
F2 =~ B1 + B2 + B3 + B4 + B5 + B6

gPSY =~ F1 + F2

F3 =~ C1 + C5 + C6 + T1 + T2 + T3
F4 =~ C2 + C4 + C8

gPIA =~ F3 + F4

gPSY ~~ 1*gPIA'

LIMPTOIINo.fit <- cfa(LIMTOIINo.model, sample.cov = LIM.cor, sample.nobs = nLIM, std.lv = T); "\n"
## Warning in lav_model_vcov(lavmodel = lavmodel, lavsamplestats = lavsamplestats, : lavaan WARNING:
##     Could not compute standard errors! The information matrix could
##     not be inverted. This may be a symptom that the model is not
##     identified.
## [1] "\n"
LIMPTOII.fit <- cfa(LIMTOII.model, sample.cov = LIM.cor, sample.nobs = nLIM, std.lv = T)
LIMPTOIIID.fit <- cfa(LIMTOIIID.model, sample.cov = LIM.cor, sample.nobs = nLIM, std.lv = T)

round(cbind("Lim No" = fitMeasures(LIMPTOIINo.fit, FITM),
            "Lim" = fitMeasures(LIMPTOII.fit, FITM),
            "Lim ID" = fitMeasures(LIMPTOIIID.fit, FITM)), 3)
##                   Lim No       Lim    Lim ID
## chisq            663.253   405.657   412.428
## df               185.000   184.000   185.000
## npar              46.000    47.000    46.000
## cfi                0.885     0.947     0.945
## rmsea              0.075     0.051     0.052
## rmsea.ci.lower     0.069     0.044     0.045
## rmsea.ci.upper     0.081     0.058     0.058
## aic            23711.551 23455.954 23460.726
## bic            23901.487 23650.020 23650.662
## srmr               0.205     0.052     0.053
1 - pchisq(412.428 - 405.657, 1, lower.tail = T)
## [1] 0.009265091

Significant at the conventional level, but not with scaling.
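The same scaled-alpha rule applied to this comparison (a one-line sketch):

(1 - pchisq(412.428 - 405.657, 1)) < NP(459) #FALSE: the decrement is not significant at the scaled alpha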

resid(LIMPTOII.fit, "cor"); sum(abs(resid(LIMPTOII.fit, "cor")$cov > CRITR(459))); sum(abs(resid(LIMPTOII.fit, "cor")$cov > CRITR(459, NP(459))))
## $type
## [1] "cor.bollen"
## 
## $cov
##     APM    B7     B8     B9     B10    B11    B1     B2     B3     B4    
## APM  0.000                                                               
## B7  -0.003  0.000                                                        
## B8  -0.035 -0.025  0.000                                                 
## B9   0.017  0.022  0.033  0.000                                          
## B10 -0.086 -0.007  0.067 -0.026  0.000                                   
## B11  0.019 -0.010 -0.017  0.000  0.010  0.000                            
## B1   0.128  0.058 -0.011  0.023  0.038  0.041  0.000                     
## B2   0.004 -0.025 -0.089 -0.117 -0.016 -0.002 -0.073  0.000              
## B3   0.090  0.050 -0.015 -0.002  0.018  0.032  0.018  0.027  0.000       
## B4   0.202  0.104 -0.003  0.050  0.033  0.076  0.028 -0.064 -0.004  0.000
## B5   0.108 -0.012 -0.011 -0.077 -0.002 -0.009  0.074 -0.043 -0.002  0.028
## B6   0.038 -0.013 -0.087 -0.095 -0.055 -0.011 -0.046  0.112 -0.028 -0.018
## C1   0.059  0.051 -0.102 -0.037 -0.037 -0.015  0.005 -0.091 -0.055  0.048
## C5   0.139  0.079 -0.030  0.084  0.009  0.012  0.081 -0.044  0.041  0.060
## C6   0.022 -0.014 -0.073  0.054 -0.029 -0.028  0.118  0.044  0.042  0.117
## T1   0.142  0.023 -0.091 -0.059 -0.018 -0.015  0.023 -0.080 -0.066  0.069
## T2   0.149 -0.021 -0.060 -0.036 -0.041 -0.038  0.101 -0.064 -0.029  0.090
## T3   0.159  0.049 -0.036 -0.003  0.027  0.031  0.079 -0.064 -0.010  0.075
## C2   0.075  0.009 -0.052 -0.018 -0.010  0.023  0.056 -0.043 -0.008  0.008
## C4   0.118  0.008 -0.064 -0.039  0.000  0.003  0.063  0.006  0.002  0.106
## C8   0.112  0.005 -0.078 -0.043  0.007  0.010  0.019 -0.047 -0.041  0.092
##     B5     B6     C1     C5     C6     T1     T2     T3     C2     C4    
## APM                                                                      
## B7                                                                       
## B8                                                                       
## B9                                                                       
## B10                                                                      
## B11                                                                      
## B1                                                                       
## B2                                                                       
## B3                                                                       
## B4                                                                       
## B5   0.000                                                               
## B6  -0.026  0.000                                                        
## C1  -0.015 -0.037  0.000                                                 
## C5   0.021 -0.012 -0.027  0.000                                          
## C6   0.078  0.081  0.013  0.005  0.000                                   
## T1  -0.007 -0.029  0.080 -0.037 -0.028  0.000                            
## T2   0.051 -0.022 -0.027 -0.011 -0.015  0.033  0.000                     
## T3   0.039 -0.053 -0.025  0.008  0.009  0.000 -0.002  0.000              
## C2   0.046 -0.040  0.000  0.012 -0.012 -0.009  0.002  0.017  0.000       
## C4   0.043  0.010  0.016  0.013 -0.002 -0.005 -0.017 -0.019 -0.011  0.000
## C8   0.019 -0.053  0.022  0.020 -0.048  0.013  0.030 -0.040  0.006  0.006
##     C8    
## APM       
## B7        
## B8        
## B9        
## B10       
## B11       
## B1        
## B2        
## B3        
## B4        
## B5        
## B6        
## C1        
## C5        
## C6        
## T1        
## T2        
## T3        
## C2        
## C4        
## C8   0.000
## [1] 32
## [1] 2

There were 32 violations of local independence, reduced to two with scaling. The count could be lowered further by freeing a handful of residual covariances, but doing so does not change the result and does not much matter, since the flagged residuals can be attributed entirely to intra-psychometric residual covariances.
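A sketch of how those candidate residual covariances could be located with lavaan's modification indices (illustrative only; none were added to the model):

MI <- modindices(LIMPTOII.fit) #modification indices for the retained model
MI <- MI[MI$op == "~~", ]      #keep residual-covariance candidates
head(MI[order(-MI$mi), ], 10)  #ten largest expected chi-square improvements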

summary(LIMPTOII.fit, stand = T, fit = T)
## lavaan 0.6-12 ended normally after 63 iterations
## 
##   Estimator                                         ML
##   Optimization method                           NLMINB
##   Number of model parameters                        47
## 
##   Number of observations                           459
## 
## Model Test User Model:
##                                                       
##   Test statistic                               405.657
##   Degrees of freedom                               184
##   P-value (Chi-square)                           0.000
## 
## Model Test Baseline Model:
## 
##   Test statistic                              4376.977
##   Degrees of freedom                               210
##   P-value                                        0.000
## 
## User Model versus Baseline Model:
## 
##   Comparative Fit Index (CFI)                    0.947
##   Tucker-Lewis Index (TLI)                       0.939
## 
## Loglikelihood and Information Criteria:
## 
##   Loglikelihood user model (H0)             -11680.977
##   Loglikelihood unrestricted model (H1)     -11478.149
##                                                       
##   Akaike (AIC)                               23455.954
##   Bayesian (BIC)                             23650.020
##   Sample-size adjusted Bayesian (BIC)        23500.855
## 
## Root Mean Square Error of Approximation:
## 
##   RMSEA                                          0.051
##   90 Percent confidence interval - lower         0.044
##   90 Percent confidence interval - upper         0.058
##   P-value RMSEA <= 0.05                          0.372
## 
## Standardized Root Mean Square Residual:
## 
##   SRMR                                           0.052
## 
## Parameter Estimates:
## 
##   Standard errors                             Standard
##   Information                                 Expected
##   Information saturated (h1) model          Structured
## 
## Latent Variables:
##                    Estimate  Std.Err  z-value  P(>|z|)   Std.lv  Std.all
##   F1 =~                                                                 
##     APM               0.332    0.034    9.903    0.000    0.520    0.521
##     B7                0.505    0.036   13.893    0.000    0.791    0.792
##     B8                0.471    0.036   13.195    0.000    0.738    0.739
##     B9                0.482    0.036   13.408    0.000    0.754    0.755
##     B10               0.521    0.037   14.195    0.000    0.816    0.817
##     B11               0.516    0.037   14.098    0.000    0.808    0.809
##   F2 =~                                                                 
##     B1                0.443    0.035   12.640    0.000    0.689    0.690
##     B2                0.514    0.036   14.184    0.000    0.801    0.802
##     B3                0.522    0.036   14.343    0.000    0.813    0.814
##     B4                0.476    0.036   13.374    0.000    0.741    0.741
##     B5                0.443    0.035   12.638    0.000    0.689    0.690
##     B6                0.526    0.036   14.418    0.000    0.819    0.820
##   gPSY =~                                                               
##     F1                1.204    0.126    9.557    0.000    0.769    0.769
##     F2                1.194    0.124    9.629    0.000    0.767    0.767
##   F3 =~                                                                 
##     C1                0.163    0.049    3.345    0.001    0.497    0.498
##     C5                0.201    0.059    3.412    0.001    0.617    0.617
##     C6                0.103    0.033    3.089    0.002    0.315    0.315
##     T1                0.236    0.069    3.441    0.001    0.723    0.724
##     T2                0.202    0.059    3.412    0.001    0.617    0.617
##     T3                0.239    0.069    3.442    0.001    0.731    0.732
##   F4 =~                                                                 
##     C2                0.252    0.061    4.102    0.000    0.568    0.569
##     C4                0.234    0.057    4.085    0.000    0.529    0.530
##     C8                0.228    0.056    4.074    0.000    0.516    0.516
##   gPIA =~                                                               
##     F3                2.893    0.922    3.137    0.002    0.945    0.945
##     F4                2.024    0.525    3.852    0.000    0.897    0.897
## 
## Covariances:
##                    Estimate  Std.Err  z-value  P(>|z|)   Std.lv  Std.all
##   gPSY ~~                                                               
##     gPIA              0.904    0.037   24.169    0.000    0.904    0.904
## 
## Variances:
##                    Estimate  Std.Err  z-value  P(>|z|)   Std.lv  Std.all
##    .APM               0.727    0.050   14.536    0.000    0.727    0.728
##    .B7                0.372    0.030   12.336    0.000    0.372    0.373
##    .B8                0.453    0.034   13.151    0.000    0.453    0.454
##    .B9                0.430    0.033   12.947    0.000    0.430    0.431
##    .B10               0.332    0.028   11.798    0.000    0.332    0.333
##    .B11               0.345    0.029   11.987    0.000    0.345    0.346
##    .B1                0.523    0.038   13.703    0.000    0.523    0.524
##    .B2                0.356    0.029   12.262    0.000    0.356    0.357
##    .B3                0.336    0.028   11.991    0.000    0.336    0.337
##    .B4                0.449    0.034   13.202    0.000    0.449    0.450
##    .B5                0.523    0.038   13.704    0.000    0.523    0.524
##    .B6                0.326    0.028   11.845    0.000    0.326    0.327
##    .C1                0.750    0.053   14.227    0.000    0.750    0.752
##    .C5                0.618    0.046   13.412    0.000    0.618    0.619
##    .C6                0.899    0.061   14.842    0.000    0.899    0.901
##    .T1                0.475    0.040   12.007    0.000    0.475    0.476
##    .T2                0.617    0.046   13.412    0.000    0.617    0.619
##    .T3                0.463    0.039   11.845    0.000    0.463    0.464
##    .C2                0.675    0.055   12.227    0.000    0.675    0.676
##    .C4                0.718    0.056   12.848    0.000    0.718    0.719
##    .C8                0.732    0.056   13.029    0.000    0.732    0.733
##    .F1                1.000                               0.408    0.408
##    .F2                1.000                               0.412    0.412
##     gPSY              1.000                               1.000    1.000
##    .F3                1.000                               0.107    0.107
##    .F4                1.000                               0.196    0.196
##     gPIA              1.000                               1.000    1.000
LIMLAT <- list(
  F1 = c("APM", "B7", "B8", "B9", "B10", "B11"),
  F2 = c("B1", "B2", "B3", "B4", "B5", "B6"),
  F3 = c("C1", "C5", "C6", "T1", "T2", "T3"),
  F4 = c("C2", "C4", "C8"))

semPaths(LIMPTOII.fit, "model", "std", title = F, residuals = F, groups = LIMLAT, pastel = T, mar = c(2, 1, 3, 1), bifactor = c("gPSY", "gPIA"), layout = "tree2", exoCov = T)

Overview

| Dataset | Sample Size | Good Fit? | Consistent Loadings? | Base relationship | Standard Error | Identity? | Dimensionality? |
|---|---|---|---|---|---|---|---|
| DeVries & Kohlberg (1977) | 47 | Yes | Yes | 0.876 | 0.096 | Yes | Yes |
| DeVries (1974) - A | 50 | No | Yes | 0.812 | 0.109 | Yes | Yes |
| DeVries (1974) - B | 79 | No | Yes | 0.788 | 0.096 | Yes | Yes |
| DeVries (1974) - C | 126 | NA | Yes | NA | NA | NA | NA |
| Hathaway (1972) - K | 56 | No | Yes | 0.892 | 0.109 | Yes | Yes |
| Hathaway (1972) - I | 56 | NA | Yes | NA | NA | NA | NA |
| Hathaway (1972) - II | 56 | No | Yes | 0.924 | 0.073 | Yes | Yes |
| Kiga | 39 | Yes | Yes | 0.575 | 0.189 | No* | Yes |
| Shul (6-8) | 35 | Yes | Yes | 0.628 | 0.153 | No* | Yes |
| Shul (9-10) | 41 | Yes | Yes | 0.964 | 0.239 | Yes | Yes |
| Shul (Combined) | 76 | Yes | Yes | 0.764 | 0.123 | Yes | Yes |
| Lim (1988) | 459 | Yes | Yes | 0.904 | 0.037 | Yes | Yes |

Sample sizes were all on the small side, and CFA/JCFA fits were mostly poor, as tends to be the case with summary-derived data from small samples. Some datasets did not allow fit to be tested at all, either because they had too few indicators (DeVries (1974) - C) or because the matrix was non-positive definite (NPD; Hathaway (1972) - I). Despite this, wherever it could be assessed, every dataset showed exceptional stability in factor loadings, good-to-excellent construct identity despite substantial psychometric sampling error, and good unidimensionality, as indicated by the eigenvalue distributions and the evaluation of local independence. Identity was judged using the scaled p-values. The consistency of loadings for DeVries - C was judged by comparison with the other batteries from those studies.

* Power to discriminate between models was low throughout, but in these cases the problem was more severe than elsewhere. The information-theoretic criteria worsened only acceptably and the \(\chi^2\) decrement was likewise acceptable, but because of the low power, these verdicts should be considered tentative at best. Combining the two Shul studies' data - which is viable because invariance held - gave stronger evidence for identity between the two constructs, though power was clearly still low.
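A minimal sketch of the Shul invariance check referred to above, assuming ShulAll (the raw data shown in the Post-Script) carries an age-group column - called "AgeGroup" here purely for illustration - and that Shul.model is the syntax behind Shul.fit:

Shul.conf   <- cfa(Shul.model, data = ShulAll, group = "AgeGroup", std.lv = T)
Shul.metric <- cfa(Shul.model, data = ShulAll, group = "AgeGroup", std.lv = T, group.equal = "loadings")
Shul.scalar <- cfa(Shul.model, data = ShulAll, group = "AgeGroup", std.lv = T, group.equal = c("loadings", "intercepts"))
anova(Shul.conf, Shul.metric, Shul.scalar) #configural vs. metric vs. scalar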

Meta-Analysis

The Shul (Combined) datapoint is not admissible alongside the two samples it is composed of. If those two are dropped and the combined estimate is used instead, however, it is more precisely estimated than either, and we know the scores can be compared across the two samples, so using it is worthwhile. I checked both possibilities - Shul (6-8) and Shul (9-10) entered separately versus Shul (Combined) alone - and the combined sample produced tighter, but slightly lower, estimates (a sketch of this check follows the forest plot below). It would be improper to do the same with the Hathaway samples because they are not separate samples at all, just the same sample tested at a later age. At the same time, because of the developmental predictions of Piagetian theory, the Hathaway waves are doubly valid for separate inclusion, since the discrete nature of stages should leave them unconfounded. One could argue that the Shul samples need to be included separately because measurement invariance may actually have failed and gone undetected owing to the small sample size, but that is (1) immaterial, because the results are virtually identical, and (2) impossible to show.

MetaData <- data.frame("Dataset" = c("DeVries & Kohlberg", "DeVries A", "DeVries B", "Hathaway K", "Hathaway II", "Kiga", "Shul Combined", "Lim"), "R" = c(0.876, 0.812, 0.788, 0.892, 0.924, 0.575, 0.764, 0.904), "SE" = c(0.096, 0.109, 0.096, 0.109, 0.073, 0.189, 0.123, 0.037))

MetaData$ZR <- atanh(MetaData$R); MetaData$ZSE <- atanh(MetaData$SE)
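For reference, the transformation applied here to both columns is Fisher's \(z\), \(z = \operatorname{atanh}(r) = \tfrac{1}{2}\ln\frac{1+r}{1-r}\); for the DeVries & Kohlberg estimate, for instance, \(\operatorname{atanh}(0.876) \approx 1.36\). Because sm = "ZCOR" is used below, the pooled estimate is back-transformed to the correlation metric, \(r = \tanh(z)\), in the printed output.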

p_load(meta, metafor, dmetar, metasens)

PiaMeta <- metagen(TE = ZR,
                seTE = ZSE,
                MetaData,
                studlab = Dataset,
                sm = "ZCOR",
                method.tau = "SJ",
                hakn = T,
                backtrans = T,
                title = "Piagetian g Correlations with Psychometric g"); summary(PiaMeta)
## Review:     Piagetian g Correlations with Psychometric g
## 
##                       COR           95%-CI %W(common) %W(random)
## DeVries & Kohlberg 0.8760 [0.8241; 0.9133]        7.7       12.8
## DeVries A          0.8120 [0.7251; 0.8734]        6.0       12.5
## DeVries B          0.7880 [0.7051; 0.8496]        7.7       12.8
## Hathaway K         0.8920 [0.8388; 0.9283]        6.0       12.5
## Hathaway II        0.9240 [0.9000; 0.9424]       13.4       13.4
## Kiga               0.5750 [0.2729; 0.7739]        2.0        9.9
## Shul Combined      0.7640 [0.6431; 0.8477]        4.7       12.0
## Lim                0.9040 [0.8898; 0.9164]       52.4       14.1
## 
## Number of studies combined: k = 8
## 
##                         COR           95%-CI   z|t  p-value
## Common effect model  0.8857 [0.8739; 0.8965] 52.30        0
## Random effects model 0.8465 [0.7578; 0.9045] 11.63 < 0.0001
## 
## Quantifying heterogeneity:
##  tau^2 = 0.0808 [0.0259; 0.3890]; tau = 0.2843 [0.1608; 0.6237]
##  I^2 = 88.1% [78.8%; 93.3%]; H = 2.90 [2.17; 3.86]
## 
## Test of heterogeneity:
##      Q d.f.  p-value
##  58.69    7 < 0.0001
## 
## Details on meta-analytical method:
## - Inverse variance method
## - Sidik-Jonkman estimator for tau^2
## - Q-profile method for confidence interval of tau^2 and tau
## - Hartung-Knapp adjustment for random effects model
## - Fisher's z transformation of correlations
forest.meta(PiaMeta, layout = "JAMA",
            col.diamond = "gold",
            sortvar = TE,
            predict = T,
            print.tau2 = T,
            leftlabs = c("Author", "Correlation", "95% CI"),
            xlim = c(-0.15, 1.05),
            digits = 3)
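The sensitivity check described at the start of this section - entering Shul (6-8) and Shul (9-10) separately instead of Shul (Combined) - can be reproduced along these lines (a sketch based on the r and SE values from the Overview table, not the verbatim code used):

MetaDataSep <- rbind(subset(MetaData, Dataset != "Shul Combined")[, c("Dataset", "R", "SE")],
                     data.frame("Dataset" = c("Shul (6-8)", "Shul (9-10)"), "R" = c(0.628, 0.964), "SE" = c(0.153, 0.239)))
MetaDataSep$ZR <- atanh(MetaDataSep$R); MetaDataSep$ZSE <- atanh(MetaDataSep$SE)

PiaMetaSep <- metagen(TE = ZR, seTE = ZSE, data = MetaDataSep, studlab = Dataset, sm = "ZCOR", method.tau = "SJ", hakn = T)
summary(PiaMetaSep)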

metabias(PiaMeta, method.bias = "linreg"); pcurve(PiaMeta); TF = trimfill(PiaMeta, side = "left"); summary(TF); funnel(TF, legend = T); limit <- limitmeta(PiaMeta); summary(limit); funnel.limitmeta(limit, xlim = c(0.3, 2.0), shrunken = T)
## Warning: Number of studies (k=8) too small to test for small study effects
## (k.min=10). Change argument 'k.min' if appropriate.

## P-curve analysis 
##  ----------------------- 
## - Total number of provided studies: k = 8 
## - Total number of p<0.05 studies included into the analysis: k = 8 (100%) 
## - Total number of studies with p<0.025: k = 8 (100%) 
##    
## Results 
##  ----------------------- 
##                     pBinomial   zFull pFull   zHalf pHalf
## Right-skewness test     0.004 -19.956     0 -19.638     0
## Flatness test           1.000  19.996     1  20.172     1
## Note: p-values of 0 or 1 correspond to p<0.001 and p>0.999, respectively.   
## Power Estimate: 99% (99%-99%)
##    
## Evidential value 
##  ----------------------- 
## - Evidential value present: yes 
## - Evidential value absent/inadequate: no
## Review:     Piagetian g Correlations with Psychometric g
## 
##                          COR           95%-CI %W(random)
## DeVries & Kohlberg    0.8760 [0.8241; 0.9133]        9.3
## DeVries A             0.8120 [0.7251; 0.8734]        9.2
## DeVries B             0.7880 [0.7051; 0.8496]        9.3
## Hathaway K            0.8920 [0.8388; 0.9283]        9.2
## Hathaway II           0.9240 [0.9000; 0.9424]        9.5
## Kiga                  0.5750 [0.2729; 0.7739]        8.2
## Shul Combined         0.7640 [0.6431; 0.8477]        9.0
## Lim                   0.9040 [0.8898; 0.9164]        9.7
## Filled: DeVries B     0.9541 [0.9338; 0.9683]        9.3
## Filled: Shul Combined 0.9592 [0.9346; 0.9747]        9.0
## Filled: Kiga          0.9796 [0.9573; 0.9903]        8.2
## 
## Number of studies combined: k = 11 (with 3 added studies)
## 
##                         COR           95%-CI     t  p-value
## Random effects model 0.8940 [0.8120; 0.9414] 10.40 < 0.0001
## 
## Quantifying heterogeneity:
##  tau^2 = 0.1978 [0.0836; 0.6611]; tau = 0.4447 [0.2891; 0.8131]
##  I^2 = 91.3% [86.5%; 94.4%]; H = 3.40 [2.72; 4.24]
## 
## Test of heterogeneity:
##       Q d.f.  p-value
##  115.39   10 < 0.0001
## 
## Details on meta-analytical method:
## - Inverse variance method
## - Sidik-Jonkman estimator for tau^2
## - Q-profile method for confidence interval of tau^2 and tau
## - Hartung-Knapp adjustment for random effects model
## - Trim-and-fill method to adjust for funnel plot asymmetry
## - Fisher's z transformation of correlations

## Results for individual studies
## (left: original data; right: shrunken estimates)
## 
##                         COR           95%-CI       COR           95%-CI
## DeVries & Kohlberg   0.8760 [0.8241; 0.9133]    0.9243 [0.8915; 0.9475]
## DeVries A            0.8120 [0.7251; 0.8734]    0.9006 [0.8513; 0.9341]
## DeVries B            0.7880 [0.7051; 0.8496]    0.8719 [0.8185; 0.9104]
## Hathaway K           0.8920 [0.8388; 0.9283]    0.9419 [0.9121; 0.9618]
## Hathaway II          0.9240 [0.9000; 0.9424]    0.9428 [0.9245; 0.9567]
## Kiga                 0.5750 [0.2729; 0.7739]    0.9233 [0.8442; 0.9630]
## Shul Combined        0.7640 [0.6431; 0.8477]    0.8941 [0.8336; 0.9334]
## Lim                  0.9040 [0.8898; 0.9164]    0.9110 [0.8979; 0.9226]
## 
## Result of limit meta-analysis:
## 
##  Random effects model    COR           95%-CI     z     pval
##     Adjusted estimate 0.9166 [0.8379; 0.9579]  8.70 < 0.0001
##   Unadjusted estimate 0.8465 [0.7578; 0.9045] 11.63 < 0.0001
## 
## Quantifying heterogeneity:
## tau^2 = 0.0808; I^2 = 88.1% [78.8%; 93.3%]; G^2 = 2.4%
## 
## Test of heterogeneity:
##      Q d.f.  p-value
##  58.69    7 < 0.0001
## 
## Test of small-study effects:
##   Q-Q' d.f.  p-value
##  34.29    1 < 0.0001
## 
## Test of residual heterogeneity beyond small-study effects:
##     Q' d.f.  p-value
##  24.40    6   0.0004
## 
## Details on adjustment method:
## - expectation (beta0)

Discussion

It should be noted that many of the members of these samples were bright, provoking concern about range restriction.

Humphreys & Parsons' (1979) conclusion - that scale scores from different tests seem to be about as correlated with one another as they are with scores from conceptually unrelated tests like Piaget's - appears to be confirmed. For example, in the Hathaway kindergarten data, the WISC PIQ and VIQ scales correlate at r = 0.51, and they correlate with the total Piagetian score at r's of 0.46 and 0.51, respectively. The Piaget total score correlated with the Lorge-Thorndike total at 0.41, while the WISC PIQ and VIQ correlated with it at 0.65 and 0.34. In the later age group (second grade), these correlations become 0.54, 0.54, 0.55, 0.49, 0.45, and 0.61. In that older year, the PIQ, VIQ, Lorge-Thorndike total, and Piaget total correlated with the CAT reading, arithmetic, and language totals and the aggregate CAT total at 0.54/0.61/0.46/0.62 (PIQ), 0.41/0.48/0.37/0.50 (VIQ), 0.57/0.47/0.49/0.56 (LT), and 0.57/0.66/0.48/0.65 (Piaget). All of this occurs despite a lack of content overlap.

Loadings estimated in batteries containing only Piagetian or only psychometric tests do not differ meaningfully from those estimated in the combined batteries, indicating that, as Jensen was wont to exclaim (2001; see also Thorndike, 1987; Vernon, 1989), the g loading of a measure is more a property of the measure than of the battery it happens to be embedded in. Despite a lack of shared content, these diverse tests are strongly related, and they share perplexingly much if one subscribes to the idea that there is no psychometric g. It is always interesting to see just how robust the indifference of the indicator is.

In those batteries in which it could be tested, a model with a single g shared between the measures tended to fit better, especially when psychometric sampling error was at least partly reduced. The inability to fully handle that issue surely drives down g factor correlations, but in every battery in which testing was possible, the constructs clearly share too much to be interpreted independently; despite no shared content in many cases, the measures lack discriminant validity, and ordinary sampling error could explain much of what remains. The local independence observed reinforces this point. Power for reasonably definitive tests was low and, because the data were summary statistics, it is hard to conclude much from them alone, but the result does seem to generalize, as in Lim's (1988) data, which are of much higher quality and yield essentially the same conclusion.

Unfortunately, invariance between grades could not be tested in Hathaway’s data due to NPD issues and poor overall fits.

An interesting feature of the Hathaway data is that the Piagetian total was always more strongly related to the cognitive tests than to chronological age. Age was range restricted, however, and there was a discrepancy in Hathaway's summary statistics: the standard deviation for age somehow increased in the last grade, which would only be possible if, say, testing were lagged for some members of the sample or unmentioned attrition took place. Despite these issues, the finding is similar to Carpenter's (1955), that mental age (IQ) is more closely related to Piagetian concept-formation development than chronological age is (see also Russell, 1942; Elkind, 1961; Freyberg, 1966).

I am grateful to Laura Ackermann for providing me with the data from her analysis (Rindermann & Ackermann, 2020). My reanalysis reached much the same conclusion as the other studies analyzed here, but with raw data. These datasets were small, had minimal missingness, and suffered considerably from psychometric sampling error, but they nonetheless confirmed that psychometric general intelligence is at least strongly related to Piagetian intelligence, and the data are by no means inconsistent with the constructs being identical. Interestingly, invariance was achieved in Shul's different-aged samples.

From these samples, it appears that Piagetian "intelligence" is comparable to more typically defined psychometric or achievement-based representations of intelligence. At the least, it is too similar to be reliably distinguished from them, despite the testing instruments sharing, in many cases, no content at all.

References

Carroll, J. B., Kohlberg, L., & DeVries, R. (1984). Psychometric and Piagetian intelligences: Toward resolution of controversy. Intelligence, 8(1), 67–91. https://doi.org/10.1016/0160-2896(84)90007-2

DeVries, R., & Kohlberg, L. (1977). Relations between Piagetian and psychometric assessments of intelligence. In L. Katz (Ed.), Current topics in early childhood education (Vol. 1). Ablex Publishing.

DeVries, R. (1974). Relationships among Piagetian, IQ, and Achievement Assessments. Child Development, 45(3), 746–756. https://doi.org/10.2307/1127841

Hathaway, W. E. (1972). The degree and nature of the relations between traditional psychometric and Piagetian developmental measures of mental development. https://eric.ed.gov/?id=ED075485

Humphreys, L. G., & Parsons, C. K. (1979). Piagetian tasks measure intelligence and intelligence tests assess cognitive development: A reanalysis. Intelligence, 3(4), 369–381. https://doi.org/10.1016/0160-2896(79)90005-9

Jensen, A. R. (2001). Vocabulary and general intelligence. Behavioral and Brain Sciences, 24(6), 1109–1110. https://doi.org/10.1017/S0140525X01280133

Thorndike, R. L. (1987). Stability of factor loadings. Personality and Individual Differences, 8(4), 585–586. https://doi.org/10.1016/0191-8869(87)90224-8

Vernon, P. A. (1989). The generality of g. Personality and Individual Differences, 10(7), 803–804. https://doi.org/10.1016/0191-8869(89)90129-3

Lim, T. K. (1988). Relationships between standardized psychometric and Piagetian measures of intelligence at the formal operations level. Intelligence, 12(2), 167–182. https://doi.org/10.1016/0160-2896(88)90014-1

Carpenter, T. E. (1955). A Pilot Study for a Quantitative Investigation of Jean Piaget’s Original Work on Concept Formation. Educational Review, 7(2), 142–149. https://doi.org/10.1080/0013191550070207

Russell, R. W. (1942). Studies in animism: V. Animism in older children. The Pedagogical Seminary and Journal of Genetic Psychology, 60, 329–335. https://doi.org/10.1080/08856559.1942.10534642

Elkind, D. (1961). The development of quantitative thinking: A systematic replication of Piaget’s studies. The Journal of Genetic Psychology: Research and Theory on Human Development, 98, 37–46. https://doi.org/10.1080/00221325.1961.10534349

Freyberg, P. S. (1966). Concept development in Piagetian terms in relation to school attainment. Journal of Educational Psychology, 57(3), 164–168. https://doi.org/10.1037/h0023411

Rindermann, H., & Ackermann, A. L. (2020). Piagetian tasks and psychometric intelligence: Different or similar constructs? Psychological Reports. https://doi.org/10.1177/0033294120965876

Post-Script: Laura Ackermann’s Data

#Provided with permission from Dr. Ackermann.

datatable(Kiga46, extensions = c("Buttons", "FixedColumns"), options = list(dom = 'Bfrtip', buttons = c('copy', 'csv', 'print'), scrollX = T, fixedColumns = list(leftColumns = 3)))
datatable(ShulAll, extensions = c("Buttons", "FixedColumns"), options = list(dom = 'Bfrtip', buttons = c('copy', 'csv', 'print'), scrollX = T, fixedColumns = list(leftColumns = 3)))