Initial Program - Disposition
## Load the packages
library(psych)
library(irr)
## Loading required package: lpSolve
library(Metrics)
ELED
rater1 <- c(2,2,2,2,4,4,4,4,3,3,3,4,3,3,3,3) # rater one's ratings
rater2 <- c(3,3,3,3,4,3,4,3,2,4,3,3,3,3,3,4) # rater two's ratings
cohen.kappa(x=cbind(rater1,rater2),alpha=.5)
## Call: cohen.kappa1(x = x, w = w, n.obs = n.obs, alpha = alpha, levels = levels)
##
## Cohen Kappa and Weighted Kappa correlation coefficients and confidence boundaries
## lower estimate upper
## unweighted kappa -0.1530 -0.032 0.089
## weighted kappa 0.0088 0.266 0.523
##
## Number of subjects = 16
kappa2(cbind(rater1,rater2))
## Cohen's Kappa for 2 Raters (Weights: unweighted)
##
## Subjects = 16
## Raters = 2
## Kappa = -0.0323
##
## z = -0.194
## p-value = 0.847
ScoreQuadraticWeightedKappa(rater1,rater2)
## [1] 0.266055
- Unweighted kappa = -0.032
- Weighted kappa = 0.266
- Quadratic Weighted Kappa = 0.266
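For reference, a minimal sketch (ours, not part of the original analysis) of what cohen.kappa() and kappa2() compute in the unweighted case, using the ELED ratings above (redefined here so the snippet stands alone):

rater1 <- c(2,2,2,2,4,4,4,4,3,3,3,4,3,3,3,3)
rater2 <- c(3,3,3,3,4,3,4,3,2,4,3,3,3,3,3,4)
tab <- table(factor(rater1, levels = 1:4), factor(rater2, levels = 1:4)) # confusion matrix
po <- sum(diag(tab)) / sum(tab) # observed proportion of exact agreement
pe <- sum(rowSums(tab) * colSums(tab)) / sum(tab)^2 # agreement expected by chance
(po - pe) / (1 - pe) # unweighted kappa, about -0.032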
SEEE
rater1 <- c(3,3,3,3) # rater one's ratings
rater2 <- c(3,3,3,3) # rater two's ratings
cohen.kappa(cbind(rater1,rater2))
## Your data seem to have no variance and in complete agreement across raters. Check your data.
## Call: cohen.kappa1(x = x, w = w, n.obs = n.obs, alpha = alpha, levels = levels)
##
## Cohen Kappa and Weighted Kappa correlation coefficients and confidence boundaries
## lower estimate upper
## unweighted kappa NA NA NA
## weighted kappa NaN NaN NaN
##
## Number of subjects = 4
kappa2(cbind(rater1,rater2))
## Cohen's Kappa for 2 Raters (Weights: unweighted)
##
## Subjects = 4
## Raters = 2
## Kappa = NaN
##
## z = NaN
## p-value = NaN
ScoreQuadraticWeightedKappa(rater1,rater2)
## [1] NaN
- Cannot be computed because the raters are in complete agreement with no variance; treated here as kappa = 1.
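When both raters assign the same single rating throughout, chance agreement pe equals 1, so kappa's denominator 1 - pe is zero and the estimate is 0/0 = NaN. One common fallback (our suggestion; none of the packages above does this automatically) is to report raw percent agreement alongside:

rater1 <- c(3,3,3,3)
rater2 <- c(3,3,3,3)
mean(rater1 == rater2) # 1: perfect raw agreement even though kappa is undefined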
SESI
rater1 <- c(2,2,3,3) # rater one's ratings
rater2 <- c(3,3,3,3) # rater two's ratings
cohen.kappa(cbind(rater1,rater2))
## Call: cohen.kappa1(x = x, w = w, n.obs = n.obs, alpha = alpha, levels = levels)
##
## Cohen Kappa and Weighted Kappa correlation coefficients and confidence boundaries
## lower estimate upper
## unweighted kappa 0 0 0
## weighted kappa 0 0 0
##
## Number of subjects = 4
kappa2(cbind(rater1,rater2))
## Cohen's Kappa for 2 Raters (Weights: unweighted)
##
## Subjects = 4
## Raters = 2
## Kappa = 0
##
## z = NaN
## p-value = NaN
- Kappa = 0 (no z score or p-value because one of the raters used only one level)
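Why the estimate is exactly 0: when one rater uses a single level, observed agreement reduces to the share of that level in the other rater's scores, which is also the chance agreement, so kappa's numerator vanishes. A quick check (ours):

rater1 <- c(2,2,3,3)
rater2 <- c(3,3,3,3) # constant rater
po <- mean(rater1 == rater2) # observed agreement, 0.5
pe <- mean(rater1 == 3) * mean(rater2 == 3) # chance agreement, also 0.5
(po - pe) / (1 - pe) # 0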
HPPE
rater1 <- c(3,3,3,3) # rater one's ratings
rater2 <- c(3,3,3,3) # rater two's ratings
cohen.kappa(cbind(rater1,rater2))
## Your data seem to have no variance and in complete agreement across raters. Check your data.
## Call: cohen.kappa1(x = x, w = w, n.obs = n.obs, alpha = alpha, levels = levels)
##
## Cohen Kappa and Weighted Kappa correlation coefficients and confidence boundaries
## lower estimate upper
## unweighted kappa NA NA NA
## weighted kappa NaN NaN NaN
##
## Number of subjects = 4
kappa2(cbind(rater1,rater2))
## Cohen's Kappa for 2 Raters (Weights: unweighted)
##
## Subjects = 4
## Raters = 2
## Kappa = NaN
##
## z = NaN
## p-value = NaN
- Cannot be computed because the raters are in complete agreement with no variance; treated here as kappa = 1.
SELA
rater1 <- c(3,4,3,3) # rater one's ratings
rater2 <- c(3,3,3,3) # rater two's ratings
cohen.kappa(cbind(rater1,rater2))
## Call: cohen.kappa1(x = x, w = w, n.obs = n.obs, alpha = alpha, levels = levels)
##
## Cohen Kappa and Weighted Kappa correlation coefficients and confidence boundaries
## lower estimate upper
## unweighted kappa 0 0 0
## weighted kappa 0 0 0
##
## Number of subjects = 4
kappa2(cbind(rater1,rater2))
## Cohen's Kappa for 2 Raters (Weights: unweighted)
##
## Subjects = 4
## Raters = 2
## Kappa = 0
##
## z = NaN
## p-value = NaN
- Kappa = 0 (no z score or p-value because one of the raters used only one level)
SPCO
rater1 <- c(3,4,3,4) # rater one's ratings
rater2 <- c(4,3,4,3) # rater two's ratings
cohen.kappa(cbind(rater1,rater2))
## Call: cohen.kappa1(x = x, w = w, n.obs = n.obs, alpha = alpha, levels = levels)
##
## Cohen Kappa and Weighted Kappa correlation coefficients and confidence boundaries
## lower estimate upper
## unweighted kappa -1 -1 -1
## weighted kappa -1 -1 -1
##
## Number of subjects = 4
kappa2(cbind(rater1,rater2))
## Cohen's Kappa for 2 Raters (Weights: unweighted)
##
## Subjects = 4
## Raters = 2
## Kappa = -1
##
## z = -2
## p-value = 0.0455
- Kappa = -1: complete disagreement.
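A quick check (ours) of why kappa bottoms out at -1 here: the raters never match, and both split their scores 50/50 between levels 3 and 4, so chance agreement is exactly 0.5:

po <- 0 # zero observed agreement in the SPCO ratings above
pe <- 0.5 * 0.5 + 0.5 * 0.5 # chance agreement from the 50/50 marginals
(po - pe) / (1 - pe) # -1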
SESS
rater1 <- c(3,2,2,2) # rater one's ratings
rater2 <- c(2,3,2,3) # rater two's ratings
cohen.kappa(cbind(rater1,rater2))
## Warning in cohen.kappa1(x, w = w, n.obs = n.obs, alpha = alpha, levels =
## levels): upper or lower confidence interval exceed abs(1) and set to +/- 1.
## Call: cohen.kappa1(x = x, w = w, n.obs = n.obs, alpha = alpha, levels = levels)
##
## Cohen Kappa and Weighted Kappa correlation coefficients and confidence boundaries
## lower estimate upper
## unweighted kappa -1 -0.5 0.23
## weighted kappa -1 -0.5 0.23
##
## Number of subjects = 4
kappa2(cbind(rater1,rater2))
## Cohen's Kappa for 2 Raters (Weights: unweighted)
##
## Subjects = 4
## Raters = 2
## Kappa = -0.5
##
## z = -1.15
## p-value = 0.248
ScoreQuadraticWeightedKappa(rater1,rater2)
## [1] -0.5
- Unweighted kappa = -0.5
- Weighted kappa = -0.5
- Quadratic Weighted Kappa = -0.5
Initial Program - Portfolio
ELED
rater1 <- c(2,2,2,2,2,2,3,4,3,4,2,2,3,4,3,4,4,3,3,3,3,3,3,3) # rater one's ratings
rater2 <- c(3,3,4,3,3,2,4,4,4,4,4,4,4,3,3,4,3,3,3,3,4,4,3,3) # rater two's ratings
cohen.kappa(cbind(rater1,rater2))
## Call: cohen.kappa1(x = x, w = w, n.obs = n.obs, alpha = alpha, levels = levels)
##
## Cohen Kappa and Weighted Kappa correlation coefficients and confidence boundaries
## lower estimate upper
## unweighted kappa -0.14 0.12 0.38
## weighted kappa -0.33 0.16 0.65
##
## Number of subjects = 24
kappa2(cbind(rater1,rater2))
## Cohen's Kappa for 2 Raters (Weights: unweighted)
##
## Subjects = 24
## Raters = 2
## Kappa = 0.118
##
## z = 0.943
## p-value = 0.346
ScoreQuadraticWeightedKappa(rater1,rater2)
## [1] 0.1636364
- Unweighted kappa = 0.12
- Weighted kappa = 0.16
- Quadratic Weighted Kappa = 0.16
SEEE
rater1 <- c(3,3,3,3,3,3) # rater one's ratings
rater2 <- c(3,3,3,3,2,3) # rater two's ratings
cohen.kappa(cbind(rater1,rater2))
## Call: cohen.kappa1(x = x, w = w, n.obs = n.obs, alpha = alpha, levels = levels)
##
## Cohen Kappa and Weighted Kappa correlation coefficients and confidence boundaries
## lower estimate upper
## unweighted kappa -1.3e-07 0 1.3e-07
## weighted kappa -1.2e-07 0 1.2e-07
##
## Number of subjects = 6
kappa2(cbind(rater1,rater2))
## Cohen's Kappa for 2 Raters (Weights: unweighted)
##
## Subjects = 6
## Raters = 2
## Kappa = 0
##
## z = 0
## p-value = 1
- Kappa = 0 (one of the raters used only one level)
SESI
rater1 <- c(2,3,3,3,3,2) # rater one's ratings
rater2 <- c(3,3,3,3,3,3) # rater two's ratings
cohen.kappa(cbind(rater1,rater2))
## Call: cohen.kappa1(x = x, w = w, n.obs = n.obs, alpha = alpha, levels = levels)
##
## Cohen Kappa and Weighted Kappa correlation coefficients and confidence boundaries
## lower estimate upper
## unweighted kappa 0 0 0
## weighted kappa 0 0 0
##
## Number of subjects = 6
kappa2(cbind(rater1,rater2))
## Warning in sqrt(varkappa): NaNs produced
## Cohen's Kappa for 2 Raters (Weights: unweighted)
##
## Subjects = 6
## Raters = 2
## Kappa = 0
##
## z = NaN
## p-value = NaN
- Kappa = 0 (no z score or p-value because one of the raters used only one level)
HPPE
rater1 <- c(3,3,3,3,4,3) # rater one's ratings
rater2 <- c(3,3,3,3,3,3) # rater two's ratings
cohen.kappa(cbind(rater1,rater2))
## Call: cohen.kappa1(x = x, w = w, n.obs = n.obs, alpha = alpha, levels = levels)
##
## Cohen Kappa and Weighted Kappa correlation coefficients and confidence boundaries
## lower estimate upper
## unweighted kappa -1.3e-07 0 1.3e-07
## weighted kappa -1.2e-07 0 1.2e-07
##
## Number of subjects = 6
kappa2(cbind(rater1,rater2))
## Cohen's Kappa for 2 Raters (Weights: unweighted)
##
## Subjects = 6
## Raters = 2
## Kappa = 0
##
## z = 0
## p-value = 1
- Kappa = 0 (one of the raters used only one level)
SELA
rater1 <- c(3,3,3,3,3,3) # rater one's ratings
rater2 <- c(3,3,3,3,3,3) # rater two's ratings
cohen.kappa(cbind(rater1,rater2))
## Your data seem to have no variance and in complete agreement across raters. Check your data.
## Call: cohen.kappa1(x = x, w = w, n.obs = n.obs, alpha = alpha, levels = levels)
##
## Cohen Kappa and Weighted Kappa correlation coefficients and confidence boundaries
## lower estimate upper
## unweighted kappa NA NA NA
## weighted kappa NaN NaN NaN
##
## Number of subjects = 6
kappa2(cbind(rater1,rater2))
## Cohen's Kappa for 2 Raters (Weights: unweighted)
##
## Subjects = 6
## Raters = 2
## Kappa = NaN
##
## z = NaN
## p-value = NaN
- Cannot be computed because the raters are in complete agreement with no variance; treated here as kappa = 1.
SPCO
rater1 <- c(4,4,3,3,4,3) # rater one's ratings
rater2 <- c(4,3,4,4,3,3) # rater two's ratings
cohen.kappa(cbind(rater1,rater2))
## Warning in cohen.kappa1(x, w = w, n.obs = n.obs, alpha = alpha, levels =
## levels): upper or lower confidence interval exceed abs(1) and set to +/- 1.
## Call: cohen.kappa1(x = x, w = w, n.obs = n.obs, alpha = alpha, levels = levels)
##
## Cohen Kappa and Weighted Kappa correlation coefficients and confidence boundaries
## lower estimate upper
## unweighted kappa -1 -0.33 0.42
## weighted kappa -1 -0.33 0.42
##
## Number of subjects = 6
kappa2(cbind(rater1,rater2))
## Cohen's Kappa for 2 Raters (Weights: unweighted)
##
## Subjects = 6
## Raters = 2
## Kappa = -0.333
##
## z = -0.816
## p-value = 0.414
ScoreQuadraticWeightedKappa(rater1,rater2)
## [1] -0.3333333
- Unweighted kappa = -0.33
- Weighted kappa = -0.33
- Quadratic Weighted Kappa = -0.33
SESS
rater1 <- c(2,2,3,3,2,1) # rater one's ratings
rater2 <- c(3,3,2,2,3,2) # rater two's ratings
cohen.kappa(cbind(rater1,rater2))
## Warning in cohen.kappa1(x, w = w, n.obs = n.obs, alpha = alpha, levels =
## levels): upper or lower confidence interval exceed abs(1) and set to +/- 1.
## Call: cohen.kappa1(x = x, w = w, n.obs = n.obs, alpha = alpha, levels = levels)
##
## Cohen Kappa and Weighted Kappa correlation coefficients and confidence boundaries
## lower estimate upper
## unweighted kappa -1 -0.71 -0.32
## weighted kappa -1 -0.20 1.00
##
## Number of subjects = 6
kappa2(cbind(rater1,rater2))
## Cohen's Kappa for 2 Raters (Weights: unweighted)
##
## Subjects = 6
## Raters = 2
## Kappa = -0.714
##
## z = -2.27
## p-value = 0.0229
ScoreQuadraticWeightedKappa(rater1,rater2)
## [1] -0.2
- Unweighted kappa = -0.71
- Weighted kappa = -0.20
- Quadratic Weighted Kappa = -0.20
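This section is the clearest illustration of what the quadratic weights do: near-misses such as 2 vs 3 earn partial credit, so the weighted estimate (-0.20) is much milder than the unweighted one (-0.71). A hedged sketch of the standard quadratic-weight formula, 1 - (i - j)^2 / (k - 1)^2, which reproduces the ScoreQuadraticWeightedKappa value above (our reconstruction; the package's internals may differ in detail):

rater1 <- c(2,2,3,3,2,1)
rater2 <- c(3,3,2,2,3,2)
lev <- min(c(rater1, rater2)):max(c(rater1, rater2)) # scale actually used: 1 to 3
k <- length(lev)
w <- 1 - outer(lev, lev, "-")^2 / (k - 1)^2 # quadratic agreement weights
tab <- table(factor(rater1, lev), factor(rater2, lev)) / length(rater1) # observed proportions
ex <- outer(rowSums(tab), colSums(tab)) # proportions expected by chance
(sum(w * tab) - sum(w * ex)) / (1 - sum(w * ex)) # about -0.2, matching the output above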
MA Level - Disposition
COED
rater1 <- c(3,4,3,4) # rater one's ratings
rater2 <- c(3,3,4,3) # rater two's ratings
cohen.kappa(cbind(rater1,rater2))
## Warning in cohen.kappa1(x, w = w, n.obs = n.obs, alpha = alpha, levels =
## levels): upper or lower confidence interval exceed abs(1) and set to +/- 1.
## Call: cohen.kappa1(x = x, w = w, n.obs = n.obs, alpha = alpha, levels = levels)
##
## Cohen Kappa and Weighted Kappa correlation coefficients and confidence boundaries
## lower estimate upper
## unweighted kappa -1 -0.5 0.23
## weighted kappa -1 -0.5 0.23
##
## Number of subjects = 4
kappa2(cbind(rater1,rater2))
## Cohen's Kappa for 2 Raters (Weights: unweighted)
##
## Subjects = 4
## Raters = 2
## Kappa = -0.5
##
## z = -1.15
## p-value = 0.248
ScoreQuadraticWeightedKappa(rater1,rater2)
## [1] -0.5
- Unweighted kappa = -0.5
- Weighted kappa = -0.5
- Quadratic Weighted Kappa = -0.5
ELED
rater1 <- c(3,3,3,3) # rater one's ratings
rater2 <- c(3,2,3,2) # rater two's ratings
cohen.kappa(cbind(rater1,rater2))
## Call: cohen.kappa1(x = x, w = w, n.obs = n.obs, alpha = alpha, levels = levels)
##
## Cohen Kappa and Weighted Kappa correlation coefficients and confidence boundaries
## lower estimate upper
## unweighted kappa 0 0 0
## weighted kappa 0 0 0
##
## Number of subjects = 4
kappa2(cbind(rater1,rater2))
## Cohen's Kappa for 2 Raters (Weights: unweighted)
##
## Subjects = 4
## Raters = 2
## Kappa = 0
##
## z = NaN
## p-value = NaN
- Kappa = 0 (one of the raters used only one level)
SPGT
rater1 <- c(4,3,3,4) # rater one's ratings
rater2 <- c(3,3,3,3) # rater two's ratings
cohen.kappa(cbind(rater1,rater2))
## Call: cohen.kappa1(x = x, w = w, n.obs = n.obs, alpha = alpha, levels = levels)
##
## Cohen Kappa and Weighted Kappa correlation coefficients and confidence boundaries
## lower estimate upper
## unweighted kappa 0 0 0
## weighted kappa 0 0 0
##
## Number of subjects = 4
kappa2(cbind(rater1,rater2))
## Cohen's Kappa for 2 Raters (Weights: unweighted)
##
## Subjects = 4
## Raters = 2
## Kappa = 0
##
## z = NaN
## p-value = NaN
- Kappa = 0 (one of the raters used only one level)
EDLE
rater1 <- c(3,3,3,3) # rater one's ratings
rater2 <- c(3,3,3,3) # rater two's ratings
cohen.kappa(cbind(rater1,rater2))
## Your data seem to have no variance and in complete agreement across raters. Check your data.
## Call: cohen.kappa1(x = x, w = w, n.obs = n.obs, alpha = alpha, levels = levels)
##
## Cohen Kappa and Weighted Kappa correlation coefficients and confidence boundaries
## lower estimate upper
## unweighted kappa NA NA NA
## weighted kappa NaN NaN NaN
##
## Number of subjects = 4
kappa2(cbind(rater1,rater2))
## Cohen's Kappa for 2 Raters (Weights: unweighted)
##
## Subjects = 4
## Raters = 2
## Kappa = NaN
##
## z = NaN
## p-value = NaN
- Cannot be computed because the raters are in complete agreement with no variance; treated here as kappa = 1.
SPCO
rater1 <- c(4,3,3,3) # rater one's ratings
rater2 <- c(4,3,3,3) # rater two's ratings
cohen.kappa(x=cbind(rater1,rater2),alpha=.5)
## Call: cohen.kappa1(x = x, w = w, n.obs = n.obs, alpha = alpha, levels = levels)
##
## Cohen Kappa and Weighted Kappa correlation coefficients and confidence boundaries
## lower estimate upper
## unweighted kappa 1 1 1
## weighted kappa 1 1 1
##
## Number of subjects = 4
kappa2(cbind(rater1,rater2))
## Cohen's Kappa for 2 Raters (Weights: unweighted)
##
## Subjects = 4
## Raters = 2
## Kappa = 1
##
## z = 2
## p-value = 0.0455
ScoreQuadraticWeightedKappa(rater1,rater2)
## [1] 1
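- Kappa = 1: the raters are in complete agreement.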
EDSY
rater1 <- c(4,4,4,4) # rater one's ratings
rater2 <- c(4,4,3,3) # rater two's ratings
cohen.kappa(x=cbind(rater1,rater2),alpha=.5)
## Call: cohen.kappa1(x = x, w = w, n.obs = n.obs, alpha = alpha, levels = levels)
##
## Cohen Kappa and Weighted Kappa correlation coefficients and confidence boundaries
## lower estimate upper
## unweighted kappa 0 0 0
## weighted kappa 0 0 0
##
## Number of subjects = 4
kappa2(cbind(rater1,rater2))
## Cohen's Kappa for 2 Raters (Weights: unweighted)
##
## Subjects = 4
## Raters = 2
## Kappa = 0
##
## z = NaN
## p-value = NaN
ScoreQuadraticWeightedKappa(rater1,rater2)
## [1] 0
- Kappa = 0 (one of the raters used only one level)
MA Level - Portfolio
COED
rater1 <- c(3,3,3,3,4,4) # rater one's ratings
rater2 <- c(3,3,3,3,3,3) # rater two's ratings
cohen.kappa(x=cbind(rater1,rater2),alpha=.5)
## Call: cohen.kappa1(x = x, w = w, n.obs = n.obs, alpha = alpha, levels = levels)
##
## Cohen Kappa and Weighted Kappa correlation coefficients and confidence boundaries
## lower estimate upper
## unweighted kappa 0 0 0
## weighted kappa 0 0 0
##
## Number of subjects = 6
kappa2(cbind(rater1,rater2))
## Warning in sqrt(varkappa): NaNs produced
## Cohen's Kappa for 2 Raters (Weights: unweighted)
##
## Subjects = 6
## Raters = 2
## Kappa = 0
##
## z = NaN
## p-value = NaN
ScoreQuadraticWeightedKappa(rater1,rater2)
## [1] 0
- Kappa = 0 (one of the raters used only one level)
ELED
rater1 <- c(4,3,4,3,3,4) # rater one's ratings
rater2 <- c(2,3,2,2,3,2) # rater two's ratings
cohen.kappa(x=cbind(rater1,rater2),alpha=.5)
## Call: cohen.kappa1(x = x, w = w, n.obs = n.obs, alpha = alpha, levels = levels)
##
## Cohen Kappa and Weighted Kappa correlation coefficients and confidence boundaries
## lower estimate upper
## unweighted kappa 0.13 0.20 0.2697
## weighted kappa -0.36 -0.18 -0.0018
##
## Number of subjects = 6
kappa2(cbind(rater1,rater2))
## Cohen's Kappa for 2 Raters (Weights: unweighted)
##
## Subjects = 6
## Raters = 2
## Kappa = 0.2
##
## z = 1.73
## p-value = 0.0833
ScoreQuadraticWeightedKappa(rater1,rater2)
## [1] -0.1818182
- Unweighted kappa = 0.20
- Weighted kappa = -0.182
- Quadratic Weighted Kappa = -0.182
SPGT
rater1 <- c(3,4,3,4,4,4) # rater one's ratings
rater2 <- c(3,3,3,3,3,3) # rater two's ratings
cohen.kappa(x=cbind(rater1,rater2),alpha=.5)
## Call: cohen.kappa1(x = x, w = w, n.obs = n.obs, alpha = alpha, levels = levels)
##
## Cohen Kappa and Weighted Kappa correlation coefficients and confidence boundaries
## lower estimate upper
## unweighted kappa 0 0 0
## weighted kappa 0 0 0
##
## Number of subjects = 6
kappa2(cbind(rater1,rater2))
## Warning in sqrt(varkappa): NaNs produced
## Cohen's Kappa for 2 Raters (Weights: unweighted)
##
## Subjects = 6
## Raters = 2
## Kappa = 0
##
## z = NaN
## p-value = NaN
ScoreQuadraticWeightedKappa(rater1,rater2)
## [1] 0
- Kappa = 0 (one of the raters used only one level)
EDLE
rater1 <- c(3,3,3,3,2,3) # rater one's ratings
rater2 <- c(3,3,3,3,3,3) # rater two's ratings
cohen.kappa(x=cbind(rater1,rater2),alpha=.5)
## Call: cohen.kappa1(x = x, w = w, n.obs = n.obs, alpha = alpha, levels = levels)
##
## Cohen Kappa and Weighted Kappa correlation coefficients and confidence boundaries
## lower estimate upper
## unweighted kappa -4.5e-08 0 4.5e-08
## weighted kappa -4.1e-08 0 4.1e-08
##
## Number of subjects = 6
kappa2(cbind(rater1,rater2))
## Cohen's Kappa for 2 Raters (Weights: unweighted)
##
## Subjects = 6
## Raters = 2
## Kappa = 0
##
## z = 0
## p-value = 1
ScoreQuadraticWeightedKappa(rater1,rater2)
## [1] 0
- Kappa = 0 (one of the raters used only one level)
SPCO
rater1 <- c(3,3,3,3,4,3) # rater one's ratings
rater2 <- c(3,3,3,3,4,3) # rater two's ratings
cohen.kappa(x=cbind(rater1,rater2),alpha=.5)
## Warning in cohen.kappa1(x, w = w, n.obs = n.obs, alpha = alpha, levels =
## levels): upper or lower confidence interval exceed abs(1) and set to +/- 1.
## Call: cohen.kappa1(x = x, w = w, n.obs = n.obs, alpha = alpha, levels = levels)
##
## Cohen Kappa and Weighted Kappa correlation coefficients and confidence boundaries
## lower estimate upper
## unweighted kappa 1 1 1
## weighted kappa 1 1 1
##
## Number of subjects = 6
kappa2(cbind(rater1,rater2))
## Cohen's Kappa for 2 Raters (Weights: unweighted)
##
## Subjects = 6
## Raters = 2
## Kappa = 1
##
## z = 2.45
## p-value = 0.0143
ScoreQuadraticWeightedKappa(rater1,rater2)
## [1] 1
- Kappa = 1: the raters are in complete agreement.
EDSY
rater1 <- c(3,4,3,3,4,4) # rater one's ratings
rater2 <- c(3,3,4,3,4,3) # rater two's ratings
cohen.kappa(x=cbind(rater1,rater2),alpha=.5)
## Call: cohen.kappa1(x = x, w = w, n.obs = n.obs, alpha = alpha, levels = levels)
##
## Cohen Kappa and Weighted Kappa correlation coefficients and confidence boundaries
## lower estimate upper
## unweighted kappa -0.26 0 0.26
## weighted kappa -0.26 0 0.26
##
## Number of subjects = 6
kappa2(cbind(rater1,rater2))
## Cohen's Kappa for 2 Raters (Weights: unweighted)
##
## Subjects = 6
## Raters = 2
## Kappa = 0
##
## z = 0
## p-value = 1
ScoreQuadraticWeightedKappa(rater1,rater2)
## [1] 0
- Unweighted kappa = 0
- Weighted kappa = 0
- Quadratic Weighted Kappa = 0
EDS - Disposition
EDSP
rater1 <- c(2,2,3,2) # rater one's ratings
rater2 <- c(3,4,4,3) # rater two's ratings
cohen.kappa(x=cbind(rater1,rater2),alpha=.5)
## Call: cohen.kappa1(x = x, w = w, n.obs = n.obs, alpha = alpha, levels = levels)
##
## Cohen Kappa and Weighted Kappa correlation coefficients and confidence boundaries
## lower estimate upper
## unweighted kappa -0.22 -0.14 -0.065
## weighted kappa 0.12 0.12 0.125
##
## Number of subjects = 4
kappa2(cbind(rater1,rater2))
## Cohen's Kappa for 2 Raters (Weights: unweighted)
##
## Subjects = 4
## Raters = 2
## Kappa = -0.143
##
## z = -1.15
## p-value = 0.248
ScoreQuadraticWeightedKappa(rater1,rater2)
## [1] 0.125
- Unweighted kappa = -0.14
- Weighted kappa = 0.125
- Quadratic Weighted Kappa = 0.125
EDLE
rater1 <- c(3,3,3,3) # rater one's ratings
rater2 <- c(3,3,3,3) # rater two's ratings
cohen.kappa(x=cbind(rater1,rater2),alpha=.5)
## Your data seem to have no variance and in complete agreement across raters. Check your data.
## Call: cohen.kappa1(x = x, w = w, n.obs = n.obs, alpha = alpha, levels = levels)
##
## Cohen Kappa and Weighted Kappa correlation coefficients and confidence boundaries
## lower estimate upper
## unweighted kappa NA NA NA
## weighted kappa NaN NaN NaN
##
## Number of subjects = 4
kappa2(cbind(rater1,rater2))
## Cohen's Kappa for 2 Raters (Weights: unweighted)
##
## Subjects = 4
## Raters = 2
## Kappa = NaN
##
## z = NaN
## p-value = NaN
ScoreQuadraticWeightedKappa(rater1,rater2)
## [1] NaN
- Cannot be computed because the raters are in complete agreement with no variance; treated here as kappa = 1.
ELED
rater1 <- c(2,1,2,2) # rater one's ratings
rater2 <- c(4,4,4,4) # rater two's ratings
cohen.kappa(x=cbind(rater1,rater2),alpha=.5)
## Call: cohen.kappa1(x = x, w = w, n.obs = n.obs, alpha = alpha, levels = levels)
##
## Cohen Kappa and Weighted Kappa correlation coefficients and confidence boundaries
## lower estimate upper
## unweighted kappa 0 0 0
## weighted kappa 0 0 0
##
## Number of subjects = 4
kappa2(cbind(rater1,rater2))
## Cohen's Kappa for 2 Raters (Weights: unweighted)
##
## Subjects = 4
## Raters = 2
## Kappa = 0
##
## z = NaN
## p-value = NaN
ScoreQuadraticWeightedKappa(rater1,rater2)
## [1] 0
- Kappa = 0 (one of the raters used only one level)
COED
rater1 <- c(4,4,4,4) # rater one's ratings
rater2 <- c(3,3,3,2) # rater two's ratings
cohen.kappa(x=cbind(rater1,rater2),alpha=.5)
## Call: cohen.kappa1(x = x, w = w, n.obs = n.obs, alpha = alpha, levels = levels)
##
## Cohen Kappa and Weighted Kappa correlation coefficients and confidence boundaries
## lower estimate upper
## unweighted kappa 0 0 0
## weighted kappa 0 0 0
##
## Number of subjects = 4
kappa2(cbind(rater1,rater2))
## Cohen's Kappa for 2 Raters (Weights: unweighted)
##
## Subjects = 4
## Raters = 2
## Kappa = 0
##
## z = NaN
## p-value = NaN
ScoreQuadraticWeightedKappa(rater1,rater2)
## [1] 0
- Kappa = 0 (one of the raters used only one level)
EDS - Portfolio
EDSP
rater1 <- c(3,2,2,2,3,2) # rater one's ratings
rater2 <- c(3,3,3,3,4,3) # rater two's ratings
cohen.kappa(x=cbind(rater1,rater2),alpha=.5)
## Call: cohen.kappa1(x = x, w = w, n.obs = n.obs, alpha = alpha, levels = levels)
##
## Cohen Kappa and Weighted Kappa correlation coefficients and confidence boundaries
## lower estimate upper
## unweighted kappa -0.24 -0.15 -0.071
## weighted kappa 0.21 0.21 0.211
##
## Number of subjects = 6
kappa2(cbind(rater1,rater2))
## Cohen's Kappa for 2 Raters (Weights: unweighted)
##
## Subjects = 6
## Raters = 2
## Kappa = -0.154
##
## z = -1.55
## p-value = 0.121
ScoreQuadraticWeightedKappa(rater1,rater2)
## [1] 0.2105263
- Unweighted kappa = -0.15
- Weighted kappa = 0.211
- Quadratic Weighted Kappa = 0.211
EDLE
rater1 <- c(3,3,3,3,3,3) # rater one's ratings
rater2 <- c(3,3,3,3,3,3) # rater two's ratings
cohen.kappa(x=cbind(rater1,rater2),alpha=.5)
## Your data seem to have no variance and in complete agreement across raters. Check your data.
## Call: cohen.kappa1(x = x, w = w, n.obs = n.obs, alpha = alpha, levels = levels)
##
## Cohen Kappa and Weighted Kappa correlation coefficients and confidence boundaries
## lower estimate upper
## unweighted kappa NA NA NA
## weighted kappa NaN NaN NaN
##
## Number of subjects = 6
kappa2(cbind(rater1,rater2))
## Cohen's Kappa for 2 Raters (Weights: unweighted)
##
## Subjects = 6
## Raters = 2
## Kappa = NaN
##
## z = NaN
## p-value = NaN
ScoreQuadraticWeightedKappa(rater1,rater2)
## [1] NaN
- Cannot be computed because the raters are in complete agreement with no variance; treated here as kappa = 1.
ELED
rater1 <- c(2,2,2,2,2,1) # rater one's ratings
rater2 <- c(4,4,4,4,3,3) # rater two's ratings
cohen.kappa(x=cbind(rater1,rater2),alpha=.5)
## Call: cohen.kappa1(x = x, w = w, n.obs = n.obs, alpha = alpha, levels = levels)
##
## Cohen Kappa and Weighted Kappa correlation coefficients and confidence boundaries
## lower estimate upper
## unweighted kappa 0.00 0.00 0.00
## weighted kappa 0.06 0.06 0.06
##
## Number of subjects = 6
kappa2(cbind(rater1,rater2))
## Cohen's Kappa for 2 Raters (Weights: unweighted)
##
## Subjects = 6
## Raters = 2
## Kappa = 0
##
## z = NaN
## p-value = NaN
ScoreQuadraticWeightedKappa(rater1,rater2)
## [1] 0.05970149
- Unweighted kappa = 0
- Weighted kappa = 0.06
- Quadratic Weighted Kappa = 0.06
COED
rater1 <- c(4,4,3,4,4,3) # rater one's ratings
rater2 <- c(3,3,3,4,4,3) # rater two's ratings
cohen.kappa(x=cbind(rater1,rater2),alpha=.5)
## Call: cohen.kappa1(x = x, w = w, n.obs = n.obs, alpha = alpha, levels = levels)
##
## Cohen Kappa and Weighted Kappa correlation coefficients and confidence boundaries
## lower estimate upper
## unweighted kappa 0.21 0.4 0.59
## weighted kappa 0.21 0.4 0.59
##
## Number of subjects = 6
kappa2(cbind(rater1,rater2))
## Cohen's Kappa for 2 Raters (Weights: unweighted)
##
## Subjects = 6
## Raters = 2
## Kappa = 0.4
##
## z = 1.22
## p-value = 0.221
ScoreQuadraticWeightedKappa(rater1,rater2)
## [1] 0.4
- Unweighted kappa = 0.4
- Weighted kappa = 0.4
- Quadratic Weighted Kappa = 0.4
Standards for Initial ELED - Dispositions
Standard 1 - Diversity
rater1 <- c(2,4,3,3) # rater one's ratings
rater2 <- c(3,4,2,3) # rater two's ratings
cohen.kappa(x=cbind(rater1,rater2),alpha=.5)
## Call: cohen.kappa1(x = x, w = w, n.obs = n.obs, alpha = alpha, levels = levels)
##
## Cohen Kappa and Weighted Kappa correlation coefficients and confidence boundaries
## lower estimate upper
## unweighted kappa -0.08 0.2 0.48
## weighted kappa 0.19 0.5 0.81
##
## Number of subjects = 4
kappa2(cbind(rater1,rater2))
## Cohen's Kappa for 2 Raters (Weights: unweighted)
##
## Subjects = 4
## Raters = 2
## Kappa = 0.2
##
## z = 0.555
## p-value = 0.579
ScoreQuadraticWeightedKappa(rater1,rater2)
## [1] 0.5
- Unweighted kappa = 0.2
- Quadratic Weighted Kappa = 0.5
Standard 2 - Fairness and Equity
rater1 <- c(2,4,3,3) # rater one's ratings
rater2 <- c(3,3,4,3) # rater two's ratings
cohen.kappa(x=cbind(rater1,rater2),alpha=.5)
## Warning in cohen.kappa1(x, w = w, n.obs = n.obs, alpha = alpha, levels =
## levels): upper or lower confidence interval exceed abs(1) and set to +/- 1.
## Call: cohen.kappa1(x = x, w = w, n.obs = n.obs, alpha = alpha, levels = levels)
##
## Cohen Kappa and Weighted Kappa correlation coefficients and confidence boundaries
## lower estimate upper
## unweighted kappa -0.5 -0.33 -0.17
## weighted kappa -1.0 0.00 1.00
##
## Number of subjects = 4
kappa2(cbind(rater1,rater2))
## Cohen's Kappa for 2 Raters (Weights: unweighted)
##
## Subjects = 4
## Raters = 2
## Kappa = -0.333
##
## z = -1.04
## p-value = 0.296
ScoreQuadraticWeightedKappa(rater1,rater2)
## [1] 0
- Unweighted kappa = -0.33
- Quadratic Weighted Kappa = 0
Standard 3 - Collaboration
rater1 <- c(2,4,3,3) # rater one's ratings
rater2 <- c(3,4,3,3) # rater two's ratings
cohen.kappa(x=cbind(rater1,rater2),alpha=.5)
## Call: cohen.kappa1(x = x, w = w, n.obs = n.obs, alpha = alpha, levels = levels)
##
## Cohen Kappa and Weighted Kappa correlation coefficients and confidence boundaries
## lower estimate upper
## unweighted kappa 0.34 0.56 0.77
## weighted kappa 0.67 0.67 0.67
##
## Number of subjects = 4
kappa2(cbind(rater1,rater2))
## Cohen's Kappa for 2 Raters (Weights: unweighted)
##
## Subjects = 4
## Raters = 2
## Kappa = 0.556
##
## z = 1.74
## p-value = 0.0817
ScoreQuadraticWeightedKappa(rater1,rater2)
## [1] 0.6666667
- Unweighted kappa = 0.56
- Quadratic Weighted Kappa = 0.667
Standard 4 - Reflective Practice
rater1 <- c(2,4,4,3) # rater one's ratings
rater2 <- c(3,3,3,4) # rater two's ratings
cohen.kappa(x=cbind(rater1,rater2),alpha=.5)
## Call: cohen.kappa1(x = x, w = w, n.obs = n.obs, alpha = alpha, levels = levels)
##
## Cohen Kappa and Weighted Kappa correlation coefficients and confidence boundaries
## lower estimate upper
## unweighted kappa -0.72 -0.45 -0.19
## weighted kappa -0.14 -0.14 -0.14
##
## Number of subjects = 4
kappa2(cbind(rater1,rater2))
## Cohen's Kappa for 2 Raters (Weights: unweighted)
##
## Subjects = 4
## Raters = 2
## Kappa = -0.455
##
## z = -1.74
## p-value = 0.0817
ScoreQuadraticWeightedKappa(rater1,rater2)
## [1] -0.1428571
- Unweighted kappa = -0.45
- Quadratic Weighted Kappa = -0.14
Standards for Initial ELED - Portfolio
Standard 1 - The Learner and Learning
rater1 <- c(2,3,3,3) # rater one's ratings
rater2 <- c(3,4,4,4) # rater two's ratings
cohen.kappa(x=cbind(rater1,rater2),alpha=.5)
## Call: cohen.kappa1(x = x, w = w, n.obs = n.obs, alpha = alpha, levels = levels)
##
## Cohen Kappa and Weighted Kappa correlation coefficients and confidence boundaries
## lower estimate upper
## unweighted kappa -0.341 -0.23 -0.12
## weighted kappa -0.064 0.27 0.61
##
## Number of subjects = 4
kappa2(cbind(rater1,rater2))
## Cohen's Kappa for 2 Raters (Weights: unweighted)
##
## Subjects = 4
## Raters = 2
## Kappa = -0.231
##
## z = -2
## p-value = 0.0455
ScoreQuadraticWeightedKappa(rater1,rater2)
## [1] 0.2727273
- Unweighted kappa = -0.23
- Quadratic Weighted Kappa = 0.27
- Note: rater two scored exactly one level above rater one on every subject, so the quadratic weights credit these near-misses while unweighted kappa counts them as full disagreements.
Standard 2 - Learning Environments
rater1 <- c(2,4,4,3) # rater one's ratings
rater2 <- c(3,4,3,3) # rater two's ratings
cohen.kappa(x=cbind(rater1,rater2),alpha=.5)
## Call: cohen.kappa1(x = x, w = w, n.obs = n.obs, alpha = alpha, levels = levels)
##
## Cohen Kappa and Weighted Kappa correlation coefficients and confidence boundaries
## lower estimate upper
## unweighted kappa 0.11 0.27 0.44
## weighted kappa 0.32 0.43 0.54
##
## Number of subjects = 4
kappa2(cbind(rater1,rater2))
## Cohen's Kappa for 2 Raters (Weights: unweighted)
##
## Subjects = 4
## Raters = 2
## Kappa = 0.273
##
## z = 1.04
## p-value = 0.296
ScoreQuadraticWeightedKappa(rater1,rater2)
## [1] 0.4285714
- Unweighted kappa = 0.27
- Quadratic Weighted Kappa = 0.43
Standard 3 - Content Knowledge
rater1 <- c(2,3,3,3) # rater one's ratings
rater2 <- c(4,4,3,4) # rater two's ratings
cohen.kappa(x=cbind(rater1,rater2),alpha=.5)
## Call: cohen.kappa1(x = x, w = w, n.obs = n.obs, alpha = alpha, levels = levels)
##
## Cohen Kappa and Weighted Kappa correlation coefficients and confidence boundaries
## lower estimate upper
## unweighted kappa 0.02 0.077 0.13
## weighted kappa -0.63 -0.091 0.45
##
## Number of subjects = 4
kappa2(cbind(rater1,rater2))
## Cohen's Kappa for 2 Raters (Weights: unweighted)
##
## Subjects = 4
## Raters = 2
## Kappa = 0.0769
##
## z = 0.667
## p-value = 0.505
ScoreQuadraticWeightedKappa(rater1,rater2)
## [1] -0.09090909
- Unweighted kappa = 0.077
- Quadratic Weighted Kappa = -0.091
Standard 4 - Instructional Practice
rater1 <- c(2,4,4,3) # rater one's ratings
rater2 <- c(3,4,4,4) # rater two's ratings
cohen.kappa(x=cbind(rater1,rater2),alpha=.5)
## Call: cohen.kappa1(x = x, w = w, n.obs = n.obs, alpha = alpha, levels = levels)
##
## Cohen Kappa and Weighted Kappa correlation coefficients and confidence boundaries
## lower estimate upper
## unweighted kappa -0.026 0.11 0.25
## weighted kappa 0.518 0.56 0.59
##
## Number of subjects = 4
kappa2(cbind(rater1,rater2))
## Cohen's Kappa for 2 Raters (Weights: unweighted)
##
## Subjects = 4
## Raters = 2
## Kappa = 0.111
##
## z = 0.348
## p-value = 0.728
ScoreQuadraticWeightedKappa(rater1,rater2)
## [1] 0.5555556
- Unweighted kappa = 0.11
- Quadratic Weighted Kappa = 0.56
Standard 5 - Professional Learning and Ethical Practice
rater1 <- c(2,2,4,3) # rater one's ratings
rater2 <- c(3,4,3,3) # rater two's ratings
cohen.kappa(x=cbind(rater1,rater2),alpha=.5)
## Call: cohen.kappa1(x = x, w = w, n.obs = n.obs, alpha = alpha, levels = levels)
##
## Cohen Kappa and Weighted Kappa correlation coefficients and confidence boundaries
## lower estimate upper
## unweighted kappa -0.079 0.00 0.079
## weighted kappa -0.333 -0.33 -0.333
##
## Number of subjects = 4
kappa2(cbind(rater1,rater2))
## Cohen's Kappa for 2 Raters (Weights: unweighted)
##
## Subjects = 4
## Raters = 2
## Kappa = 0
##
## z = 0
## p-value = 1
ScoreQuadraticWeightedKappa(rater1,rater2)
## [1] -0.3333333
- Unweighted kappa = 0
- Quadratic Weighted Kappa = -0.33
Standard 6 - Leadership and Collaboration
rater1 <- c(2,2,3,3) # rater one's ratings
rater2 <- c(2,4,3,3) # rater two's ratings
cohen.kappa(x=cbind(rater1,rater2),alpha=.5)
## Call: cohen.kappa1(x = x, w = w, n.obs = n.obs, alpha = alpha, levels = levels)
##
## Cohen Kappa and Weighted Kappa correlation coefficients and confidence boundaries
## lower estimate upper
## unweighted kappa 0.43 0.6 0.77
## weighted kappa -0.65 0.0 0.65
##
## Number of subjects = 4
kappa2(cbind(rater1,rater2))
## Cohen's Kappa for 2 Raters (Weights: unweighted)
##
## Subjects = 4
## Raters = 2
## Kappa = 0.6
##
## z = 1.81
## p-value = 0.0704
ScoreQuadraticWeightedKappa(rater1,rater2)
## [1] 0
- Unweighted kappa = 0.6
- Quadratic Weighted Kappa = 0