library(readxl) # to read Excel file
library(psych) # for PCA
library(eRm) # for this Rasch analysis
##
## Attaching package: 'eRm'
## The following object is masked from 'package:psych':
##
## sim.rasch
library(TAM) # for reliability
## Loading required package: CDM
## Loading required package: mvtnorm
## **********************************
## ** CDM 8.2-6 (2022-08-25 15:43:23)
## ** Cognitive Diagnostic Models **
## **********************************
## * TAM 4.2-21 (2024-02-19 18:52:08)
response_raw = read_xls("data_10.xls", sheet = 1)[-1,] |> as.data.frame() # responses, dropping the key row
key = read_xls("data_10.xls", sheet = 1)[1,] |> as.character() # the first row of the sheet holds the answer key
head(response_raw) # not yet ready for analysis!
Recode the responses against the answer key: correct = 1, incorrect = 0.
response = response_raw
for (i in 2:ncol(response)) { # column 1 is the participant ID
response[,i] = ifelse(response_raw[,i] == key[i], 1, 0) # key[i] is recycled across rows
}
head(response)
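As a quick sanity check on the recoding, inspect the proportion of correct answers per item (the classical item p-values); this assumes, as in the loop above, that column 1 is the participant ID.
colMeans(response[, -1]) # proportion correct per item; higher = easier item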
soalan = names(response)[-1] # item names ("soalan" = question in Malay)
fit_rm = RM(response[,soalan])
summary(fit_rm)
##
## Results of RM estimation:
##
## Call: RM(X = response[, soalan])
##
## Conditional log-likelihood: -499.4192
## Number of iterations: 10
## Number of parameters: 9
##
## Item (Category) Difficulty Parameters (eta): with 0.95 CI:
## Estimate Std. Error lower CI upper CI
## Soalan2 -0.697 0.179 -1.049 -0.346
## Soalan3 0.922 0.189 0.552 1.292
## Soalan4 -1.183 0.195 -1.566 -0.801
## Soalan5 1.666 0.228 1.218 2.113
## Soalan6 0.596 0.179 0.245 0.947
## Soalan7 1.789 0.238 1.323 2.254
## Soalan8 -1.959 0.244 -2.436 -1.481
## Soalan9 -1.062 0.190 -1.435 -0.689
## Soalan10 0.201 0.173 -0.138 0.540
##
## Item Easiness Parameters (beta) with 0.95 CI:
## Estimate Std. Error lower CI upper CI
## beta Soalan1 0.272 0.173 -0.067 0.610
## beta Soalan2 0.697 0.179 0.346 1.049
## beta Soalan3 -0.922 0.189 -1.292 -0.552
## beta Soalan4 1.183 0.195 0.801 1.566
## beta Soalan5 -1.666 0.228 -2.113 -1.218
## beta Soalan6 -0.596 0.179 -0.947 -0.245
## beta Soalan7 -1.789 0.238 -2.254 -1.323
## beta Soalan8 1.959 0.244 1.481 2.436
## beta Soalan9 1.062 0.190 0.689 1.435
## beta Soalan10 -0.201 0.173 -0.540 0.138
To get item difficulty, multiply the easiness parameter (beta) by -1 (Wind & Hua, 2022; https://bookdown.org/chua/new_rasch_demo2/).
istats = data.frame(Difficulty = as.numeric(fit_rm$betapar*-1), SE = fit_rm$se.beta)
rownames(istats) = colnames(fit_rm$X)
istats
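To see the item hierarchy at a glance, sort the items from easiest to hardest:
istats[order(istats$Difficulty), ] # items sorted from easiest to hardest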
pparam = person.parameter(fit_rm)
summary(pparam)
##
## Estimation of Ability Parameters
##
## Collapsed log-likelihood: -39.45266
## Number of iterations: 8
## Number of parameters: 8
##
## ML estimated ability parameters (without spline interpolated values):
## Estimate Std. Err. 2.5 % 97.5 %
## theta P1 1.106052468 0.7803438 -0.42339334 2.63549827
## theta P2 -0.006463216 0.7287909 -1.43486713 1.42194069
## theta P3 -0.006463216 0.7287909 -1.43486713 1.42194069
## theta P4 0.531180215 0.7413633 -0.92186507 1.98422550
## theta P5 0.531180215 0.7413633 -0.92186507 1.98422550
## theta P6 0.531180215 0.7413633 -0.92186507 1.98422550
## theta P7 1.106052468 0.7803438 -0.42339334 2.63549827
## theta P8 -0.006463216 0.7287909 -1.43486713 1.42194069
## theta P9 1.106052468 0.7803438 -0.42339334 2.63549827
## theta P10 -0.006463216 0.7287909 -1.43486713 1.42194069
## theta P11 -0.006463216 0.7287909 -1.43486713 1.42194069
## theta P12 -0.006463216 0.7287909 -1.43486713 1.42194069
## theta P13 -0.541595270 0.7380553 -1.98815706 0.90496652
## theta P14 0.531180215 0.7413633 -0.92186507 1.98422550
## theta P15 -1.109767648 0.7751141 -2.62896343 0.40942813
## theta P16 -1.109767648 0.7751141 -2.62896343 0.40942813
## theta P17 0.531180215 0.7413633 -0.92186507 1.98422550
## theta P18 1.106052468 0.7803438 -0.42339334 2.63549827
## theta P19 -0.541595270 0.7380553 -1.98815706 0.90496652
## theta P20 -0.006463216 0.7287909 -1.43486713 1.42194069
## theta P21 -0.541595270 0.7380553 -1.98815706 0.90496652
## theta P22 0.531180215 0.7413633 -0.92186507 1.98422550
## theta P23 -0.006463216 0.7287909 -1.43486713 1.42194069
## theta P24 -0.541595270 0.7380553 -1.98815706 0.90496652
## theta P25 0.531180215 0.7413633 -0.92186507 1.98422550
## theta P26 -0.006463216 0.7287909 -1.43486713 1.42194069
## theta P27 0.531180215 0.7413633 -0.92186507 1.98422550
## theta P28 -1.109767648 0.7751141 -2.62896343 0.40942813
## theta P29 -0.541595270 0.7380553 -1.98815706 0.90496652
## theta P30 0.531180215 0.7413633 -0.92186507 1.98422550
## theta P31 -0.541595270 0.7380553 -1.98815706 0.90496652
## theta P32 0.531180215 0.7413633 -0.92186507 1.98422550
## theta P33 0.531180215 0.7413633 -0.92186507 1.98422550
## theta P34 -1.109767648 0.7751141 -2.62896343 0.40942813
## theta P35 1.106052468 0.7803438 -0.42339334 2.63549827
## theta P36 1.106052468 0.7803438 -0.42339334 2.63549827
## theta P37 -0.541595270 0.7380553 -1.98815706 0.90496652
## theta P38 1.106052468 0.7803438 -0.42339334 2.63549827
## theta P39 0.531180215 0.7413633 -0.92186507 1.98422550
## theta P40 0.531180215 0.7413633 -0.92186507 1.98422550
## theta P41 -0.541595270 0.7380553 -1.98815706 0.90496652
## theta P42 1.106052468 0.7803438 -0.42339334 2.63549827
## theta P43 -1.109767648 0.7751141 -2.62896343 0.40942813
## theta P44 -0.006463216 0.7287909 -1.43486713 1.42194069
## theta P45 -0.541595270 0.7380553 -1.98815706 0.90496652
## theta P46 -0.541595270 0.7380553 -1.98815706 0.90496652
## theta P47 -0.006463216 0.7287909 -1.43486713 1.42194069
## theta P48 1.106052468 0.7803438 -0.42339334 2.63549827
## theta P49 0.531180215 0.7413633 -0.92186507 1.98422550
## theta P50 -0.541595270 0.7380553 -1.98815706 0.90496652
## theta P51 0.531180215 0.7413633 -0.92186507 1.98422550
## theta P52 -0.006463216 0.7287909 -1.43486713 1.42194069
## theta P53 -0.006463216 0.7287909 -1.43486713 1.42194069
## theta P54 -1.109767648 0.7751141 -2.62896343 0.40942813
## theta P55 -0.006463216 0.7287909 -1.43486713 1.42194069
## theta P56 -1.109767648 0.7751141 -2.62896343 0.40942813
## theta P57 -0.541595270 0.7380553 -1.98815706 0.90496652
## theta P58 -0.541595270 0.7380553 -1.98815706 0.90496652
## theta P59 -1.109767648 0.7751141 -2.62896343 0.40942813
## theta P60 0.531180215 0.7413633 -0.92186507 1.98422550
## theta P61 -1.109767648 0.7751141 -2.62896343 0.40942813
## theta P62 1.106052468 0.7803438 -0.42339334 2.63549827
## theta P63 0.531180215 0.7413633 -0.92186507 1.98422550
## theta P64 1.106052468 0.7803438 -0.42339334 2.63549827
## theta P65 -0.006463216 0.7287909 -1.43486713 1.42194069
## theta P66 -0.006463216 0.7287909 -1.43486713 1.42194069
## theta P67 -0.541595270 0.7380553 -1.98815706 0.90496652
## theta P68 -1.109767648 0.7751141 -2.62896343 0.40942813
## theta P69 -1.109767648 0.7751141 -2.62896343 0.40942813
## theta P70 -1.109767648 0.7751141 -2.62896343 0.40942813
## theta P71 -0.541595270 0.7380553 -1.98815706 0.90496652
## theta P72 -0.541595270 0.7380553 -1.98815706 0.90496652
## theta P73 -0.006463216 0.7287909 -1.43486713 1.42194069
## theta P74 -0.006463216 0.7287909 -1.43486713 1.42194069
## theta P75 -1.109767648 0.7751141 -2.62896343 0.40942813
## theta P76 1.776521366 0.8687531 0.07379667 3.47924606
## theta P77 -1.109767648 0.7751141 -2.62896343 0.40942813
## theta P78 -1.109767648 0.7751141 -2.62896343 0.40942813
## theta P79 0.531180215 0.7413633 -0.92186507 1.98422550
## theta P80 0.531180215 0.7413633 -0.92186507 1.98422550
## theta P81 -0.006463216 0.7287909 -1.43486713 1.42194069
## theta P82 -0.541595270 0.7380553 -1.98815706 0.90496652
## theta P83 -0.006463216 0.7287909 -1.43486713 1.42194069
## theta P84 -0.006463216 0.7287909 -1.43486713 1.42194069
## theta P85 -1.109767648 0.7751141 -2.62896343 0.40942813
## theta P86 -1.109767648 0.7751141 -2.62896343 0.40942813
## theta P87 -0.541595270 0.7380553 -1.98815706 0.90496652
## theta P88 1.106052468 0.7803438 -0.42339334 2.63549827
## theta P89 1.106052468 0.7803438 -0.42339334 2.63549827
## theta P90 -0.006463216 0.7287909 -1.43486713 1.42194069
## theta P91 -2.703558754 1.1094732 -4.87808628 -0.52903122
## theta P92 -0.541595270 0.7380553 -1.98815706 0.90496652
## theta P93 -1.109767648 0.7751141 -2.62896343 0.40942813
## theta P94 -0.541595270 0.7380553 -1.98815706 0.90496652
## theta P95 -1.109767648 0.7751141 -2.62896343 0.40942813
## theta P96 1.776521366 0.8687531 0.07379667 3.47924606
## theta P97 -0.006463216 0.7287909 -1.43486713 1.42194069
## theta P98 -0.006463216 0.7287909 -1.43486713 1.42194069
## theta P99 -1.109767648 0.7751141 -2.62896343 0.40942813
## theta P100 0.531180215 0.7413633 -0.92186507 1.98422550
## theta P101 -0.541595270 0.7380553 -1.98815706 0.90496652
## theta P102 -1.109767648 0.7751141 -2.62896343 0.40942813
## theta P103 1.106052468 0.7803438 -0.42339334 2.63549827
## theta P104 -0.541595270 0.7380553 -1.98815706 0.90496652
## theta P105 0.531180215 0.7413633 -0.92186507 1.98422550
## theta P106 -0.541595270 0.7380553 -1.98815706 0.90496652
## theta P107 -0.006463216 0.7287909 -1.43486713 1.42194069
## theta P108 1.106052468 0.7803438 -0.42339334 2.63549827
## theta P109 0.531180215 0.7413633 -0.92186507 1.98422550
## theta P110 -0.006463216 0.7287909 -1.43486713 1.42194069
## theta P111 -1.109767648 0.7751141 -2.62896343 0.40942813
## theta P112 0.531180215 0.7413633 -0.92186507 1.98422550
## theta P113 -0.006463216 0.7287909 -1.43486713 1.42194069
## theta P114 -1.109767648 0.7751141 -2.62896343 0.40942813
## theta P115 1.106052468 0.7803438 -0.42339334 2.63549827
## theta P116 1.106052468 0.7803438 -0.42339334 2.63549827
## theta P117 -0.541595270 0.7380553 -1.98815706 0.90496652
## theta P118 -1.771871247 0.8641302 -3.46553528 -0.07820721
## theta P119 -0.541595270 0.7380553 -1.98815706 0.90496652
## theta P120 -0.541595270 0.7380553 -1.98815706 0.90496652
## theta P121 0.531180215 0.7413633 -0.92186507 1.98422550
## theta P122 1.106052468 0.7803438 -0.42339334 2.63549827
## theta P123 0.531180215 0.7413633 -0.92186507 1.98422550
## theta P124 -0.006463216 0.7287909 -1.43486713 1.42194069
## theta P125 -0.541595270 0.7380553 -1.98815706 0.90496652
## theta P126 -0.541595270 0.7380553 -1.98815706 0.90496652
## theta P127 0.531180215 0.7413633 -0.92186507 1.98422550
## theta P128 1.106052468 0.7803438 -0.42339334 2.63549827
## theta P129 -0.006463216 0.7287909 -1.43486713 1.42194069
## theta P130 -0.006463216 0.7287909 -1.43486713 1.42194069
pstats = data.frame(Ability = as.numeric(unlist(pparam$thetapar)), SE = as.numeric(unlist(pparam$se.theta)))
rownames(pstats) = rownames(fit_rm$X)
pstats
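Because Rasch ability estimates depend only on the raw score, many persons share identical theta values (as visible in the output above). A quick tabulation shows one distinct estimate per observed raw score:
table(round(pstats$Ability, 3)) # distinct theta values and their frequencies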
item_fit = itemfit(pparam); item_fit # Discrim = corrected item-test correlation of Cureton (1966); not part of the Rasch model
##
## Itemfit Statistics:
## Chisq df p-value Outfit MSQ Infit MSQ Outfit t Infit t Discrim
## Soalan1 143.683 129 0.178 1.105 1.092 1.317 1.432 -0.255
## Soalan2 118.199 129 0.742 0.909 0.948 -0.897 -0.690 0.222
## Soalan3 103.913 129 0.949 0.799 0.859 -1.615 -1.644 0.512
## Soalan4 124.121 129 0.605 0.955 0.967 -0.274 -0.316 0.000
## Soalan5 94.176 129 0.991 0.724 0.861 -1.350 -0.987 0.293
## Soalan6 112.980 129 0.841 0.869 0.891 -1.300 -1.495 0.435
## Soalan7 132.586 129 0.396 1.020 0.971 0.163 -0.139 -0.325
## Soalan8 151.905 129 0.082 1.168 0.968 0.752 -0.145 -0.394
## Soalan9 140.252 129 0.235 1.079 1.001 0.630 0.038 -0.161
## Soalan10 107.368 129 0.917 0.826 0.850 -2.268 -2.448 0.689
print(item_fit, sort_by = "infit_t", decreasing = TRUE)
##
## Itemfit Statistics:
## Chisq df p-value Outfit MSQ Infit MSQ Outfit t Infit t Discrim
## Soalan10 107.368 129 0.917 0.826 0.850 -2.268 -2.448 0.689
## Soalan3 103.913 129 0.949 0.799 0.859 -1.615 -1.644 0.512
## Soalan6 112.980 129 0.841 0.869 0.891 -1.300 -1.495 0.435
## Soalan1 143.683 129 0.178 1.105 1.092 1.317 1.432 -0.255
## Soalan5 94.176 129 0.991 0.724 0.861 -1.350 -0.987 0.293
## Soalan2 118.199 129 0.742 0.909 0.948 -0.897 -0.690 0.222
## Soalan4 124.121 129 0.605 0.955 0.967 -0.274 -0.316 0.000
## Soalan8 151.905 129 0.082 1.168 0.968 0.752 -0.145 -0.394
## Soalan7 132.586 129 0.396 1.020 0.971 0.163 -0.139 -0.325
## Soalan9 140.252 129 0.235 1.079 1.001 0.630 0.038 -0.161
print(item_fit, sort_by = "outfit_t", decreasing = TRUE)
##
## Itemfit Statistics:
## Chisq df p-value Outfit MSQ Infit MSQ Outfit t Infit t Discrim
## Soalan10 107.368 129 0.917 0.826 0.850 -2.268 -2.448 0.689
## Soalan3 103.913 129 0.949 0.799 0.859 -1.615 -1.644 0.512
## Soalan5 94.176 129 0.991 0.724 0.861 -1.350 -0.987 0.293
## Soalan1 143.683 129 0.178 1.105 1.092 1.317 1.432 -0.255
## Soalan6 112.980 129 0.841 0.869 0.891 -1.300 -1.495 0.435
## Soalan2 118.199 129 0.742 0.909 0.948 -0.897 -0.690 0.222
## Soalan8 151.905 129 0.082 1.168 0.968 0.752 -0.145 -0.394
## Soalan9 140.252 129 0.235 1.079 1.001 0.630 0.038 -0.161
## Soalan4 124.121 129 0.605 0.955 0.967 -0.274 -0.316 0.000
## Soalan7 132.586 129 0.396 1.020 0.971 0.163 -0.139 -0.325
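Applying the usual |t| < 2 rule of thumb (also used for the pathway map below), misfitting items can be flagged programmatically. A minimal sketch, assuming eRm's ifit components i.infitZ and i.outfitZ:
misfit_items = abs(item_fit$i.infitZ) > 2 | abs(item_fit$i.outfitZ) > 2
names(which(misfit_items)) # here only Soalan10 exceeds the threshold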
person_fit = personfit(pparam); person_fit
##
## Personfit Statistics:
## Chisq df p-value Outfit MSQ Infit MSQ Outfit t Infit t
## P1 12.221 9 0.201 1.222 0.944 0.54 -0.03
## P2 11.524 9 0.241 1.152 1.029 0.49 0.20
## P3 4.176 9 0.899 0.418 0.472 -1.67 -1.95
## P4 22.607 9 0.007 2.261 1.576 2.12 1.57
## P5 4.947 9 0.839 0.495 0.594 -1.15 -1.31
## P6 10.320 9 0.325 1.032 0.974 0.23 0.04
## P7 13.359 9 0.147 1.336 0.957 0.68 0.00
## P8 4.176 9 0.899 0.418 0.472 -1.67 -1.95
## P9 14.760 9 0.098 1.476 1.297 0.85 0.86
## P10 8.837 9 0.452 0.884 0.959 -0.15 -0.02
## P11 7.806 9 0.554 0.781 0.879 -0.43 -0.28
## P12 8.935 9 0.443 0.894 1.009 -0.12 0.14
## P13 7.791 9 0.555 0.779 0.944 -0.34 -0.07
## P14 12.896 9 0.167 1.290 1.264 0.71 0.84
## P15 10.423 9 0.317 1.042 1.100 0.28 0.39
## P16 5.196 9 0.817 0.520 0.698 -0.67 -0.85
## P17 22.163 9 0.008 2.216 1.552 2.06 1.52
## P18 16.605 9 0.055 1.661 1.382 1.06 1.05
## P19 6.137 9 0.726 0.614 0.739 -0.77 -0.79
## P20 10.750 9 0.293 1.075 1.191 0.31 0.66
## P21 21.485 9 0.011 2.149 1.648 1.96 1.80
## P22 7.399 9 0.596 0.740 0.805 -0.44 -0.51
## P23 18.126 9 0.034 1.813 1.466 1.71 1.37
## P24 5.077 9 0.828 0.508 0.612 -1.10 -1.30
## P25 17.725 9 0.039 1.772 1.165 1.48 0.58
## P26 14.612 9 0.102 1.461 1.424 1.10 1.27
## P27 6.731 9 0.665 0.673 0.802 -0.62 -0.52
## P28 31.453 9 0.000 3.145 1.995 2.34 2.34
## P29 5.077 9 0.828 0.508 0.612 -1.10 -1.30
## P30 9.162 9 0.422 0.916 1.002 -0.02 0.12
## P31 8.389 9 0.496 0.839 0.946 -0.19 -0.06
## P32 12.644 9 0.179 1.264 1.385 0.67 1.13
## P33 4.947 9 0.839 0.495 0.594 -1.15 -1.31
## P34 10.544 9 0.308 1.054 1.285 0.30 0.86
## P35 11.743 9 0.228 1.174 1.215 0.47 0.67
## P36 7.307 9 0.605 0.731 0.824 -0.24 -0.39
## P37 6.137 9 0.726 0.614 0.739 -0.77 -0.79
## P38 14.465 9 0.107 1.447 1.263 0.82 0.78
## P39 7.607 9 0.574 0.761 0.858 -0.39 -0.33
## P40 9.162 9 0.422 0.916 1.002 -0.02 0.12
## P41 11.360 9 0.252 1.136 1.234 0.43 0.78
## P42 7.307 9 0.605 0.731 0.824 -0.24 -0.39
## P43 4.200 9 0.898 0.420 0.554 -0.92 -1.42
## P44 9.246 9 0.415 0.925 0.983 -0.04 0.06
## P45 8.075 9 0.527 0.808 0.975 -0.27 0.03
## P46 8.075 9 0.527 0.808 0.975 -0.27 0.03
## P47 15.909 9 0.069 1.591 1.445 1.34 1.32
## P48 7.307 9 0.605 0.731 0.824 -0.24 -0.39
## P49 6.059 9 0.734 0.606 0.714 -0.80 -0.83
## P50 7.328 9 0.603 0.733 0.819 -0.45 -0.49
## P51 13.569 9 0.139 1.357 1.352 0.83 1.05
## P52 7.980 9 0.536 0.798 0.884 -0.38 -0.26
## P53 5.130 9 0.823 0.513 0.597 -1.29 -1.36
## P54 6.958 9 0.642 0.696 0.917 -0.30 -0.13
## P55 14.371 9 0.110 1.437 1.334 1.06 1.04
## P56 9.670 9 0.378 0.967 0.992 0.17 0.09
## P57 9.725 9 0.373 0.973 1.022 0.10 0.18
## P58 5.077 9 0.828 0.508 0.612 -1.10 -1.30
## P59 7.787 9 0.556 0.779 0.922 -0.14 -0.11
## P60 6.059 9 0.734 0.606 0.714 -0.80 -0.83
## P61 5.196 9 0.817 0.520 0.698 -0.67 -0.85
## P62 4.658 9 0.863 0.466 0.613 -0.81 -1.11
## P63 11.972 9 0.215 1.197 1.296 0.55 0.92
## P64 7.504 9 0.585 0.750 0.975 -0.20 0.05
## P65 7.806 9 0.554 0.781 0.879 -0.43 -0.28
## P66 4.176 9 0.899 0.418 0.472 -1.67 -1.95
## P67 4.217 9 0.897 0.422 0.497 -1.39 -1.83
## P68 4.953 9 0.838 0.495 0.662 -0.73 -0.99
## P69 4.200 9 0.898 0.420 0.554 -0.92 -1.42
## P70 11.601 9 0.237 1.160 1.001 0.45 0.12
## P71 5.077 9 0.828 0.508 0.612 -1.10 -1.30
## P72 15.148 9 0.087 1.515 1.188 1.08 0.66
## P73 4.176 9 0.899 0.418 0.472 -1.67 -1.95
## P74 14.006 9 0.122 1.401 1.520 0.99 1.50
## P75 6.227 9 0.717 0.623 0.814 -0.45 -0.45
## P76 5.095 9 0.826 0.509 0.809 -0.30 -0.30
## P77 8.741 9 0.461 0.874 1.140 0.02 0.50
## P78 5.984 9 0.741 0.598 0.778 -0.50 -0.57
## P79 8.071 9 0.527 0.807 0.893 -0.27 -0.22
## P80 12.234 9 0.200 1.223 1.201 0.60 0.67
## P81 6.848 9 0.653 0.685 0.773 -0.71 -0.66
## P82 4.217 9 0.897 0.422 0.497 -1.39 -1.83
## P83 5.130 9 0.823 0.513 0.597 -1.29 -1.36
## P84 5.130 9 0.823 0.513 0.597 -1.29 -1.36
## P85 5.984 9 0.741 0.598 0.778 -0.50 -0.57
## P86 7.545 9 0.581 0.754 0.885 -0.19 -0.23
## P87 18.488 9 0.030 1.849 1.446 1.57 1.33
## P88 29.866 9 0.000 2.987 1.545 2.24 1.40
## P89 6.207 9 0.719 0.621 0.835 -0.46 -0.36
## P90 7.119 9 0.625 0.712 0.784 -0.63 -0.61
## P91 2.883 9 0.969 0.288 0.669 -0.09 -0.24
## P92 19.892 9 0.019 1.989 1.466 1.76 1.38
## P93 4.953 9 0.838 0.495 0.662 -0.73 -0.99
## P94 6.137 9 0.726 0.614 0.739 -0.77 -0.79
## P95 5.196 9 0.817 0.520 0.698 -0.67 -0.85
## P96 5.341 9 0.804 0.534 0.855 -0.26 -0.19
## P97 10.302 9 0.327 1.030 1.140 0.21 0.52
## P98 5.991 9 0.741 0.599 0.698 -0.98 -0.94
## P99 4.200 9 0.898 0.420 0.554 -0.92 -1.42
## P100 4.947 9 0.839 0.495 0.594 -1.15 -1.31
## P101 18.283 9 0.032 1.828 1.277 1.55 0.90
## P102 6.979 9 0.639 0.698 0.922 -0.29 -0.11
## P103 12.221 9 0.201 1.222 0.944 0.54 -0.03
## P104 4.217 9 0.897 0.422 0.497 -1.39 -1.83
## P105 4.146 9 0.902 0.415 0.486 -1.44 -1.79
## P106 7.328 9 0.603 0.733 0.819 -0.45 -0.49
## P107 11.159 9 0.265 1.116 1.215 0.41 0.73
## P108 27.217 9 0.001 2.722 1.333 2.04 0.94
## P109 12.623 9 0.180 1.262 1.374 0.67 1.11
## P110 5.991 9 0.741 0.599 0.698 -0.98 -0.94
## P111 4.200 9 0.898 0.420 0.554 -0.92 -1.42
## P112 25.085 9 0.003 2.508 1.722 2.41 1.88
## P113 12.965 9 0.164 1.296 1.133 0.79 0.50
## P114 5.984 9 0.741 0.598 0.778 -0.50 -0.57
## P115 12.221 9 0.201 1.222 0.944 0.54 -0.03
## P116 7.307 9 0.605 0.731 0.824 -0.24 -0.39
## P117 20.202 9 0.017 2.020 1.519 1.80 1.50
## P118 8.683 9 0.467 0.868 1.181 0.18 0.53
## P119 4.217 9 0.897 0.422 0.497 -1.39 -1.83
## P120 7.328 9 0.603 0.733 0.819 -0.45 -0.49
## P121 6.059 9 0.734 0.606 0.714 -0.80 -0.83
## P122 7.307 9 0.605 0.731 0.824 -0.24 -0.39
## P123 11.972 9 0.215 1.197 1.296 0.55 0.92
## P124 14.063 9 0.120 1.406 1.342 1.00 1.06
## P125 6.932 9 0.644 0.693 0.829 -0.55 -0.46
## P126 10.540 9 0.309 1.054 1.121 0.27 0.47
## P127 7.607 9 0.574 0.761 0.858 -0.39 -0.33
## P128 3.963 9 0.914 0.396 0.516 -1.00 -1.49
## P129 4.176 9 0.899 0.418 0.472 -1.67 -1.95
## P130 4.176 9 0.899 0.418 0.472 -1.67 -1.95
print(person_fit, sort_by = "infit_t", decreasing = TRUE)
##
## Personfit Statistics:
## Chisq df p-value Outfit MSQ Infit MSQ Outfit t Infit t
## P1 12.221 9 0.201 1.222 0.944 0.54 -0.03
## P2 11.524 9 0.241 1.152 1.029 0.49 0.20
## P3 4.176 9 0.899 0.418 0.472 -1.67 -1.95
## P4 22.607 9 0.007 2.261 1.576 2.12 1.57
## P5 4.947 9 0.839 0.495 0.594 -1.15 -1.31
## P6 10.320 9 0.325 1.032 0.974 0.23 0.04
## P7 13.359 9 0.147 1.336 0.957 0.68 0.00
## P8 4.176 9 0.899 0.418 0.472 -1.67 -1.95
## P9 14.760 9 0.098 1.476 1.297 0.85 0.86
## P10 8.837 9 0.452 0.884 0.959 -0.15 -0.02
## P11 7.806 9 0.554 0.781 0.879 -0.43 -0.28
## P12 8.935 9 0.443 0.894 1.009 -0.12 0.14
## P13 7.791 9 0.555 0.779 0.944 -0.34 -0.07
## P14 12.896 9 0.167 1.290 1.264 0.71 0.84
## P15 10.423 9 0.317 1.042 1.100 0.28 0.39
## P16 5.196 9 0.817 0.520 0.698 -0.67 -0.85
## P17 22.163 9 0.008 2.216 1.552 2.06 1.52
## P18 16.605 9 0.055 1.661 1.382 1.06 1.05
## P19 6.137 9 0.726 0.614 0.739 -0.77 -0.79
## P20 10.750 9 0.293 1.075 1.191 0.31 0.66
## P21 21.485 9 0.011 2.149 1.648 1.96 1.80
## P22 7.399 9 0.596 0.740 0.805 -0.44 -0.51
## P23 18.126 9 0.034 1.813 1.466 1.71 1.37
## P24 5.077 9 0.828 0.508 0.612 -1.10 -1.30
## P25 17.725 9 0.039 1.772 1.165 1.48 0.58
## P26 14.612 9 0.102 1.461 1.424 1.10 1.27
## P27 6.731 9 0.665 0.673 0.802 -0.62 -0.52
## P28 31.453 9 0.000 3.145 1.995 2.34 2.34
## P29 5.077 9 0.828 0.508 0.612 -1.10 -1.30
## P30 9.162 9 0.422 0.916 1.002 -0.02 0.12
## P31 8.389 9 0.496 0.839 0.946 -0.19 -0.06
## P32 12.644 9 0.179 1.264 1.385 0.67 1.13
## P33 4.947 9 0.839 0.495 0.594 -1.15 -1.31
## P34 10.544 9 0.308 1.054 1.285 0.30 0.86
## P35 11.743 9 0.228 1.174 1.215 0.47 0.67
## P36 7.307 9 0.605 0.731 0.824 -0.24 -0.39
## P37 6.137 9 0.726 0.614 0.739 -0.77 -0.79
## P38 14.465 9 0.107 1.447 1.263 0.82 0.78
## P39 7.607 9 0.574 0.761 0.858 -0.39 -0.33
## P40 9.162 9 0.422 0.916 1.002 -0.02 0.12
## P41 11.360 9 0.252 1.136 1.234 0.43 0.78
## P42 7.307 9 0.605 0.731 0.824 -0.24 -0.39
## P43 4.200 9 0.898 0.420 0.554 -0.92 -1.42
## P44 9.246 9 0.415 0.925 0.983 -0.04 0.06
## P45 8.075 9 0.527 0.808 0.975 -0.27 0.03
## P46 8.075 9 0.527 0.808 0.975 -0.27 0.03
## P47 15.909 9 0.069 1.591 1.445 1.34 1.32
## P48 7.307 9 0.605 0.731 0.824 -0.24 -0.39
## P49 6.059 9 0.734 0.606 0.714 -0.80 -0.83
## P50 7.328 9 0.603 0.733 0.819 -0.45 -0.49
## P51 13.569 9 0.139 1.357 1.352 0.83 1.05
## P52 7.980 9 0.536 0.798 0.884 -0.38 -0.26
## P53 5.130 9 0.823 0.513 0.597 -1.29 -1.36
## P54 6.958 9 0.642 0.696 0.917 -0.30 -0.13
## P55 14.371 9 0.110 1.437 1.334 1.06 1.04
## P56 9.670 9 0.378 0.967 0.992 0.17 0.09
## P57 9.725 9 0.373 0.973 1.022 0.10 0.18
## P58 5.077 9 0.828 0.508 0.612 -1.10 -1.30
## P59 7.787 9 0.556 0.779 0.922 -0.14 -0.11
## P60 6.059 9 0.734 0.606 0.714 -0.80 -0.83
## P61 5.196 9 0.817 0.520 0.698 -0.67 -0.85
## P62 4.658 9 0.863 0.466 0.613 -0.81 -1.11
## P63 11.972 9 0.215 1.197 1.296 0.55 0.92
## P64 7.504 9 0.585 0.750 0.975 -0.20 0.05
## P65 7.806 9 0.554 0.781 0.879 -0.43 -0.28
## P66 4.176 9 0.899 0.418 0.472 -1.67 -1.95
## P67 4.217 9 0.897 0.422 0.497 -1.39 -1.83
## P68 4.953 9 0.838 0.495 0.662 -0.73 -0.99
## P69 4.200 9 0.898 0.420 0.554 -0.92 -1.42
## P70 11.601 9 0.237 1.160 1.001 0.45 0.12
## P71 5.077 9 0.828 0.508 0.612 -1.10 -1.30
## P72 15.148 9 0.087 1.515 1.188 1.08 0.66
## P73 4.176 9 0.899 0.418 0.472 -1.67 -1.95
## P74 14.006 9 0.122 1.401 1.520 0.99 1.50
## P75 6.227 9 0.717 0.623 0.814 -0.45 -0.45
## P76 5.095 9 0.826 0.509 0.809 -0.30 -0.30
## P77 8.741 9 0.461 0.874 1.140 0.02 0.50
## P78 5.984 9 0.741 0.598 0.778 -0.50 -0.57
## P79 8.071 9 0.527 0.807 0.893 -0.27 -0.22
## P80 12.234 9 0.200 1.223 1.201 0.60 0.67
## P81 6.848 9 0.653 0.685 0.773 -0.71 -0.66
## P82 4.217 9 0.897 0.422 0.497 -1.39 -1.83
## P83 5.130 9 0.823 0.513 0.597 -1.29 -1.36
## P84 5.130 9 0.823 0.513 0.597 -1.29 -1.36
## P85 5.984 9 0.741 0.598 0.778 -0.50 -0.57
## P86 7.545 9 0.581 0.754 0.885 -0.19 -0.23
## P87 18.488 9 0.030 1.849 1.446 1.57 1.33
## P88 29.866 9 0.000 2.987 1.545 2.24 1.40
## P89 6.207 9 0.719 0.621 0.835 -0.46 -0.36
## P90 7.119 9 0.625 0.712 0.784 -0.63 -0.61
## P91 2.883 9 0.969 0.288 0.669 -0.09 -0.24
## P92 19.892 9 0.019 1.989 1.466 1.76 1.38
## P93 4.953 9 0.838 0.495 0.662 -0.73 -0.99
## P94 6.137 9 0.726 0.614 0.739 -0.77 -0.79
## P95 5.196 9 0.817 0.520 0.698 -0.67 -0.85
## P96 5.341 9 0.804 0.534 0.855 -0.26 -0.19
## P97 10.302 9 0.327 1.030 1.140 0.21 0.52
## P98 5.991 9 0.741 0.599 0.698 -0.98 -0.94
## P99 4.200 9 0.898 0.420 0.554 -0.92 -1.42
## P100 4.947 9 0.839 0.495 0.594 -1.15 -1.31
## P101 18.283 9 0.032 1.828 1.277 1.55 0.90
## P102 6.979 9 0.639 0.698 0.922 -0.29 -0.11
## P103 12.221 9 0.201 1.222 0.944 0.54 -0.03
## P104 4.217 9 0.897 0.422 0.497 -1.39 -1.83
## P105 4.146 9 0.902 0.415 0.486 -1.44 -1.79
## P106 7.328 9 0.603 0.733 0.819 -0.45 -0.49
## P107 11.159 9 0.265 1.116 1.215 0.41 0.73
## P108 27.217 9 0.001 2.722 1.333 2.04 0.94
## P109 12.623 9 0.180 1.262 1.374 0.67 1.11
## P110 5.991 9 0.741 0.599 0.698 -0.98 -0.94
## P111 4.200 9 0.898 0.420 0.554 -0.92 -1.42
## P112 25.085 9 0.003 2.508 1.722 2.41 1.88
## P113 12.965 9 0.164 1.296 1.133 0.79 0.50
## P114 5.984 9 0.741 0.598 0.778 -0.50 -0.57
## P115 12.221 9 0.201 1.222 0.944 0.54 -0.03
## P116 7.307 9 0.605 0.731 0.824 -0.24 -0.39
## P117 20.202 9 0.017 2.020 1.519 1.80 1.50
## P118 8.683 9 0.467 0.868 1.181 0.18 0.53
## P119 4.217 9 0.897 0.422 0.497 -1.39 -1.83
## P120 7.328 9 0.603 0.733 0.819 -0.45 -0.49
## P121 6.059 9 0.734 0.606 0.714 -0.80 -0.83
## P122 7.307 9 0.605 0.731 0.824 -0.24 -0.39
## P123 11.972 9 0.215 1.197 1.296 0.55 0.92
## P124 14.063 9 0.120 1.406 1.342 1.00 1.06
## P125 6.932 9 0.644 0.693 0.829 -0.55 -0.46
## P126 10.540 9 0.309 1.054 1.121 0.27 0.47
## P127 7.607 9 0.574 0.761 0.858 -0.39 -0.33
## P128 3.963 9 0.914 0.396 0.516 -1.00 -1.49
## P129 4.176 9 0.899 0.418 0.472 -1.67 -1.95
## P130 4.176 9 0.899 0.418 0.472 -1.67 -1.95
print(person_fit, sort_by = "outfit_t", decreasing = TRUE)
##
## Personfit Statistics:
## Chisq df p-value Outfit MSQ Infit MSQ Outfit t Infit t
## P1 12.221 9 0.201 1.222 0.944 0.54 -0.03
## P2 11.524 9 0.241 1.152 1.029 0.49 0.20
## P3 4.176 9 0.899 0.418 0.472 -1.67 -1.95
## P4 22.607 9 0.007 2.261 1.576 2.12 1.57
## P5 4.947 9 0.839 0.495 0.594 -1.15 -1.31
## P6 10.320 9 0.325 1.032 0.974 0.23 0.04
## P7 13.359 9 0.147 1.336 0.957 0.68 0.00
## P8 4.176 9 0.899 0.418 0.472 -1.67 -1.95
## P9 14.760 9 0.098 1.476 1.297 0.85 0.86
## P10 8.837 9 0.452 0.884 0.959 -0.15 -0.02
## P11 7.806 9 0.554 0.781 0.879 -0.43 -0.28
## P12 8.935 9 0.443 0.894 1.009 -0.12 0.14
## P13 7.791 9 0.555 0.779 0.944 -0.34 -0.07
## P14 12.896 9 0.167 1.290 1.264 0.71 0.84
## P15 10.423 9 0.317 1.042 1.100 0.28 0.39
## P16 5.196 9 0.817 0.520 0.698 -0.67 -0.85
## P17 22.163 9 0.008 2.216 1.552 2.06 1.52
## P18 16.605 9 0.055 1.661 1.382 1.06 1.05
## P19 6.137 9 0.726 0.614 0.739 -0.77 -0.79
## P20 10.750 9 0.293 1.075 1.191 0.31 0.66
## P21 21.485 9 0.011 2.149 1.648 1.96 1.80
## P22 7.399 9 0.596 0.740 0.805 -0.44 -0.51
## P23 18.126 9 0.034 1.813 1.466 1.71 1.37
## P24 5.077 9 0.828 0.508 0.612 -1.10 -1.30
## P25 17.725 9 0.039 1.772 1.165 1.48 0.58
## P26 14.612 9 0.102 1.461 1.424 1.10 1.27
## P27 6.731 9 0.665 0.673 0.802 -0.62 -0.52
## P28 31.453 9 0.000 3.145 1.995 2.34 2.34
## P29 5.077 9 0.828 0.508 0.612 -1.10 -1.30
## P30 9.162 9 0.422 0.916 1.002 -0.02 0.12
## P31 8.389 9 0.496 0.839 0.946 -0.19 -0.06
## P32 12.644 9 0.179 1.264 1.385 0.67 1.13
## P33 4.947 9 0.839 0.495 0.594 -1.15 -1.31
## P34 10.544 9 0.308 1.054 1.285 0.30 0.86
## P35 11.743 9 0.228 1.174 1.215 0.47 0.67
## P36 7.307 9 0.605 0.731 0.824 -0.24 -0.39
## P37 6.137 9 0.726 0.614 0.739 -0.77 -0.79
## P38 14.465 9 0.107 1.447 1.263 0.82 0.78
## P39 7.607 9 0.574 0.761 0.858 -0.39 -0.33
## P40 9.162 9 0.422 0.916 1.002 -0.02 0.12
## P41 11.360 9 0.252 1.136 1.234 0.43 0.78
## P42 7.307 9 0.605 0.731 0.824 -0.24 -0.39
## P43 4.200 9 0.898 0.420 0.554 -0.92 -1.42
## P44 9.246 9 0.415 0.925 0.983 -0.04 0.06
## P45 8.075 9 0.527 0.808 0.975 -0.27 0.03
## P46 8.075 9 0.527 0.808 0.975 -0.27 0.03
## P47 15.909 9 0.069 1.591 1.445 1.34 1.32
## P48 7.307 9 0.605 0.731 0.824 -0.24 -0.39
## P49 6.059 9 0.734 0.606 0.714 -0.80 -0.83
## P50 7.328 9 0.603 0.733 0.819 -0.45 -0.49
## P51 13.569 9 0.139 1.357 1.352 0.83 1.05
## P52 7.980 9 0.536 0.798 0.884 -0.38 -0.26
## P53 5.130 9 0.823 0.513 0.597 -1.29 -1.36
## P54 6.958 9 0.642 0.696 0.917 -0.30 -0.13
## P55 14.371 9 0.110 1.437 1.334 1.06 1.04
## P56 9.670 9 0.378 0.967 0.992 0.17 0.09
## P57 9.725 9 0.373 0.973 1.022 0.10 0.18
## P58 5.077 9 0.828 0.508 0.612 -1.10 -1.30
## P59 7.787 9 0.556 0.779 0.922 -0.14 -0.11
## P60 6.059 9 0.734 0.606 0.714 -0.80 -0.83
## P61 5.196 9 0.817 0.520 0.698 -0.67 -0.85
## P62 4.658 9 0.863 0.466 0.613 -0.81 -1.11
## P63 11.972 9 0.215 1.197 1.296 0.55 0.92
## P64 7.504 9 0.585 0.750 0.975 -0.20 0.05
## P65 7.806 9 0.554 0.781 0.879 -0.43 -0.28
## P66 4.176 9 0.899 0.418 0.472 -1.67 -1.95
## P67 4.217 9 0.897 0.422 0.497 -1.39 -1.83
## P68 4.953 9 0.838 0.495 0.662 -0.73 -0.99
## P69 4.200 9 0.898 0.420 0.554 -0.92 -1.42
## P70 11.601 9 0.237 1.160 1.001 0.45 0.12
## P71 5.077 9 0.828 0.508 0.612 -1.10 -1.30
## P72 15.148 9 0.087 1.515 1.188 1.08 0.66
## P73 4.176 9 0.899 0.418 0.472 -1.67 -1.95
## P74 14.006 9 0.122 1.401 1.520 0.99 1.50
## P75 6.227 9 0.717 0.623 0.814 -0.45 -0.45
## P76 5.095 9 0.826 0.509 0.809 -0.30 -0.30
## P77 8.741 9 0.461 0.874 1.140 0.02 0.50
## P78 5.984 9 0.741 0.598 0.778 -0.50 -0.57
## P79 8.071 9 0.527 0.807 0.893 -0.27 -0.22
## P80 12.234 9 0.200 1.223 1.201 0.60 0.67
## P81 6.848 9 0.653 0.685 0.773 -0.71 -0.66
## P82 4.217 9 0.897 0.422 0.497 -1.39 -1.83
## P83 5.130 9 0.823 0.513 0.597 -1.29 -1.36
## P84 5.130 9 0.823 0.513 0.597 -1.29 -1.36
## P85 5.984 9 0.741 0.598 0.778 -0.50 -0.57
## P86 7.545 9 0.581 0.754 0.885 -0.19 -0.23
## P87 18.488 9 0.030 1.849 1.446 1.57 1.33
## P88 29.866 9 0.000 2.987 1.545 2.24 1.40
## P89 6.207 9 0.719 0.621 0.835 -0.46 -0.36
## P90 7.119 9 0.625 0.712 0.784 -0.63 -0.61
## P91 2.883 9 0.969 0.288 0.669 -0.09 -0.24
## P92 19.892 9 0.019 1.989 1.466 1.76 1.38
## P93 4.953 9 0.838 0.495 0.662 -0.73 -0.99
## P94 6.137 9 0.726 0.614 0.739 -0.77 -0.79
## P95 5.196 9 0.817 0.520 0.698 -0.67 -0.85
## P96 5.341 9 0.804 0.534 0.855 -0.26 -0.19
## P97 10.302 9 0.327 1.030 1.140 0.21 0.52
## P98 5.991 9 0.741 0.599 0.698 -0.98 -0.94
## P99 4.200 9 0.898 0.420 0.554 -0.92 -1.42
## P100 4.947 9 0.839 0.495 0.594 -1.15 -1.31
## P101 18.283 9 0.032 1.828 1.277 1.55 0.90
## P102 6.979 9 0.639 0.698 0.922 -0.29 -0.11
## P103 12.221 9 0.201 1.222 0.944 0.54 -0.03
## P104 4.217 9 0.897 0.422 0.497 -1.39 -1.83
## P105 4.146 9 0.902 0.415 0.486 -1.44 -1.79
## P106 7.328 9 0.603 0.733 0.819 -0.45 -0.49
## P107 11.159 9 0.265 1.116 1.215 0.41 0.73
## P108 27.217 9 0.001 2.722 1.333 2.04 0.94
## P109 12.623 9 0.180 1.262 1.374 0.67 1.11
## P110 5.991 9 0.741 0.599 0.698 -0.98 -0.94
## P111 4.200 9 0.898 0.420 0.554 -0.92 -1.42
## P112 25.085 9 0.003 2.508 1.722 2.41 1.88
## P113 12.965 9 0.164 1.296 1.133 0.79 0.50
## P114 5.984 9 0.741 0.598 0.778 -0.50 -0.57
## P115 12.221 9 0.201 1.222 0.944 0.54 -0.03
## P116 7.307 9 0.605 0.731 0.824 -0.24 -0.39
## P117 20.202 9 0.017 2.020 1.519 1.80 1.50
## P118 8.683 9 0.467 0.868 1.181 0.18 0.53
## P119 4.217 9 0.897 0.422 0.497 -1.39 -1.83
## P120 7.328 9 0.603 0.733 0.819 -0.45 -0.49
## P121 6.059 9 0.734 0.606 0.714 -0.80 -0.83
## P122 7.307 9 0.605 0.731 0.824 -0.24 -0.39
## P123 11.972 9 0.215 1.197 1.296 0.55 0.92
## P124 14.063 9 0.120 1.406 1.342 1.00 1.06
## P125 6.932 9 0.644 0.693 0.829 -0.55 -0.46
## P126 10.540 9 0.309 1.054 1.121 0.27 0.47
## P127 7.607 9 0.574 0.761 0.858 -0.39 -0.33
## P128 3.963 9 0.914 0.396 0.516 -1.00 -1.49
## P129 4.176 9 0.899 0.418 0.472 -1.67 -1.95
## P130 4.176 9 0.899 0.418 0.472 -1.67 -1.95
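The same rule of thumb can flag persons with aberrant response patterns. A minimal sketch, assuming eRm's pfit components p.infitZ and p.outfitZ:
misfit_persons = abs(person_fit$p.infitZ) > 2 | abs(person_fit$p.outfitZ) > 2
names(which(misfit_persons)) # e.g. P28 and P112, whose infit/outfit t exceed 2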
Reliability is defined as \(Rel = 1 - \frac{VAR(error)}{VAR(observed)}\), where \(VAR(error)\) is estimated by the mean squared standard error, \(\overline{SE^2}\).
Weighted likelihood estimation (WLE) reliability = \(1 - \frac{\overline{SE^2}}{VAR(estimates)}\) -> for ML/WLE ability estimates.
Expected a posteriori (EAP) reliability = \(1 - \frac{\overline{SE^2}}{\overline{SE^2} + VAR(estimates)}\) -> comparable to Cronbach's alpha (Wess et al., 2021).
WLErel(istats$Difficulty, istats$SE) # separation "reliability" of the item difficulty estimates
## [1] 0.9744397
EAPrel(istats$Difficulty, istats$SE)
## [1] 0.9750768
WLErel(pstats$Ability, pstats$SE) # person separation reliability
## [1] 0.1164769
1 - mean(pstats$SE^2) / sd(pstats$Ability)^2 # manual check of the WLE formula above
## [1] 0.1164769
summary(SepRel(pparam)) # SepRel from eRm, same as WLErel from TAM
## Separation Reliability: 0.1165
##
## Observed Variance: 0.6473 (Squared Standard Deviation)
## Mean Square Measurement Error: 0.5719 (Model Error Variance)
EAPrel(pstats$Ability, pstats$SE)
## [1] 0.53092
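As with the WLE reliability above, the EAP value can be reproduced by hand from its formula:
err_var = mean(pstats$SE^2) # mean error variance
obs_var = sd(pstats$Ability)^2 # variance of the ability estimates
1 - err_var / (err_var + obs_var) # matches EAPrel() above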
The variance explained by the Rasch measures is expected to be > 20% to support unidimensionality.
model_prob = pmat(pparam) # model-predicted probabilities of a correct response
response_no_extreme = pparam$X.ex # response matrix excluding extreme response patterns
resids = response_no_extreme - model_prob # raw residuals
VO = var(as.vector(response_no_extreme)) # observed variance
VR = var(as.vector(resids)) # residual (unexplained) variance
(VO - VR)/VO * 100 # % variance explained by the Rasch measures, expected > 20%
## [1] 33.50158
std_resids = item_fit$st.res
pca = pca(std_resids, nfactors = ncol(pparam$X), rotate = "none")
pca$values # none of the eigenvalues exceeds 2
## [1] 1.55334589 1.39816500 1.31610949 1.16605766 1.12192106 1.01337028
## [7] 0.90095029 0.81385220 0.69083772 0.02539043
contrasts = pca$values[1:5] # first five contrasts
plot(contrasts, ylab = "Eigenvalues", xlab = "Contrast Number",
main = "Contrasts from PCA of Standardized Residuals")
All eigenvalues are < 2, so we can conclude that the unidimensionality assumption is fulfilled.
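A one-line check of this criterion:
max(pca$values) < 2 # TRUE: no residual contrast reaches an eigenvalue of 2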
scree(std_resids, pc = T, fa = F) |> print() # largest eigenvalue is < 2
## Scree of eigen values
## Call: NULL
## Eigen values of Principal Components [1] 1.55 1.40 1.32 1.17 1.12 1.01 0.90 0.81 0.69 0.03
fa.parallel(std_resids, fa = "pc") |> print() # looks like random noise
## Parallel analysis suggests that the number of factors = NA and the number of components = 0
## Call: fa.parallel(x = std_resids, fa = "pc")
## Parallel analysis suggests that the number of factors = NA and the number of components = 0
##
## Eigen Values of
##
## eigen values of factors
## [1] 0.90 0.41 0.36 0.24 0.14 0.05 0.02 -0.13 -0.17 -0.92
##
## eigen values of simulated factors
## [1] NA
##
## eigen values of components
## [1] 1.55 1.40 1.32 1.17 1.12 1.01 0.90 0.81 0.69 0.03
##
## eigen values of simulated components
## [1] 1.49 1.33 1.20 1.12 1.02 0.94 0.85 0.77 0.68 0.60
vss(std_resids) # Velicer's MAP suggests 1 factor, although VSS says 8
## Warning in fa.stats(r = r, f = f, phi = phi, n.obs = n.obs, np.obs = np.obs, :
## The estimated weights for the factor scores are probably incorrect. Try a
## different factor score estimation method.
## Warning in fac(r = r, nfactors = nfactors, n.obs = n.obs, rotate = rotate, : An
## ultra-Heywood case was detected. Examine the results carefully
## Warning in fa.stats(r = r, f = f, phi = phi, n.obs = n.obs, np.obs = np.obs, :
## The estimated weights for the factor scores are probably incorrect. Try a
## different factor score estimation method.
## Warning in fac(r = r, nfactors = nfactors, n.obs = n.obs, rotate = rotate, : An
## ultra-Heywood case was detected. Examine the results carefully
## Warning in fa.stats(r = r, f = f, phi = phi, n.obs = n.obs, np.obs = np.obs, :
## The estimated weights for the factor scores are probably incorrect. Try a
## different factor score estimation method.
## Warning in fac(r = r, nfactors = nfactors, n.obs = n.obs, rotate = rotate, : An
## ultra-Heywood case was detected. Examine the results carefully
## Warning in fa.stats(r = r, f = f, phi = phi, n.obs = n.obs, np.obs = np.obs, :
## The estimated weights for the factor scores are probably incorrect. Try a
## different factor score estimation method.
## Warning in fac(r = r, nfactors = nfactors, n.obs = n.obs, rotate = rotate, : An
## ultra-Heywood case was detected. Examine the results carefully
## Warning in fa.stats(r = r, f = f, phi = phi, n.obs = n.obs, np.obs = np.obs, :
## The estimated weights for the factor scores are probably incorrect. Try a
## different factor score estimation method.
## Warning in fac(r = r, nfactors = nfactors, n.obs = n.obs, rotate = rotate, : An
## ultra-Heywood case was detected. Examine the results carefully
## Warning in fa.stats(r = r, f = f, phi = phi, n.obs = n.obs, np.obs = np.obs, :
## The estimated weights for the factor scores are probably incorrect. Try a
## different factor score estimation method.
## Warning in fac(r = r, nfactors = nfactors, n.obs = n.obs, rotate = rotate, : An
## ultra-Heywood case was detected. Examine the results carefully
## Warning in fa.stats(r = r, f = f, phi = phi, n.obs = n.obs, np.obs = np.obs, :
## The estimated weights for the factor scores are probably incorrect. Try a
## different factor score estimation method.
## Warning in fac(r = r, nfactors = nfactors, n.obs = n.obs, rotate = rotate, : An
## ultra-Heywood case was detected. Examine the results carefully
##
## Very Simple Structure
## Call: vss(x = std_resids)
## VSS complexity 1 achieves a maximimum of 0.74 with 8 factors
## VSS complexity 2 achieves a maximimum of 0.81 with 8 factors
##
## The Velicer MAP achieves a minimum of 0.03 with 1 factors
## BIC achieves a minimum of 181.31 with 1 factors
## Sample Size adjusted BIC achieves a minimum of 247.63 with 5 factors
##
## Statistics by number of factors
## vss1 vss2 map dof chisq prob sqresid fit RMSEA BIC SABIC complex eChisq
## 1 0.16 0.00 0.033 35 352 6.2e-54 9.9 0.16 0.26 181 292 1.0 166.9
## 2 0.27 0.29 0.052 26 323 5.1e-53 8.3 0.29 0.30 196 279 1.4 121.8
## 3 0.36 0.39 0.079 18 300 6.0e-53 6.9 0.41 0.35 212 269 1.5 89.4
## 4 0.42 0.47 0.114 11 279 2.8e-53 5.7 0.51 0.43 225 260 1.6 70.7
## 5 0.49 0.56 0.169 5 256 2.6e-53 4.5 0.62 0.62 232 248 1.7 50.5
## 6 0.59 0.65 0.258 0 213 NA 3.2 0.73 NA NA NA 1.9 30.3
## 7 0.66 0.74 0.423 -4 176 NA 2.1 0.82 NA NA NA 1.7 16.7
## 8 0.74 0.81 0.725 -7 140 NA 1.1 0.90 NA NA NA 1.7 7.2
## SRMR eCRMS eBIC
## 1 0.119 0.14 -3.4
## 2 0.102 0.13 -4.7
## 3 0.087 0.14 1.8
## 4 0.078 0.16 17.2
## 5 0.066 0.20 26.1
## 6 0.051 NA NA
## 7 0.038 NA NA
## 8 0.025 NA NA
plotPImap(fit_rm, irug = T, sort = TRUE) # Person-Item map (Wright map)
plotPWmap(fit_rm, imap = T, itemCI = list()) # Pathway map with item locations and CIs;
# item infit t-statistics should lie between -2 and 2
plotPWmap(fit_rm, imap = T, itemCI = list(), pmap = T, # + person locations
item.pch = 16,
person.pch = 4) # customize symbols for item and person, values 0 to 25 (see ?points)
plotjointICC(fit_rm)
abline(h = 0.5, col = "blue", lty = "dotted")
plotICC(fit_rm, empICC = list("raw", type = "b", col = "blue", lty = "dotted"))
The empICC argument allows comparison of the model-predicted ICC with the empirical ICC from the observed data.
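For a closer look at a single item, plotICC also accepts an item.subset argument; for example, Soalan10, the item flagged by the fit statistics earlier:
plotICC(fit_rm, item.subset = "Soalan10",
        empICC = list("raw"), empCI = list()) # model vs empirical ICC with CIs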
summary(gofIRT(pparam))
##
## Goodness-of-Fit Tests
## value df p-value
## Collapsed Deviance 78.511 80 0.526
## Hosmer-Lemeshow 11.024 8 0.200
## Rost Deviance 322.856 1014 1.000
## Casewise Deviance 1315.504 1283 0.258
##
## R-Squared Measures
## Pearson R2: 0.336
## Sum-of-Squares R2: 0.335
## McFadden R2: 0.37
##
## Classifier Results - Confusion Matrix (relative frequencies)
## observed
## predicted 0 1
## 0 0.393 0.118
## 1 0.115 0.373
##
## Accuracy: 0.766
## Sensitivity: 0.759
## Specificity: 0.773
## Area under ROC: 0.834
## Gini coefficient: 0.668
Bond, T. G., Yan, Z., & Heene, M. (2021). Applying the Rasch model: Fundamental measurement in the human sciences (4th ed.). Routledge.
de Ayala, R. J. (2009). The theory and practice of item response theory. The Guilford Press.
Wind, S., & Hua, C. (2022). Rasch measurement theory analysis in R. CRC Press.