@article{2723,
  title = {Computerized Adaptive Testing for Cognitively Based Multiple-Choice Data},
  journal = {Applied Psychological Measurement},
  volume = {43},
  number = {5},
  year = {2019},
  pages = {388--401},
  abstract = {Cognitive diagnosis models (CDMs) are latent class models that hold great promise for providing diagnostic information about student knowledge profiles. The increasing use of computers in classrooms enhances the advantages of CDMs for more efficient diagnostic testing through adaptive algorithms, an approach referred to as cognitive diagnosis computerized adaptive testing (CD-CAT). When multiple-choice items are involved, CD-CAT can be further improved by using polytomous scoring (i.e., considering the specific options students choose) instead of dichotomous scoring (i.e., marking answers as either right or wrong). In this study, the authors propose and evaluate the performance of the Jensen{\textendash}Shannon divergence (JSD) index as an item selection method for the multiple-choice deterministic inputs, noisy {\textquotedblleft}and{\textquotedblright} gate (MC-DINA) model. Attribute classification accuracy and item usage are evaluated under different conditions of item quality and test termination rule. The proposed approach is compared with the random selection method and an approximate approach based on dichotomized responses. The results show that under the MC-DINA model, JSD significantly improves attribute classification accuracy by incorporating the information from distractors, even with a very short test length. This result has important implications for practical classroom settings, as it allows for dramatically reduced testing times and thus more targeted learning opportunities.},
  doi = {10.1177/0146621618798665},
  url = {https://doi.org/10.1177/0146621618798665},
  author = {Hulya D. Yigit and Miguel A. Sorrel and Jimmy de la Torre}
}