@article{2737,
  title = {Efficiency of Targeted Multistage Calibration Designs Under Practical Constraints: A Simulation Study},
  journal = {Journal of Educational Measurement},
  volume = {56},
  number = {1},
  year = {2019},
  pages = {121-146},
  abstract = {Calibration of an item bank for computer adaptive testing requires substantial resources. In this study, we investigated whether the efficiency of calibration under the Rasch model could be enhanced by improving the match between item difficulty and student ability. We introduced targeted multistage calibration designs, a design type that considers ability-related background variables and performance for assigning students to suitable items. Furthermore, we investigated whether uncertainty about item difficulty could impair the assembling of efficient designs. The results indicated that targeted multistage calibration designs were more efficient than ordinary targeted designs under optimal conditions. Limited knowledge about item difficulty reduced the efficiency of one of the two investigated targeted multistage calibration designs, whereas targeted designs were more robust.},
  doi = {10.1111/jedm.12203},
  url = {https://onlinelibrary.wiley.com/doi/abs/10.1111/jedm.12203},
  author = {Berger, St{\'e}phanie and Verschoor, Angela J. and Eggen, Theo J. H. M. and Moser, Urs}
}

@article{2529,
  title = {Latent-Class-Based Item Selection for Computerized Adaptive Progress Tests},
  journal = {Journal of Computerized Adaptive Testing},
  volume = {5},
  year = {2017},
  pages = {22-43},
  keywords = {computerized adaptive progress test, item selection method, Kullback-Leibler information, latent class analysis, log-odds scoring},
  issn = {2165-6592},
  doi = {10.7333/1704-0502022},
  url = {http://iacat.org/jcat/index.php/jcat/article/view/62/29},
  author = {van Buuren, Nikky and Eggen, Theo J. H. M.}
}

@article{2492,
  title = {Multidimensional Computerized Adaptive Testing for Classifying Examinees With Within-Dimensionality},
  journal = {Applied Psychological Measurement},
  volume = {40},
  number = {6},
  year = {2016},
  pages = {387-404},
  abstract = {A classification method is presented for adaptive classification testing with a multidimensional item response theory (IRT) model in which items are intended to measure multiple traits, that is, within-dimensionality. The reference composite is used with the sequential probability ratio test (SPRT) to make decisions and decide whether testing can be stopped before reaching the maximum test length. Item-selection methods are provided that maximize the determinant of the information matrix at the cutoff point or at the projected ability estimate. A simulation study illustrates the efficiency and effectiveness of the classification method. Simulations were run with the new item-selection methods, random item selection, and maximization of the determinant of the information matrix at the ability estimate. The study also showed that the SPRT with multidimensional IRT has the same characteristics as the SPRT with unidimensional IRT and results in more accurate classifications than the latter when used for multidimensional data.},
  doi = {10.1177/0146621616648931},
  url = {http://apm.sagepub.com/content/40/6/387.abstract},
  author = {van Groen, Maaike M. and Eggen, Theo J. H. M. and Veldkamp, Bernard P.}
}

@article{2326,
  title = {Item Selection Methods Based on Multiple Objective Approaches for Classifying Respondents Into Multiple Levels},
  journal = {Applied Psychological Measurement},
  volume = {38},
  number = {3},
  year = {2014},
  pages = {187-200},
  abstract = {Computerized classification tests classify examinees into two or more levels while maximizing accuracy and minimizing test length. The majority of currently available item selection methods maximize information at one point on the ability scale, but in a test with multiple cutting points, selection methods could take all these points simultaneously into account. If for each cutting point one objective is specified, the objectives can be combined into one optimization function using multiple objective approaches. Simulation studies were used to compare the efficiency and accuracy of eight selection methods in a test based on the sequential probability ratio test. Small differences were found in accuracy and efficiency between different methods depending on the item pool and settings of the classification method. The size of the indifference region had little influence on accuracy but considerable influence on efficiency. Content and exposure control had little influence on accuracy and efficiency.},
  doi = {10.1177/0146621613509723},
  url = {http://apm.sagepub.com/content/38/3/187.abstract},
  author = {van Groen, Maaike M. and Eggen, Theo J. H. M. and Veldkamp, Bernard P.}
}