Gao, Lingyu; Gimpel, Kevin; Jensson, Arnar
Distractor Analysis and Selection for Multiple-Choice Cloze Questions for Second-Language Learners Proceedings Article
In: Proceedings of the Fifteenth Workshop on Innovative Use of NLP for Building Educational Applications, S. 102–114, Association for Computational Linguistics, Seattle, WA, USA (Online), 2020.
Abstract | Links | BibTeX | Schlagwörter: distractor, language learning, Multiple-choice, O, quiz
@inproceedings{Gao2020,
  title     = {Distractor Analysis and Selection for Multiple-Choice Cloze Questions for Second-Language Learners},
  author    = {Gao, Lingyu and Gimpel, Kevin and Jensson, Arnar},
  url       = {https://www.aclweb.org/anthology/2020.bea-1.10},
  doi       = {10.18653/v1/2020.bea-1.10},
  year      = {2020},
  date      = {2020-12-01},
  booktitle = {Proceedings of the Fifteenth Workshop on Innovative Use of {NLP} for Building Educational Applications},
  pages     = {102--114},
  publisher = {Association for Computational Linguistics},
  address   = {Seattle, WA, USA (Online)},
  abstract  = {We consider the problem of automatically suggesting distractors for multiple-choice cloze questions designed for second-language learners. We describe the creation of a dataset including collecting manual annotations for distractor selection. We assess the relationship between the choices of the annotators and features based on distractors and the correct answers, both with and without the surrounding passage context in the cloze questions. Simple features of the distractor and correct answer correlate with the annotations, though we find substantial benefit to additionally using large-scale pretrained models to measure the fit of the distractor in the context. Based on these analyses, we propose and train models to automatically select distractors, and measure the importance of model components quantitatively.},
  keywords  = {distractor, language learning, Multiple-choice, O, quiz},
  pubstate  = {published},
  tppubtype = {inproceedings}
}
Butler, Andrew C.
Multiple-Choice Testing in Education: Are the Best Practices for Assessment Also Good for Learning? Artikel
In: Journal of Applied Research in Memory and Cognition, Bd. 7, Nr. 3, S. 323–331, 2018, ISSN: 2211-3681.
Abstract | Links | BibTeX | Schlagwörter: Assessment, Learning, Multiple-choice, O, Testing
@article{Butler2018,
  title     = {Multiple-Choice Testing in Education: Are the Best Practices for Assessment Also Good for Learning?},
  author    = {Butler, Andrew C.},
  url       = {http://www.sciencedirect.com/science/article/pii/S2211368118301426},
  doi       = {10.1016/j.jarmac.2018.07.002},
  issn      = {2211-3681},
  year      = {2018},
  date      = {2018-07-31},
  urldate   = {2018-10-20},
  journal   = {Journal of Applied Research in Memory and Cognition},
  volume    = {7},
  number    = {3},
  pages     = {323--331},
  abstract  = {Multiple-choice tests are arguably the most popular type of assessment in education, and much research has been dedicated to determining best practices for using them to measure learning. The act of taking a test also causes learning, and numerous studies have investigated how best to use multiple-choice tests to improve long-term retention and produce deeper understanding. In this review article, I explore whether the best practices for assessment align with the best practices for learning. Although consensus between these two literatures is not a foregone conclusion, there is substantial agreement in how best to construct and use multiple-choice tests for these two disparate purposes. The overall recommendation from both literatures is to create questions that are simple in format (e.g., avoid use of complex item types), challenge students but allow them to succeed often, and target specific cognitive processes that correspond to learning objectives.},
  keywords  = {Assessment, Learning, Multiple-choice, O, Testing},
  pubstate  = {published},
  tppubtype = {article}
}