Sepp, Stoo
Towards More Transparency in Learning Analytics: Sharing Information with University Students Increases their Awareness of Data Collection Practices Artikel
In: Journal of Learning Analytics, Bd. 12, Nr. 3, S. 34–46, 2025, ISSN: 1929-7750.
Abstract | Links | BibTeX | Schlagwörter: analytics practice, disclosure, ethics, O, Privacy, research paper, transparency
@article{Sepp2025,
  title     = {Towards More Transparency in Learning Analytics: Sharing Information with University Students Increases their Awareness of Data Collection Practices},
  author    = {Sepp, Stoo},
  doi       = {10.18608/jla.2025.8713},
  issn      = {1929-7750},
  year      = {2025},
  date      = {2025-12-15},
  journal   = {Journal of Learning Analytics},
  volume    = {12},
  number    = {3},
  pages     = {34--46},
  publisher = {Society for Learning Analytics Research},
  abstract  = {As learning analytics practices become more commonplace in educational settings, student knowledge about the collection and use of their data becomes more of an interest. How students perceive the collection and use of their data has been researched for many years, with legitimate privacy and ethical concerns raised. While various guidelines, models, and frameworks have been proposed to address these concerns, the ways educational institutions practically address them by providing more information for increased transparency has yet to be widely investigated. The present study provides an initial investigation into the effectiveness of three different formats of data disclosure statements in a higher education setting. Participants were presented with one of three different formats from a fictional university: 1) a generic text, 2) a detailed text, or 3) an icon-based “nutrition label.” Participants then completed a survey to assess their understanding and perceptions of data collection practices. Results suggest that regardless of format, participants demonstrated an increased understanding of these practices when the disclosure was not generic. Additionally, student acceptance of data collection and beliefs about sharing were unaffected by disclosure of any kind. This study provides initial evidence to inform learning analytics practices to address identified concerns from students around a lack of transparency. Universities and other institutions in the higher education sector may revisit their data disclosure methods and language to ensure that they are both accurate and transparent, so that students better understand data collection within the scope of their studies.},
  keywords  = {analytics practice, disclosure, ethics, O, Privacy, research paper, transparency},
  pubstate  = {published},
  tppubtype = {article}
}
Herodotou, Christothea; Shrestha, Sagun; Comfort, Catherine; Andrews, Heshan; Mulholland, Paul; Bayer, Vaclav; Maguire, Claire; Lee, John; Fernandez, Miriam
A Participatory Approach to Designing a Student-Facing Dashboard for Online and Distance Education Artikel
In: Journal of Learning Analytics, Bd. 12, Nr. 2, S. 158–174, 2025, ISSN: 1929-7750.
Abstract | Links | BibTeX | Schlagwörter: distance learning, LADs, learning analytics dashboards, O, online students, participatory design, research paper
@article{Herodotou2025,
  title     = {A Participatory Approach to Designing a Student-Facing Dashboard for Online and Distance Education},
  author    = {Herodotou, Christothea and Shrestha, Sagun and Comfort, Catherine and Andrews, Heshan and Mulholland, Paul and Bayer, Vaclav and Maguire, Claire and Lee, John and Fernandez, Miriam},
  doi       = {10.18608/jla.2025.8481},
  issn      = {1929-7750},
  year      = {2025},
  date      = {2025-07-08},
  journal   = {Journal of Learning Analytics},
  volume    = {12},
  number    = {2},
  pages     = {158--174},
  publisher = {Society for Learning Analytics Research},
  abstract  = {In this paper, we explore the design of a student-facing dashboard for online and distance learning with a focus on capturing and addressing specific learning needs. A participatory process involving 20 students was employed, which included a screening questionnaire and focus group discussions. The selection of data points to be displayed on the dashboard was mainly determined by student responses regarding the usefulness of a feature, and a high frequency of their agreement. The data analysis revealed that the learning needs of online students relate to course support and communication (with tutors and other students). In response to this, students expressed a desire for accessing information related to their assignments, study time, and tutorials. The data points endorsed by students related to descriptive (assignment scores, engagement with the virtual learning environment, material accessed), predictive (score prediction), and prescriptive data (material recommendations and contact information). Student choices of data points were driven by a desire to better understand their study progress and take appropriate action. These insights emphasize the need for designing dashboards that not only describe performance but foremost “prescribe” to students potential solutions to overcome performance challenges.},
  keywords  = {distance learning, LADs, learning analytics dashboards, O, online students, participatory design, research paper},
  pubstate  = {published},
  tppubtype = {article}
}
Parker, Michael J.; Bunch, Matt; Pike, Andrew
How Much is Enough? Formative Assessment Dynamics Artikel
In: Journal of Learning Analytics, Bd. 12, Nr. 2, S. 196–210, 2025, ISSN: 1929-7750.
Abstract | Links | BibTeX | Schlagwörter: Assessment, formative assessment, learning analytics, learning outcomes, O, outcomes prediction, research paper
@article{Parker2025,
  title     = {How Much is Enough? Formative Assessment Dynamics},
  author    = {Parker, Michael J. and Bunch, Matt and Pike, Andrew},
  doi       = {10.18608/jla.2025.8753},
  issn      = {1929-7750},
  year      = {2025},
  date      = {2025-07-04},
  journal   = {Journal of Learning Analytics},
  volume    = {12},
  number    = {2},
  pages     = {196--210},
  publisher = {Society for Learning Analytics Research},
  abstract  = {While the educational value of formative assessment is widely acknowledged, the precise amount needed to effectively predict student performance on summative assessments remains unclear. This study investigates the relationship between intermediate formative assessment performance and final exam scores, addressing the critical question of how much assessment is needed for accurate prediction. Using a large dataset encompassing over 20,000 student enrollments across 127 course runs of 15 online biomedical sciences courses, we examined the correlation between intermediate assessment scores and final exam performance. Our results show that after completing about 40% of the formative assessments in a course, student scores demonstrate a strong correlation (Pearson r > 0.7) with their final exam scores. The correlation after taking additional formative assessments reaches a maximum of approximately 0.75. This finding was consistent across different course types and lengths, suggesting that the relative amount of assessment taken, rather than the absolute number, is key. Surprisingly, we found that random sampling of assessments was even more predictive than chronological sampling, suggesting that the proportion of questions used, relative to the total number of assessment questions, is more important than their specific sequence. These findings contribute to a deeper understanding of the predictive capabilities of formative assessment, and enable educators to identify at-risk students earlier, optimize assessment design, and develop more efficient and targeted interventions.},
  keywords  = {Assessment, formative assessment, learning analytics, learning outcomes, O, outcomes prediction, research paper},
  pubstate  = {published},
  tppubtype = {article}
}