Both educational data mining and learning analytics aim to understand learners and optimise learning processes in educational settings such as Moodle, a learning management system (LMS). Analytics in an LMS covers many different aspects, such as finding students at risk of abandoning a course or identifying students with difficulties before assessments, so there are multiple prediction models that can be explored. Prediction models can also target the course itself: for instance, will this assessment activity engage learners? To ease the evaluation and use of prediction models in Moodle, we abstract out their most relevant elements and develop an analytics framework for Moodle. In addition to the software framework, we present a case-study model that uses assessment-based variables to predict students at risk of dropping out of a massive open online course offered eight times from 2013 to 2018 to a total of 46,895 students. A neural network is trained with data from past courses, and the framework generates insights about students at risk in ongoing courses. Predictions are generated after the first, second, and third quarters of the course. The average accuracy we achieve is 88.81%, with an F1 score of 0.9337 and an area under the ROC curve of 73.12%.
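The abstract describes a pipeline of this shape: assessment-derived indicators from past course runs are used to train a neural network classifier, which is then evaluated with accuracy, F1 score and area under the ROC curve. Below is a minimal, hypothetical Python sketch of that kind of pipeline. The feature names, the simulated labels and the use of a scikit-learn MLP are assumptions made for illustration only; this is not the authors' Moodle analytics implementation or their model.

# Hypothetical sketch (not the authors' code): train a small feed-forward
# classifier on assessment-based indicators and report the same metrics the
# paper uses (accuracy, F1 score, area under the ROC curve).
import numpy as np
from sklearn.metrics import accuracy_score, f1_score, roc_auc_score
from sklearn.model_selection import train_test_split
from sklearn.neural_network import MLPClassifier
from sklearn.preprocessing import StandardScaler

rng = np.random.default_rng(0)

# Synthetic stand-ins for assessment-derived indicators (assumed names), e.g.
# how many assessments a student attempted and their average grade so far.
n_students = 5000
attempts = rng.integers(0, 10, size=n_students)
avg_grade = rng.uniform(0, 100, size=n_students)
days_since_last_access = rng.integers(0, 30, size=n_students)
X = np.column_stack([attempts, avg_grade, days_since_last_access])

# Label: 1 = dropped out. Simulated here so the example runs; in the paper the
# label comes from completion records of past course runs.
dropout_prob = 1 / (1 + np.exp(0.8 * attempts + 0.03 * avg_grade
                               - 0.15 * days_since_last_access - 1.0))
y = (rng.uniform(size=n_students) < dropout_prob).astype(int)

# Past courses play the role of training data, the ongoing course of test data.
X_train, X_test, y_train, y_test = train_test_split(
    X, y, test_size=0.25, random_state=0, stratify=y)

scaler = StandardScaler().fit(X_train)
clf = MLPClassifier(hidden_layer_sizes=(16,), max_iter=500, random_state=0)
clf.fit(scaler.transform(X_train), y_train)

pred = clf.predict(scaler.transform(X_test))
proba = clf.predict_proba(scaler.transform(X_test))[:, 1]
print(f"accuracy: {accuracy_score(y_test, pred):.4f}")
print(f"F1 score: {f1_score(y_test, pred):.4f}")
print(f"ROC AUC:  {roc_auc_score(y_test, proba):.4f}")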
@article{MonllaóOlivé2019,
author = {Monlla{\'o} Oliv{\'e}, David and Huynh, Du Q. and Reynolds, Mark and Dougiamas, Martin and Wiese, Damyon},
day = 24,
doi = {10.1007/s12528-019-09230-1},
issn = {1867-1233},
journal = {Journal of Computing in Higher Education},
keywords = {LMS MOOC Moodle assessment drop-out educationaldatamining framework learninganalytics machinelearning neuralnetworks},
month = may,
title = {A supervised learning framework: using assessment to identify students at risk of dropping out of a MOOC},
url = {https://doi.org/10.1007/s12528-019-09230-1},
year = 2019
}