Dacrema, Maurizio Ferrari; Cremonesi, Paolo; Jannach, Dietmar
Are We Really Making Much Progress? A Worrying Analysis of Recent Neural Recommendation Approaches Artikel
In: Proceedings of the 13th ACM Conference on Recommender Systems (RecSys 2019), 2019.
Abstract | Links | BibTeX | Schlagwörter: evolutionary computing, information retrieval, machine learning, neural computing, O
@comment{Conference paper (RecSys 2019), not a journal article: retyped from
@article to @inproceedings; proceedings title moved from journal to booktitle.
First author's compound surname "Ferrari Dacrema" entered in comma form so
BibTeX does not split it. arXiv id stored in eprint fields; DOI added.}
@inproceedings{Dacrema2019,
  title      = {Are We Really Making Much Progress? A Worrying Analysis of Recent Neural Recommendation Approaches},
  author     = {Ferrari Dacrema, Maurizio and Cremonesi, Paolo and Jannach, Dietmar},
  booktitle  = {Proceedings of the 13th ACM Conference on Recommender Systems (RecSys 2019)},
  year       = {2019},
  date       = {2019-07-23},
  doi        = {10.1145/3298689.3347058},
  eprint     = {1907.06902},
  eprinttype = {arXiv},
  url        = {https://arxiv.org/abs/1907.06902},
  urldate    = {2019-08-08},
  abstract   = {Deep learning techniques have become the method of choice for researchers working on algorithmic aspects of recommender systems. With the strongly increased interest in machine learning in general, it has, as a result, become difficult to keep track of what represents the state-of-the-art at the moment, e.g., for top-n recommendation tasks. At the same time, several recent publications point out problems in today's research practice in applied machine learning, e.g., in terms of the reproducibility of the results or the choice of the baselines when proposing new models. In this work, we report the results of a systematic analysis of algorithmic proposals for top-n recommendation tasks. Specifically, we considered 18 algorithms that were presented at top-level research conferences in the last years. Only 7 of them could be reproduced with reasonable effort. For these methods, it however turned out that 6 of them can often be outperformed with comparably simple heuristic methods, e.g., based on nearest-neighbor or graph-based techniques. The remaining one clearly outperformed the baselines but did not consistently outperform a well-tuned non-neural linear ranking method. Overall, our work sheds light on a number of potential problems in today's machine learning scholarship and calls for improved scientific practices in this area.},
  keywords   = {evolutionary computing, information retrieval, machine learning, neural computing, O},
  pubstate   = {published},
  tppubtype  = {inproceedings}
}
Kim, Byung-Hak; Ganapathi, Varun
LumièreNet: Lecture Video Synthesis from Audio Artikel
In: CoRR, Bd. abs/1907.02253, 2019.
Abstract | Links | BibTeX | Schlagwörter: audio processing, computer vision, lectures, machine learning, O, pattern recognition, speech processing
@comment{CoRR preprint. Fixed garbled volume "bs/1907.02253" -> "abs/1907.02253"
(dblp CoRR volume convention). The url field held two newline-separated URLs,
which is malformed; kept the canonical arXiv URL and recorded the arXiv id in
eprint fields. Author names entered in unambiguous "Last, First" form.}
@article{Kim2019,
  title      = {Lumi{\`e}reNet: Lecture Video Synthesis from Audio},
  author     = {Kim, Byung-Hak and Ganapathi, Varun},
  journal    = {CoRR},
  volume     = {abs/1907.02253},
  year       = {2019},
  date       = {2019-07-08},
  eprint     = {1907.02253},
  eprinttype = {arXiv},
  url        = {http://arxiv.org/abs/1907.02253},
  urldate    = {2019-08-08},
  abstract   = {We present LumièreNet, a simple, modular, and completely deep-learning based architecture that synthesizes, high quality, full-pose headshot lecture videos from instructor's new audio narration of any length. Unlike prior works, LumièreNet is entirely composed of trainable neural network modules to learn mapping functions from the audio to video through (intermediate) estimated pose-based compact and abstract latent codes. Our video demos are available at [22] and [23].},
  keywords   = {audio processing, computer vision, lectures, machine learning, O, pattern recognition, speech processing},
  pubstate   = {published},
  tppubtype  = {article}
}
Kemper, Lorenz
Predicting Student Dropout: A Machine Learning Approach Unveröffentlicht
2018.
Abstract | Links | BibTeX | Schlagwörter: decision trees, dropout, higher education, logistic regression, machine learning, massive open online courses (MOOCs), O, prediction, students, Studienerfolg
@comment{Working paper. @unpublished requires a note field (was missing; added).
Abstract contained WordPress hash-mangled percent signs ("95{37d1f...}"),
restored to escaped \% literals. Keyword typo "descision trees" corrected.
institution is nonstandard for @unpublished but harmless (ignored fields are
skipped); kept for the export tool that reads it.}
@unpublished{Kemper2018,
  title       = {Predicting Student Dropout: A Machine Learning Approach},
  author      = {Kemper, Lorenz},
  year        = {2018},
  date        = {2018-02-01},
  url         = {https://www.researchgate.net/publication/322919234_Predicting_Student_Dropout_a_Machine_Learning_Approach},
  urldate     = {2018-08-22},
  institution = {Karlsruhe Institute of Technology (KIT)},
  note        = {Working paper, Karlsruhe Institute of Technology (KIT)},
  abstract    = {We perform two approaches of machine learning, logistic regression and decision trees, to predict student dropout at the Karlsruhe Institute of Technology (KIT). The models are computed on the basis of examination data, i.e. data available at all universities without need of collection. Therefore, we propose a methodical approach that may be put in practice with relative ease at other institutions. Using a Hellinger-Distance splitting approach we find decision trees to produce slightly better results. However, both methods yield high prediction accuracies of up to 95\% after three semesters. A classification with more than 83\% accuracy is already possible after the first semester. Within our analysis we show, that resampling techniques can improve the detection of at-risk students.},
  keywords    = {decision trees, dropout, higher education, logistic regression, machine learning, massive open online courses (MOOCs), O, prediction, students, Studienerfolg},
  pubstate    = {published},
  tppubtype   = {unpublished}
}