@InProceedings{Kettler2024_1217,
author = {Benedict Kettler and Stefan Hillmann},
booktitle = {Studientexte zur Sprachkommunikation: Elektronische Sprachsignalverarbeitung 2024},
title = {Supervised vs. Zero-Shot Learning: Automatic Classification of Comments on Educational Videos Using Pre-Trained Language Models},
year = {2024},
editor = {Timo Baumann},
month = mar,
pages = {149--156},
publisher = {TUDpress},
address = {Dresden},
abstract = {Despite the potential of AI, only a small percentage of small and medium-sized enterprises (SMEs) are adopting it, due to data issues, expertise gaps, and implementation barriers. Zero-shot learning offers a promising approach for SMEs by minimizing these obstacles. This paper explores the use of zero-shot learning in a real-world NLP classification task on online comments (comparable to intent classification tasks) from the e-learning platform Sofatutor. While fine-tuning achieved high accuracy (82.3–86.5%), zero-shot models showed lower performance (39.3–61.4%) due to different label selection, the grouping of different scenarios into one class, and the type of classification task. Even though the current accuracy is not sufficient for practical application, pre-filtering the data using zero-shot learning might be a promising option for SMEs.},
isbn = {978-3-95908-325-6},
issn = {0940-6832},
keywords = {Large Language Models},
url = {https://www.essv.de/pdf/2024_149_156.pdf},
doi = {10.35096/othr/pub-7092},
}