@InProceedings{Mahmud-Uz-Zaman2021_1125,
  author    = {Md. Mahmud-Uz-Zaman and Stefan Schaffer and Tatjana Scheffler},
  booktitle = {Studientexte zur Sprachkommunikation: Elektronische Sprachsignalverarbeitung 2021},
  title     = {Comparing {BERT} with an intent based question answering setup for open-ended questions in the museum domain},
  year      = {2021},
  editor    = {Stefan Hillmann and Benjamin Weiss and Thilo Michael and Sebastian Möller},
  month     = mar,
  pages     = {247--253},
  publisher = {TUDpress},
  address   = {Dresden},
  abstract  = {BERT-based models achieve state-of-the-art performance for factoid question answering tasks. In this work, we investigate whether a pre-trained BERT model can also perform well for open-ended questions. We set up an online experiment, from which we collected 111 user-generated open-ended questions. These questions were passed to a pre-trained BERT QA model and a dedicated intent recognition based module. We have found that the simple intent based module was around 25% more often correct than the pre-trained BERT model, indicating that open-ended questions still require different solutions compared to factoid questions.},
  isbn      = {978-3-959082-27-3},
  issn      = {0940-6832},
  keywords  = {Sprachdialog},
  url       = {https://www.essv.de/pdf/2021_247_253.pdf},
}