@inproceedings{49fc889cca114463a979d6122f0e9337,
  title     = {Ensemble-based Fine-Tuning Strategy for Temporal Relation Extraction from the Clinical Narrative},
  author    = {Wang, Lijing and Miller, Timothy and Bethard, Steven and Savova, Guergana},
  editor    = {Naumann, Tristan and Bethard, Steven and Roberts, Kirk and Rumshisky, Anna},
  booktitle = {ClinicalNLP 2022 - 4th Workshop on Clinical Natural Language Processing, Proceedings},
  series    = {ClinicalNLP 2022 - 4th Workshop on Clinical Natural Language Processing, Proceedings},
  publisher = {Association for Computational Linguistics (ACL)},
  pages     = {103--108},
  year      = {2022},
  address   = {United States},
  language  = {English (US)},
  abstract  = {In this paper, we investigate ensemble methods for fine-tuning transformer-based pretrained models for clinical natural language processing tasks, specifically temporal relation extraction from the clinical narrative. Our experimental results on the THYME data show that ensembling as a fine-tuning strategy can further boost model performance over single learners optimized for hyperparameters. Dynamic snapshot ensembling is particularly beneficial as it fine-tunes a wide array of parameters and results in a 2.8\% absolute improvement in F1 over the base single learner.},
  note      = {Publisher Copyright: {\textcopyright} 2022 Association for Computational Linguistics.; 4th Workshop on Clinical Natural Language Processing, ClinicalNLP 2022 ; Conference date: 14-07-2022},
}