BibTeX

@inproceedings{dannells-etal-2024-transformer-338708,
	title        = {Transformer-based Swedish Semantic Role Labeling through Transfer Learning},
	abstract     = {Semantic Role Labeling (SRL) is a task in natural language understanding where the goal is to extract semantic roles for a given sentence. English SRL has achieved state-of-the-art performance using Transformer techniques and supervised learning. However, this technique is not a viable choice for smaller languages like Swedish due to the limited amount of training data. In this paper, we present the first effort in building a Transformer-based SRL system for Swedish by exploring multilingual and cross-lingual transfer learning methods and leveraging the Swedish FrameNet resource. We demonstrate that multilingual transfer learning outperforms two different cross-lingual transfer models. We also found some differences between frames in FrameNet that can either hinder or enhance the model’s performance. The resulting end-to-end model is freely available and will be made accessible through Språkbanken Text’s research infrastructure.},
	booktitle    = {Proceedings of the 2024 Joint International Conference on Computational Linguistics, Language Resources and Evaluation (LREC-COLING 2024), 20-25 May, 2024, Torino, Italia},
	author       = {Dannélls, Dana and Johansson, Richard and Buhr, Lucy Yang},
	year         = {2024},
	publisher    = {ELRA and ICCL},
	address      = {Turin, Italy},
	ISBN         = {978-2-493814-10-4},
}