@comment{Entry exported from a web page; page chrome ("Hoppa till huvudinnehåll" / "BibTeX" heading) removed.}

@inproceedings{periti-etal-2025-definition-355273,
	title        = {Definition Generation for Word Meaning Modeling: Monolingual, Multilingual, and Cross-Lingual Perspectives},
	abstract     = {The task of Definition Generation has recently gained attention as an interpretable approach to modeling word meaning. Thus far, most research has been conducted in English, with limited work and resources for other languages. In this work, we expand Definition Generation beyond English to a suite of 22 languages and evaluate Llama-based models within a monolingual, multilingual, and cross-lingual setting. Our experiments show that monolingual fine-tuning consistently outperforms pretrained baselines, with the largest gains observed in languages with lower initial performance; and that multilingual fine-tuning does not consistently improve performance on the individual fine-tuning languages. Our cross-lingual evaluation reveals that models fine-tuned on a single language typically lose the ability to generate definitions in other languages, whereas multilingual models exhibit robust generalization even to languages unseen during fine-tuning.},
	booktitle    = {Proceedings of the 2025 Conference on Empirical Methods in Natural Language Processing},
	author       = {Periti, Francesco and Goworek, Roksana and Dubossarsky, Haim and Tahmasebi, Nina},
	year         = {2025},
	publisher    = {Association for Computational Linguistics},
	pages        = {26015--26035},
}