@inProceedings{bamutura-etal-2020-towards-296511,
  title     = {Towards Computational Resource Grammars for Runyankore and Rukiga},
  abstract  = {In this paper, we present computational resource grammars of the Runyankore and Rukiga (R\&R) languages. Runyankore and Rukiga are two under-resourced Bantu languages spoken by about 6 million people indigenous to South Western Uganda, East Africa. We used Grammatical Framework (GF), a multilingual grammar formalism and a special-purpose functional programming language, to formalise the descriptive grammar of these languages. To the best of our knowledge, these computational resource grammars are the first attempt at creating language resources for R\&R. In future work, we plan to use these grammars to bootstrap the generation of other linguistic resources, such as multilingual corpora, which make data-driven approaches to natural language processing feasible. In the meantime, they can be used to build Computer-Assisted Language Learning (CALL) applications for these languages, among others.},
  booktitle = {Proceedings of the 12th Language Resources and Evaluation Conference},
  author    = {Bamutura, David and Ljunglöf, Peter and Nabende, Peter},
  year      = {2020},
  publisher = {European Language Resources Association},
}

@inProceedings{lange-ljunglof-2020-learning-291243,
  title     = {Learning Domain-specific Grammars from a Small Number of Examples},
  abstract  = {In this paper we investigate the problem of grammar inference from a different perspective. The common approach is to try to infer a grammar directly from example sentences, which either requires a large training set or suffers from poor accuracy. We instead view it as a problem of grammar restriction, or sub-grammar extraction. We start from a large-scale resource grammar and a small number of examples, and find a sub-grammar that still covers all the examples. To do this we formulate the problem as a constraint satisfaction problem, and use an existing constraint solver to find the optimal grammar. We have performed experiments with English, Finnish, German, Swedish and Spanish, which show that 10–20 examples are often sufficient to learn an interesting domain grammar. Possible applications include computer-assisted language learning, domain-specific dialogue systems, computer games, Q/A systems, and others.},
  booktitle = {Proceedings of the 12th International Conference on Agents and Artificial Intelligence -- Volume 1: NLPinAI},
  author    = {Lange, Herbert and Ljunglöf, Peter},
  year      = {2020},
  publisher = {SciTePress},
  ISBN      = {978-989-758-395-7},
}