Commit 35397009 authored by Tania Bladier

add new papers

parent 91df22d3
File added
@@ -1066,6 +1066,20 @@
url = {https://books.google.ru/books?id=iE8hEQAAQBAJ},
}
@InProceedings{10074239,
author = {Ameri, Kimia and Hempel, Michael and Sharif, Hamid and Lopez, Juan and Perumalla, Kalyan},
booktitle = {2023 International Conference on Computing, Networking and Communications (ICNC)},
title = {Impact of Grammar on Language Model Comprehension},
year = {2023},
month = {Feb},
pages = {19--24},
abstract = {Machine Learning and Natural Language Processing are playing an increasingly vital role in many different areas, including cybersecurity in Information Technology and Operational Technology networking, with many associated research challenges. In this paper, we introduce a new language model based on transformers with the addition of syntactic information into the embedding process. We show that our proposed Structurally Enriched Transformer (SET) language model outperforms baselines on a number of downstream tasks from the GLUE benchmark. Our model improved CoLA classification by 11 points over the BERT-Base model. The performance of attention-based models has been demonstrated to be significantly better than that of traditional algorithms in several NLP tasks. Transformers are composed of multiple attention heads stacked on top of each other. A Transformer is capable of generating abstract representations of the tokens input to an encoder, based on their relationship to all tokens in a sequence. Although such models can learn syntactic features from examples alone, researchers have found that explicitly feeding this information to deep learning models can significantly boost their performance. A complex model like the transformer may benefit from leveraging syntactic information such as part of speech (POS).},
doi = {10.1109/ICNC57223.2023.10074239},
file = {:Impact_of_Grammar_on_Language_Model_Comprehension.pdf:PDF},
groups = {ml-architechtures},
keywords = {Deep learning;Computational modeling;Syntactics;Transformers;Natural language processing;Magnetic heads;Grammar;Natural Language Processing;Transfer Learning;Transformers;BERT;Part of Speech;Grammar Enriched},
}
@Comment{jabref-meta: databaseType:bibtex;}
@Comment{jabref-meta: grouping:
......