@inproceedings{gung-palmer-2021-predicate,
    title = "Predicate Representations and Polysemy in {V}erb{N}et Semantic Parsing",
    author = "Gung, James and
      Palmer, Martha",
    editor = "Zarrie{\ss}, Sina and
      Bos, Johan and
      van Noord, Rik and
      Abzianidze, Lasha",
    booktitle = "Proceedings of the 14th International Conference on Computational Semantics (IWCS)",
    month = jun,
    year = "2021",
    address = "Groningen, The Netherlands (online)",
    publisher = "Association for Computational Linguistics",
    url = "https://aclanthology.org/2021.iwcs-1.6",
    pages = "51--62",
    abstract = "Despite recent advances in semantic role labeling propelled by pre-trained text encoders like BERT, performance lags behind when applied to predicates observed infrequently during training or to sentences in new domains. In this work, we investigate how role labeling performance on low-frequency predicates and out-of-domain data can be further improved by using VerbNet, a verb lexicon that groups verbs into hierarchical classes based on shared syntactic and semantic behavior and defines semantic representations describing relations between arguments. We find that VerbNet classes provide an effective level of abstraction, improving generalization on low-frequency predicates by allowing them to learn from the training examples of other predicates belonging to the same class. We also find that joint training of VerbNet role labeling and predicate disambiguation of VerbNet classes for polysemous verbs leads to improvements in both tasks, naturally supporting the extraction of VerbNet{'}s semantic representations.",
}
<?xml version="1.0" encoding="UTF-8"?>
<modsCollection xmlns="http://www.loc.gov/mods/v3">
<mods ID="gung-palmer-2021-predicate">
    <titleInfo>
        <title>Predicate Representations and Polysemy in VerbNet Semantic Parsing</title>
    </titleInfo>
    <name type="personal">
        <namePart type="given">James</namePart>
        <namePart type="family">Gung</namePart>
        <role>
            <roleTerm authority="marcrelator" type="text">author</roleTerm>
        </role>
    </name>
    <name type="personal">
        <namePart type="given">Martha</namePart>
        <namePart type="family">Palmer</namePart>
        <role>
            <roleTerm authority="marcrelator" type="text">author</roleTerm>
        </role>
    </name>
    <originInfo>
        <dateIssued>2021-06</dateIssued>
    </originInfo>
    <typeOfResource>text</typeOfResource>
    <relatedItem type="host">
        <titleInfo>
            <title>Proceedings of the 14th International Conference on Computational Semantics (IWCS)</title>
        </titleInfo>
        <name type="personal">
            <namePart type="given">Sina</namePart>
            <namePart type="family">Zarrieß</namePart>
            <role>
                <roleTerm authority="marcrelator" type="text">editor</roleTerm>
            </role>
        </name>
        <name type="personal">
            <namePart type="given">Johan</namePart>
            <namePart type="family">Bos</namePart>
            <role>
                <roleTerm authority="marcrelator" type="text">editor</roleTerm>
            </role>
        </name>
        <name type="personal">
            <namePart type="given">Rik</namePart>
            <namePart type="family">van Noord</namePart>
            <role>
                <roleTerm authority="marcrelator" type="text">editor</roleTerm>
            </role>
        </name>
        <name type="personal">
            <namePart type="given">Lasha</namePart>
            <namePart type="family">Abzianidze</namePart>
            <role>
                <roleTerm authority="marcrelator" type="text">editor</roleTerm>
            </role>
        </name>
        <originInfo>
            <publisher>Association for Computational Linguistics</publisher>
            <place>
                <placeTerm type="text">Groningen, The Netherlands (online)</placeTerm>
            </place>
        </originInfo>
        <genre authority="marcgt">conference publication</genre>
    </relatedItem>
    <abstract>Despite recent advances in semantic role labeling propelled by pre-trained text encoders like BERT, performance lags behind when applied to predicates observed infrequently during training or to sentences in new domains. In this work, we investigate how role labeling performance on low-frequency predicates and out-of-domain data can be further improved by using VerbNet, a verb lexicon that groups verbs into hierarchical classes based on shared syntactic and semantic behavior and defines semantic representations describing relations between arguments. We find that VerbNet classes provide an effective level of abstraction, improving generalization on low-frequency predicates by allowing them to learn from the training examples of other predicates belonging to the same class. We also find that joint training of VerbNet role labeling and predicate disambiguation of VerbNet classes for polysemous verbs leads to improvements in both tasks, naturally supporting the extraction of VerbNet’s semantic representations.</abstract>
    <identifier type="citekey">gung-palmer-2021-predicate</identifier>
    <location>
        <url>https://aclanthology.org/2021.iwcs-1.6</url>
    </location>
    <part>
        <date>2021-06</date>
        <extent unit="page">
            <start>51</start>
            <end>62</end>
        </extent>
    </part>
</mods>
</modsCollection>
%0 Conference Proceedings
%T Predicate Representations and Polysemy in VerbNet Semantic Parsing
%A Gung, James
%A Palmer, Martha
%Y Zarrieß, Sina
%Y Bos, Johan
%Y van Noord, Rik
%Y Abzianidze, Lasha
%S Proceedings of the 14th International Conference on Computational Semantics (IWCS)
%D 2021
%8 June
%I Association for Computational Linguistics
%C Groningen, The Netherlands (online)
%F gung-palmer-2021-predicate
%X Despite recent advances in semantic role labeling propelled by pre-trained text encoders like BERT, performance lags behind when applied to predicates observed infrequently during training or to sentences in new domains. In this work, we investigate how role labeling performance on low-frequency predicates and out-of-domain data can be further improved by using VerbNet, a verb lexicon that groups verbs into hierarchical classes based on shared syntactic and semantic behavior and defines semantic representations describing relations between arguments. We find that VerbNet classes provide an effective level of abstraction, improving generalization on low-frequency predicates by allowing them to learn from the training examples of other predicates belonging to the same class. We also find that joint training of VerbNet role labeling and predicate disambiguation of VerbNet classes for polysemous verbs leads to improvements in both tasks, naturally supporting the extraction of VerbNet’s semantic representations.
%U https://aclanthology.org/2021.iwcs-1.6
%P 51-62
Markdown (Informal)
[Predicate Representations and Polysemy in VerbNet Semantic Parsing](https://aclanthology.org/2021.iwcs-1.6) (Gung & Palmer, IWCS 2021)
ACL
James Gung and Martha Palmer. 2021. [Predicate Representations and Polysemy in VerbNet Semantic Parsing](https://aclanthology.org/2021.iwcs-1.6). In *Proceedings of the 14th International Conference on Computational Semantics (IWCS)*, pages 51–62, Groningen, The Netherlands (online). Association for Computational Linguistics.