Relation classification is associated with many potential applications in the
artificial intelligence area. Recent approaches usually leverage neural
networks based on structure features such as syntactic or dependency features
to solve this problem. However, high-cost structure features make such
approaches inconvenient to be directly used. In addition, structure features
are probably domain-dependent. Therefore, this paper proposes a bi-directional
long-short-term-memory recurrent-neural-network (Bi-LSTM-RNN) model based on
low-cost sequence features to address relation classification. This model
divides a sentence or text segment into five parts, namely two target entities
and their three contexts. It learns the representations of entities and their
contexts, and uses them to classify relations. We evaluate our model on two
standard benchmark datasets in different domains, namely SemEval-2010 Task 8
and BioNLP-ST 2016 Task BB3. In the former dataset, our model achieves
comparable performance compared with other models using sequence features. In
the latter dataset, our model obtains the third best results compared with
other models in the official evaluation. Moreover, we find that the context
between two target entities plays the most important role in relation
classification. Furthermore, statistical experiments show that the context
between two target entities can be used as an approximate replacement of the
shortest dependency path when dependency parsing is not used.
%0 Generic
%1 li2016bilstmrnn
%A Li, Fei
%A Zhang, Meishan
%A Fu, Guohong
%A Qian, Tao
%A Ji, Donghong
%D 2016
%K deep_learning lstm relex rnn
%T A Bi-LSTM-RNN Model for Relation Classification Using Low-Cost Sequence
Features
%U http://arxiv.org/abs/1608.07720
%X Relation classification is associated with many potential applications in the
artificial intelligence area. Recent approaches usually leverage neural
networks based on structure features such as syntactic or dependency features
to solve this problem. However, high-cost structure features make such
approaches inconvenient to be directly used. In addition, structure features
are probably domain-dependent. Therefore, this paper proposes a bi-directional
long-short-term-memory recurrent-neural-network (Bi-LSTM-RNN) model based on
low-cost sequence features to address relation classification. This model
divides a sentence or text segment into five parts, namely two target entities
and their three contexts. It learns the representations of entities and their
contexts, and uses them to classify relations. We evaluate our model on two
standard benchmark datasets in different domains, namely SemEval-2010 Task 8
and BioNLP-ST 2016 Task BB3. In the former dataset, our model achieves
comparable performance compared with other models using sequence features. In
the latter dataset, our model obtains the third best results compared with
other models in the official evaluation. Moreover, we find that the context
between two target entities plays the most important role in relation
classification. Furthermore, statistical experiments show that the context
between two target entities can be used as an approximate replacement of the
shortest dependency path when dependency parsing is not used.
@comment{arXiv preprint (1608.07720); entered as @misc with eprint/archiveprefix
  so eprint-aware styles render the arXiv identifier. Title acronym braced to
  survive sentence-casing styles.}
@misc{li2016bilstmrnn,
  abstract      = {Relation classification is associated with many potential applications in the
artificial intelligence area. Recent approaches usually leverage neural
networks based on structure features such as syntactic or dependency features
to solve this problem. However, high-cost structure features make such
approaches inconvenient to be directly used. In addition, structure features
are probably domain-dependent. Therefore, this paper proposes a bi-directional
long-short-term-memory recurrent-neural-network (Bi-LSTM-RNN) model based on
low-cost sequence features to address relation classification. This model
divides a sentence or text segment into five parts, namely two target entities
and their three contexts. It learns the representations of entities and their
contexts, and uses them to classify relations. We evaluate our model on two
standard benchmark datasets in different domains, namely SemEval-2010 Task 8
and BioNLP-ST 2016 Task BB3. In the former dataset, our model achieves
comparable performance compared with other models using sequence features. In
the latter dataset, our model obtains the third best results compared with
other models in the official evaluation. Moreover, we find that the context
between two target entities plays the most important role in relation
classification. Furthermore, statistic experiments show that the context
between two target entities can be used as an approximate replacement of the
shortest dependency path when dependency parsing is not used.},
  added-at      = {2018-02-22T11:28:57.000+0100},
  archiveprefix = {arXiv},
  author        = {Li, Fei and Zhang, Meishan and Fu, Guohong and Qian, Tao and Ji, Donghong},
  biburl        = {https://www.bibsonomy.org/bibtex/2423dda0e54fb90a9d5eb965a54030ce5/dallmann},
  description   = {() - 1608.07720.pdf},
  eprint        = {1608.07720},
  interhash     = {34c25247ce3edeb3941483763c0cc2a2},
  intrahash     = {423dda0e54fb90a9d5eb965a54030ce5},
  keywords      = {deep_learning lstm relex rnn},
  note          = {cite arxiv:1608.07720},
  timestamp     = {2018-02-22T11:28:57.000+0100},
  title         = {A {Bi-LSTM-RNN} Model for Relation Classification Using Low-Cost Sequence
Features},
  url           = {http://arxiv.org/abs/1608.07720},
  year          = {2016},
}