@inproceedings{0f742666af09459d959e5d44a1427f81,
title = "Fact-level Extractive Summarization with Hierarchical Graph Mask on BERT",
abstract = "Most current extractive summarization models generate summaries by selecting salient sentences. However, one problem with sentence-level extractive summarization is the gap between the human-written gold summary and the oracle sentence labels. In this paper, we propose to extract fact-level semantic units for better extractive summarization. We also introduce a hierarchical structure, which incorporates multiple levels of textual granularity into the model. In addition, we integrate our model with BERT using a hierarchical graph mask. This allows us to combine BERT{\textquoteright}s natural language understanding ability with the structural information, without increasing the scale of the model. Experiments on the CNN/DailyMail dataset show that our model achieves state-of-the-art results.",
author = "Ruifeng Yuan and Zili Wang and Wenjie Li",
note = "28th International Conference on Computational Linguistics, COLING 2020; Conference date: 08-12-2020 Through 13-12-2020",
year = "2020",
language = "English",
series = "COLING 2020 - 28th International Conference on Computational Linguistics, Proceedings of the Conference",
publisher = "Association for Computational Linguistics (ACL)",
pages = "5629--5639",
editor = "Donia Scott and Nuria Bel and Chengqing Zong",
booktitle = "COLING 2020 - 28th International Conference on Computational Linguistics, Proceedings of the Conference",
address = "United States",
}