Copyright © 2026 Authors retain the copyright of this article. This article is an open access article distributed under the Creative Commons Attribution License which permits unrestricted use, distribution, and reproduction in any medium, provided the original work is properly cited.
@article{180216,
  author   = {Pujari, Sakshi and Vanjire, Mahesh and Ruplag, Isha and Patankar, Riya},
  title    = {Text Summarization Using {NLP}},
  journal  = {International Journal of Innovative Research in Technology},
  year     = {2025},
  month    = jun,
  volume   = {12},
  number   = {1},
  pages    = {849--850},
  issn     = {2349-6002},
  url      = {https://ijirt.org/article?manuscript=180216},
  abstract = {Automatic text summarization aims to condense lengthy documents into concise summaries while preserving key information. With the rise of transformer-based architectures, abstractive summarization has made significant advances. This paper presents a comparative study of popular models such as BART, T5, and PEGASUS on standard datasets like CNN/DailyMail and XSum. We evaluate their performance using ROUGE and BERTScore, and analyze their strengths and limitations.},
}
Submit your research paper and those of your network (friends, colleagues, or peers) through your IPN account, and receive 800 INR for each paper that gets published.
Join Now. National Conference on Sustainable Engineering and Management - 2024. Last Date: 15th March 2024
Submit inquiry