Copyright © 2026 Authors retain the copyright of this article. This article is an open access article distributed under the Creative Commons Attribution License which permits unrestricted use, distribution, and reproduction in any medium, provided the original work is properly cited.
@article{193342,
  author        = {T, Poornima},
  title         = {{BiLAT}: A {BiLSTM} Attention Transformer Model with Hyperparameter Optimization for Robust Fake News Detection},
  journal       = {International Journal of Innovative Research in Technology},
  year          = {2026},
  volume        = {12},
  number        = {10},
  pages         = {532--547},
  month         = mar,
  issn          = {2349-6002},
  url           = {https://ijirt.org/article?manuscript=193342},
  abstract      = {The rapid spread of misinformation on social media, particularly Twitter, poses major problems to information reliability and public trust. This paper includes a complete comparative examination of six deep learning models such as RNN, CNN, BERT, GRU, LSTM, and a suggested BiLSTM Attention Transformer (BiLAT) assessed across three benchmark datasets: Fake and Real News, FakeNewsNet, and ISOT Fabricated News. We systematically test five hyperparameter optimisation methods (Grid Search (GS), Random Search (RS), Bayesian Optimisation (BO), Genetic Algorithm (GA), and BOHB) to see how they affect model performance. Results reveal that transformer-based architectures greatly outperform traditional models, with BiLAT obtaining state-of-the-art performance, including 99\% accuracy on FakeNewsNet under BOHB optimization. BOHB consistently gives the best performance gains across all models and datasets, with an accuracy boost of 2--5\% over traditional optimisation methods. The findings suggest that integrating advanced transformer topologies with efficient hyperparameter optimization considerably boosts the ability to grasp linguistic intricacies inherent in disinformation. This paper shows the importance of architectural design and optimization technique in constructing effective, scalable fake news detection systems for social media contexts.},
  keywords      = {Fake News Detection, Deep Learning Models, Hyperparameter Optimization, Transformer Architectures, BOHB},
  internal-note = {Author parsed as surname "T", given name "Poornima" (South Indian initial-as-surname convention assumed) -- verify against the published paper. Title colon after "BiLAT" restored; it appears stripped in the site export.},
}
Submit your research paper and those of your network (friends, colleagues, or peers) through your IPN account, and receive 800 INR for each paper that gets published.
Join Now. National Conference on Sustainable Engineering and Management - 2024. Last Date: 15th March 2024.
Submit inquiry