Copyright © 2026. The authors retain the copyright of this article. This article is an open-access article distributed under the Creative Commons Attribution License, which permits unrestricted use, distribution, and reproduction in any medium, provided the original work is properly cited.
@article{180865,
  author   = {Kakade, Sameer Sundarrao and Kamble, Pranav Shahaji and Kene, Prakash},
  title    = {Optimizing Transformers and Large Language Models: Effectiveness Through Training and Fine-Tuning},
  journal  = {International Journal of Innovative Research in Technology},
  year     = {2025},
  month    = jun,
  volume   = {12},
  number   = {1},
  pages    = {2727--2730},
  issn     = {2349-6002},
  url      = {https://ijirt.org/article?manuscript=180865},
  abstract = {Transformer-based large language models have significantly advanced the field of natural language processing, delivering cutting-edge results across numerous tasks. However, as these models scale to billions of parameters, fully fine-tuning them becomes increasingly resource-intensive, both in terms of computation and storage. This paper investigates strategies to enhance the training and fine-tuning of transformers, with an emphasis on parameter-efficient approaches. Commonly known as ``delta-tuning'', these techniques allow models to be adapted to new tasks by updating only a minimal part of the parameters, yet still achieve performance close to that of full fine-tuning. This research shows recent developments in this area, examining their effectiveness, scalability, and relevance across a variety of NLP applications. By lowering the hardware and memory requirements, parameter-efficient tuning emerges as a practical and scalable method for refining LLMs, enabling broader accessibility and easier deployment across different sectors.},
}
Submit your research paper and those of your network (friends, colleagues, or peers) through your IPN account, and receive 800 INR for each paper that gets published.
Join Now. National Conference on Sustainable Engineering and Management - 2024. Last Date: 15th March 2024.
Submit inquiry