Copyright © 2025. The authors retain the copyright of this article. This article is an open-access article distributed under the Creative Commons Attribution License, which permits unrestricted use, distribution, and reproduction in any medium, provided the original work is properly cited.
@article{160893,
  author        = {Maan, Sandeep and Devi, Gian},
  title         = {Review of Deep Learning Models from Convolution Neural Networks to Transformers},
  journal       = {International Journal of Innovative Research in Technology},
  volume        = {10},
  number        = {1},
  pages         = {1331--1335},
  issn          = {2349-6002},
  url           = {https://ijirt.org/article?manuscript=160893},
  abstract      = {Generative Artificial Intelligence has become synonym for Artificial Intelligence. Specifically, success of Large Language Model (LLM) is going to make long lasting disruptive effect. Application like ChatGPT [1] by OpenAI, BARD by Google are believed to change the human-machine relation in coming years. All these can be attributed to the developments in the field of deep learning during last decade. Things started with Convolutional Neural Networks (CNN) and advancement of GPUs that has made as lasting effect in the field of image processing. In this paper authors have reviewed features and limitation of most three most popular deep learning models viz. Convolution Neural Networks (CNN), Recurrent Neural Networks (RNN) and Transformer model. A systemic study of factors leading to the development of large language models is also presented.},
  keywords      = {Artificial Intelligence, Machine Learning, Deep Learning, Convolution Neural Networks (CNN), Recurrent Neural Networks (RNN), Attention, Transformers, Large Language Models (LLM)},
  internal-note = {Publication year and month were empty in the source record and have been removed (empty fields trigger BibTeX warnings). IJIRT vol. 10, no. 1 -- verify the actual publication year against the journal site and restore a year field.},
}
Cite This Article
Submit your research paper and those of your network (friends, colleagues, or peers) through your IPN account, and receive 800 INR for each paper that gets published.
Join Now. National Conference on Sustainable Engineering and Management - 2024. Last Date: 15th March 2024.
Submit inquiry