Copyright © 2025. Authors retain the copyright of this article. This article is an open-access article distributed under the Creative Commons Attribution License, which permits unrestricted use, distribution, and reproduction in any medium, provided the original work is properly cited.
@article{186509,
  author        = {Bhattacharjee, Chayan and Amin, Pooja},
  title         = {Evaluating Methods to Prevent Overfitting in Neural Networks Through Complexity Reduction},
  journal       = {International Journal of Innovative Research in Technology},
  year          = {2025},
  volume        = {12},
  pages         = {306--310},
  issn          = {2349-6002},
  url           = {https://ijirt.org/article?manuscript=186509},
  abstract      = {Manages deep networks of various parameters during the training and testing phase. As the number of parameters increases, these networks acquire the ability to adapt to different types of data records, contributing to incredible strength. However, this ability can also make neuronal networks sensitive, which are prone to over-adaptation. Over--Several strategies can be used to tackle the problem of adaptation. In this article, we will explore various methods to prevent the model from limiting model complexity, data expansion, weight normalization, occurrence, and early stopping.},
  keywords      = {Overfitting, Neural network generalization, Regularization, Model simplification},
  internal-note = {Cleaned from publisher export: year filled from the page's 2025 copyright notice (verify against the published record); removed empty month field and junk number={no}; pages hyphen fixed to en-dash range.},
}
Cite This Article
Submit your own research papers, as well as those of your network (friends, colleagues, or peers), through your IPN account, and receive 800 INR for each paper that gets published.
Join Now. National Conference on Sustainable Engineering and Management - 2024 — Last Date: 15th March 2024
Submit inquiry