Copyright © 2026 Authors retain the copyright of this article. This article is an open access article distributed under the Creative Commons Attribution License which permits unrestricted use, distribution, and reproduction in any medium, provided the original work is properly cited.
@article{182930,
  author        = {Sultana, Ansari Asra and Qureshi, Mohammed Abbas and Rahman, Md. Ateeq Ur},
  title         = {Detection of Hand Gestures for Cerebral Palsy Persons Using Deep Neural Network},
  journal       = {International Journal of Innovative Research in Technology},
  year          = {2025},
  volume        = {12},
  number        = {2},
  pages         = {4160--4168},
  month         = jul,
  issn          = {2349-6002},
  url           = {https://ijirt.org/article?manuscript=182930},
  abstract      = {A framework for detecting gestures with the hands is utilized to interface with computers through hand gestures. Our goal is to generate a Windows operating systems program that uses a digital camera to identify real-time movement gestures. It combines hand identifying with real-time tracking of movement. This program employs a digital camera to recognize client gestures and carry out simple tasks in response. The individual must make a specific movement. The gesture in question is captured by the a live stream, which then detects it (by comparing it with a list of recognized gestures) and does what is necessary. It is possible to configure this software to execute in the meantime whenever the individual is using other apps and services. When using a hands-free strategy, this feature can be quite helpful. However, it might not be very useful for composing written records or accessing online resources.},
  keywords      = {Deep Neural Networks, Hand Gesture Recognition, Human-Computer Interaction, Blob Detection, Predictive Modeling},
  internal-note = {Names converted to "Last, First" form and honorific "Dr." dropped; surname split of the first author (Sultana vs. Ansari) assumed -- verify against the publisher page.},
}
Submit your research paper and those of your network (friends, colleagues, or peers) through your IPN account, and receive 800 INR for each paper that gets published.
Join Now. National Conference on Sustainable Engineering and Management - 2024. Last Date: 15th March 2024.
Submit inquiry