Copyright © 2026 Authors retain the copyright of this article. This article is an open access article distributed under the Creative Commons Attribution License which permits unrestricted use, distribution, and reproduction in any medium, provided the original work is properly cited.
@article{188344,
  author   = {Kunte, Apoorva and Mangle, Sahil and Munshi, Nitya and Mundada, Kapil},
  title    = {{Airpiano} -- {Python} Based Virtual Piano},
  journal  = {International Journal of Innovative Research in Technology},
  year     = {2025},
  volume   = {12},
  number   = {7},
  pages    = {8251--8254},
  issn     = {2349-6002},
  url      = {https://ijirt.org/article?manuscript=188344},
  abstract = {The fusion of computer vision and gesture recognition has enabled novel forms of human-computer interaction, especially in creative domains like music. Air Piano is a virtual instrument that lets users simulate piano playing through mid-air hand gestures, using only a webcam. MediaPipe handles real-time hand tracking[1], while OpenCV processes video input[2] and Pygame plays mapped audio notes for each finger. Gesture-based controls enable volume adjustment and musical scale switching, with voice feedback provided via pyttsx3. This system offers an engaging, touchless way to explore musical expression, showcasing the potential of computer vision in virtual instrument design.},
  keywords = {Virtual musical instrument, gesture recognition, hand tracking, computer vision, MediaPipe, OpenCV, Pygame, touchless interaction, human-computer interaction, music technology.},
  month    = dec,
}
Submit your research paper and those of your network (friends, colleagues, or peers) through your IPN account, and receive 800 INR for each paper that gets published.
Join Now. National Conference on Sustainable Engineering and Management - 2024. Last Date: 15th March 2024.
Submit inquiry