Copyright © 2026 Authors retain the copyright of this article. This article is an open access article distributed under the Creative Commons Attribution License, which permits unrestricted use, distribution, and reproduction in any medium, provided the original work is properly cited.
@article{176309,
  author        = {S Angu Ishwarya and A Kishorraj and M Mithreswaran and L Nithish and R Padmapriya and A Aruna},
  internal-note = {NOTE(review): author names appear initials-first as scraped (e.g. "S Angu Ishwarya"); confirm surname/given-name split against the paper and convert to "Last, First" form if needed},
  title         = {Emotion-Based Music Recommendation System With Chatbot},
  journal       = {International Journal of Innovative Research in Technology},
  year          = {2025},
  month         = apr,
  volume        = {11},
  number        = {11},
  pages         = {5167--5173},
  issn          = {2349-6002},
  url           = {https://ijirt.org/article?manuscript=176309},
  abstract      = {The goal of the project "EmoTunes - Emotion-Based Music Recommendation" is to music engagement by offering users customized song recommendations based on states of mind The system analyzes facial expressions using techniques for real-time emotion recognition. expressions that can be used to identify positive, negative, or neutral emotions. Singing is recommended from a music dataset classified according to emotional tags, ensuring that the recommendations adapt to the user's current state of mind. Additionally, integration with a platform for streaming music Users can directly access the recommended tracks by using APIs, like lastfm. a user-friendly and seamless experience. Unlike traditional music recommendation systems like Spotify and Apple Music, which are based on user preferences and history and collaborate Emotunes focuses on the dynamic nature of emotions rather than filtering algorithms. Existing procedures Emotional state of users in real time is often overlooked, but Emotunes bridges this gap by integrating emotion recognition into the process of making recommendations. sophisticated methods like For accurate facial emotion detection, Convolutional Neural Networks (CNNs) are used. By making use of the characteristics of music and the dynamic emotional tagging. Not only does this novel strategy increase user engagement, but it also aligns music. suggestions based on the mood of the listener right now, providing a one-of-a-kind and emotionally responsive expertise in listening Emotunes shows promising results in, as demonstrated by user feedback. making music a more pleasurable experience and a compelling solution in the ever-changing landscape of systems of music based on emotions},
  keywords      = {Emotion Recognition, Music Recommendation, Facial Expression Analysis, Convolutional Neural Networks (CNNs)},
}
Submit your research paper and those of your network (friends, colleagues, or peers) through your IPN account, and receive 800 INR for each paper that gets published.
Join Now. National Conference on Sustainable Engineering and Management - 2024. Last Date: 15th March 2024.
Submit inquiry