Copyright © 2026 Authors retain the copyright of this article. This article is an open access article distributed under the Creative Commons Attribution License which permits unrestricted use, distribution, and reproduction in any medium, provided the original work is properly cited.
@article{153373,
  author        = {Gulhane, Yogesh Haridas},
  title         = {Classification and Detection Model for Emotional States},
  journal       = {International Journal of Innovative Research in Technology},
  year          = {},
  volume        = {8},
  number        = {7},
  pages         = {10--14},
  issn          = {2349-6002},
  url           = {https://ijirt.org/article?manuscript=153373},
  abstract      = {Basic nature of features in speech under different emotional situations are different. Features are varied by place to place and gender and age also can reflect the variation in features of speech. In this research paper proposes to classify the types of emotions and impact of it on performance. Also, this research took a look at variation in features with respective genders, age and places. Implementation cases used data from three subjects. As part of the real input from a microphone, we recorded the voice of different subjects. The subjects were asked to express certain emotions when their speech was recorded. The subjects were studied Mongolian, Indians and they spoke English sentences under different emotional states. A microphone was used to record the speech and was kept at a distance about 15cms away from the mouth. The experiments were conducted in an ordinary classroom having an area of 25m2. For extracting features from the recorded speech segments, MATLAB functions were used. Success ratio of the model is 99%. Confusion Matrix is use to get the unidentified signals.},
  keywords      = {Signal Processing, Classification, Detection, SVM, Emotion Analysis},
  internal-note = {year missing from the publisher record -- TODO confirm from the IJIRT article page; empty month field dropped},
}
Submit your research paper and those of your network (friends, colleagues, or peers) through your IPN account, and receive 800 INR for each paper that gets published.
Join Now. National Conference on Sustainable Engineering and Management - 2024. Last Date: 15th March 2024
Submit inquiry