Copyright © 2026. The authors retain the copyright of this article. This article is an open access article distributed under the Creative Commons Attribution License, which permits unrestricted use, distribution, and reproduction in any medium, provided the original work is properly cited.
@article{187026,
  author   = {Deebak, S. J. and Ranjani, G. and Madhorubagan, E.},
  title    = {A Survey Paper on Deep Learning Model for Predicting Student Feedback Using Diverse Data},
  journal  = {International Journal of Innovative Research in Technology},
  year     = {2025},
  volume   = {12},
  number   = {6},
  pages    = {3599--3604},
  issn     = {2349-6002},
  url      = {https://ijirt.org/article?manuscript=187026},
  abstract = {This research proposes a multi-modal deep learning framework to enhance student feedback analysis by integrating text, audio, images, and video data. Traditional feedback often overlooks emotional and non-verbal cues, limiting insight. To address this, the system combines sentiment analysis, emotion recognition, and visual attention tracking. It uses BERT for text, CNN-LSTM for audio, ResNet/VGG for facial expressions, and 3D-CNN/I3D for video-based engagement. These modalities are fused through attention mechanisms or multimodal transformers to generate a unified feedback profile. This data-driven approach supports improved teaching strategies and fosters an adaptive, emotionally aware learning environment that boosts student engagement and outcomes.},
  keywords = {Multi-modal Deep Learning, Student Feedback Analysis, Emotion Recognition, Sentiment Analysis, Engagement Detection, BERT, CNN-LSTM, Educational Data Mining},
  month    = nov,
}
Submit your research paper and those of your network (friends, colleagues, or peers) through your IPN account, and receive 800 INR for each paper that gets published.
Join Now. National Conference on Sustainable Engineering and Management - 2024. Last Date: 15th March 2024.
Submit inquiry