Copyright © 2025. The authors retain the copyright of this article. This article is an open access article distributed under the Creative Commons Attribution License, which permits unrestricted use, distribution, and reproduction in any medium, provided the original work is properly cited.
@article{165091,
  author   = {Avdhut Satish Patil and Pruthviraj Kisan Zinjade and Nikhil Vijay Dongre and Sanket Sandesh Rathod},
  title    = {Semantic Segmentation in VHR Remote-Sensing Images},
  journal  = {International Journal of Innovative Research in Technology},
  year     = {},
  volume   = {11},
  number   = {1},
  pages    = {65-70},
  issn     = {2349-6002},
  url      = {https://ijirt.org/article?manuscript=165091},
  abstract = {The interactions between target features are more intricate in high-resolution remote-sensing images, which carry richer feature information such as texture, structure, and geometry. These characteristics make it hard for standard convolutional neural networks to classify features in remote-sensing images and achieve optimal results. As a solution to this problem, we propose DMAU-Net, an attention-based multiscale max-pooling dense network built on U-Net for ground-object classification. The network contains a built-in max-pooling module that uses dense connections in the encoder to improve the quality of the feature maps and, consequently, the network's capacity to extract features. In the decoder, we introduce the Efficient Channel Attention (ECA) module, which amplifies informative channels and suppresses superfluous ones.},
  keywords = {High-resolution remote-sensing images, ground object classification, dense connections, multiscale maximum pooling, semantic segmentation, CNN algorithm},
  month    = {},
}
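The ECA module named in the abstract is a published attention mechanism (Wang et al., ECA-Net), not something specific to this paper. The following is a minimal PyTorch sketch of a standard ECA block, not the authors' own implementation; the class name and the fixed kernel size k_size=3 are illustrative assumptions (the ECA-Net paper derives the kernel size adaptively from the channel count).

import torch
import torch.nn as nn

class ECA(nn.Module):
    # Sketch of Efficient Channel Attention (Wang et al., ECA-Net):
    # a per-channel gate is computed by a 1-D convolution over the
    # globally average-pooled channel descriptor, then used to
    # rescale the input feature map.
    def __init__(self, k_size: int = 3):
        super().__init__()
        self.avg_pool = nn.AdaptiveAvgPool2d(1)
        self.conv = nn.Conv1d(1, 1, kernel_size=k_size,
                              padding=k_size // 2, bias=False)
        self.sigmoid = nn.Sigmoid()

    def forward(self, x: torch.Tensor) -> torch.Tensor:
        # x: (B, C, H, W) -> per-channel descriptor (B, C, 1, 1)
        y = self.avg_pool(x)
        # Treat the C channels as a length-C 1-D signal: (B, 1, C)
        y = self.conv(y.squeeze(-1).transpose(-1, -2))
        # Back to (B, C, 1, 1) gating weights in [0, 1]
        y = self.sigmoid(y.transpose(-1, -2).unsqueeze(-1))
        # Amplify useful channels, suppress superfluous ones
        return x * y.expand_as(x)

# Usage on a hypothetical decoder feature map:
feats = torch.randn(2, 64, 128, 128)
out = ECA(k_size=3)(feats)  # same shape, channel-reweighted

Unlike an SE block, ECA avoids channel dimensionality reduction and adds only k_size parameters, which is why it is cheap enough to insert at every decoder stage.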