@article{ART003280310,
author={Ho-min Jung and Tae-Young Lee and Byung-In Choi},
title={Minimization of Performance Degrading in Lightweight and Quantized Super-Resolution Models Through Feature-based Knowledge Distillation},
journal={Journal of The Korea Society of Computer and Information},
issn={1598-849X},
year={2025},
volume={30},
number={12},
pages={37-49}
}
TY - JOUR
AU - Ho-min Jung
AU - Tae-Young Lee
AU - Byung-In Choi
TI - Minimization of Performance Degrading in Lightweight and Quantized Super-Resolution Models Through Feature-based Knowledge Distillation
JO - Journal of The Korea Society of Computer and Information
PY - 2025
VL - 30
IS - 12
PB - The Korean Society Of Computer And Information
SP - 37
EP - 49
SN - 1598-849X
AB - This study proposes a knowledge distillation (KD) method that minimizes the performance degradation caused by lightweighting and quantization in super-resolution (SR) tasks. The method is designed to leverage local and global feature information simultaneously to maintain detail-restoration performance, and the network is optimized for edge devices for validation. At the local level, a spatial L1 loss is used to preserve feature information such as boundaries, textures, and fine patterns. At the global level, a 2D FFT-based frequency transformation is employed to reflect spatial characteristics and emphasize high-frequency components. Considering both the semantic context and the spatial structure of images in this way preserves fine details and structural consistency during the SR process. For verification, the network was optimized for real-time operation on edge devices based on a performance comparison across different activation functions, and the local/global feature-based KD strategy was applied during initial training and quantization-aware training (QAT) to minimize performance loss. With the optimized network, inference speed on edge devices improved by more than 7% compared to the baseline. The proposed method showed performance degradation of at most 0.12% in PSNR, whereas conventional QAT-based quantized models degraded by approximately 1.15%. Thus, high-quality SR can be achieved even with lightweight models.
KW - Light-weight Super-Resolution;Knowledge Distillation;Quantization;Edge Device;NPU
DO -
UR -
ER -
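The abstract above describes the distillation objective only in prose. As a rough illustration, here is a minimal PyTorch sketch (not the authors' implementation) of a feature-based distillation loss combining a local spatial L1 term with a global 2D-FFT frequency term; the function name, tensor shapes, and weighting factors alpha and beta are assumptions.

```python
# Hedged sketch of a local/global feature-based KD loss, assuming
# student/teacher feature maps of shape (N, C, H, W); weights are illustrative.
import torch
import torch.nn.functional as F


def feature_kd_loss(student_feat: torch.Tensor,
                    teacher_feat: torch.Tensor,
                    alpha: float = 1.0,
                    beta: float = 1.0) -> torch.Tensor:
    # Local term: spatial L1 between feature maps preserves boundaries,
    # textures, and fine patterns.
    local_loss = F.l1_loss(student_feat, teacher_feat)

    # Global term: compare 2D FFT magnitudes to reflect spatial structure
    # and emphasize high-frequency components.
    student_freq = torch.fft.fft2(student_feat, norm="ortho")
    teacher_freq = torch.fft.fft2(teacher_feat, norm="ortho")
    global_loss = F.l1_loss(torch.abs(student_freq), torch.abs(teacher_freq))

    return alpha * local_loss + beta * global_loss


if __name__ == "__main__":
    s = torch.randn(2, 64, 32, 32)   # student feature map (dummy data)
    t = torch.randn(2, 64, 32, 32)   # teacher feature map (dummy data)
    print(feature_kd_loss(s, t).item())
```

In the paper's setting, a loss of this form would be added to the usual SR reconstruction loss during both initial training and QAT.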
Ho-min Jung, Tae-Young Lee and Byung-In Choi. (2025). Minimization of Performance Degrading in Lightweight and Quantized Super-Resolution Models Through Feature-based Knowledge Distillation. Journal of The Korea Society of Computer and Information, 30(12), 37-49.
Ho-min Jung, Tae-Young Lee and Byung-In Choi. 2025, "Minimization of Performance Degrading in Lightweight and Quantized Super-Resolution Models Through Feature-based Knowledge Distillation", Journal of The Korea Society of Computer and Information, vol.30, no.12, pp.37-49.
Ho-min Jung, Tae-Young Lee, Byung-In Choi "Minimization of Performance Degrading in Lightweight and Quantized Super-Resolution Models Through Feature-based Knowledge Distillation" Journal of The Korea Society of Computer and Information 30.12 pp.37-49 (2025) : 37.
Ho-min Jung, Tae-Young Lee, Byung-In Choi. Minimization of Performance Degrading in Lightweight and Quantized Super-Resolution Models Through Feature-based Knowledge Distillation. Journal of The Korea Society of Computer and Information. 2025; 30(12), 37-49.
Ho-min Jung, Tae-Young Lee and Byung-In Choi. "Minimization of Performance Degrading in Lightweight and Quantized Super-Resolution Models Through Feature-based Knowledge Distillation" Journal of The Korea Society of Computer and Information 30, no.12 (2025) : 37-49.
Ho-min Jung; Tae-Young Lee; Byung-In Choi. (2025). Minimization of Performance Degrading in Lightweight and Quantized Super-Resolution Models Through Feature-based Knowledge Distillation. Journal of The Korea Society of Computer and Information, 30(12), 37-49.
Ho-min Jung; Tae-Young Lee; Byung-In Choi. Minimization of Performance Degrading in Lightweight and Quantized Super-Resolution Models Through Feature-based Knowledge Distillation. Journal of The Korea Society of Computer and Information. 2025; 30(12) 37-49.