@article{ART003055323,
  author  = {Kwon, Soon-chan and Lee, Dong-Hee and Jang, Beakcheol},
  title   = {Zero-shot {Korean} Sentiment Analysis with Large Language Models: Comparison with Pre-trained Language Models},
  journal = {Journal of The Korea Society of Computer and Information},
  issn    = {1598-849X},
  year    = {2024},
  volume  = {29},
  number  = {2},
  pages   = {43--50},
  doi     = {10.9708/jksci.2024.29.02.043},
}
TY - JOUR
AU - Soon-chan Kwon
AU - Dong-Hee Lee
AU - BEAKCHEOL JANG
TI - Zero-shot Korean Sentiment Analysis with Large Language Models: Comparison with Pre-trained Language Models
JO - Journal of The Korea Society of Computer and Information
PY - 2024
VL - 29
IS - 2
PB - The Korean Society Of Computer And Information
SP - 43
EP - 50
SN - 1598-849X
AB - This paper evaluates the Korean sentiment analysis performance of large language models like GPT-3.5 and GPT-4 using a zero-shot approach facilitated by the ChatGPT API, comparing them to pre-trained Korean models such as KoBERT. Through experiments utilizing various Korean sentiment analysis datasets in fields like movies, gaming, and shopping, the efficiency of these models is validated. The results reveal that the LMKor-ELECTRA model displayed the highest performance based on F1-score, while GPT-4 particularly achieved high accuracy and F1-scores in movie and shopping datasets. This indicates that large language models can perform effectively in Korean sentiment analysis without prior training on specific datasets, suggesting their potential in zero-shot learning. However, relatively lower performance in some datasets highlights the limitations of the zero-shot based methodology. This study explores the feasibility of using large language models for Korean sentiment analysis, providing significant implications for future research in this area.
KW - Language Model;Sentiment analysis;AI;Natural Language Processing;Deep Learning
DO - 10.9708/jksci.2024.29.02.043
ER -
Soon-chan Kwon, Dong-Hee Lee and BEAKCHEOL JANG. (2024). Zero-shot Korean Sentiment Analysis with Large Language Models: Comparison with Pre-trained Language Models. Journal of The Korea Society of Computer and Information, 29(2), 43-50.
Soon-chan Kwon, Dong-Hee Lee and BEAKCHEOL JANG. 2024, "Zero-shot Korean Sentiment Analysis with Large Language Models: Comparison with Pre-trained Language Models", Journal of The Korea Society of Computer and Information, vol.29, no.2, pp.43-50. Available from: doi:10.9708/jksci.2024.29.02.043
Soon-chan Kwon, Dong-Hee Lee, BEAKCHEOL JANG "Zero-shot Korean Sentiment Analysis with Large Language Models: Comparison with Pre-trained Language Models" Journal of The Korea Society of Computer and Information 29.2 (2024): 43-50.
Soon-chan Kwon, Dong-Hee Lee, BEAKCHEOL JANG. Zero-shot Korean Sentiment Analysis with Large Language Models: Comparison with Pre-trained Language Models. 2024; 29(2), 43-50. Available from: doi:10.9708/jksci.2024.29.02.043
Soon-chan Kwon, Dong-Hee Lee and BEAKCHEOL JANG. "Zero-shot Korean Sentiment Analysis with Large Language Models: Comparison with Pre-trained Language Models" Journal of The Korea Society of Computer and Information 29, no.2 (2024) : 43-50. doi: 10.9708/jksci.2024.29.02.043
Soon-chan Kwon; Dong-Hee Lee; BEAKCHEOL JANG. Zero-shot Korean Sentiment Analysis with Large Language Models: Comparison with Pre-trained Language Models. Journal of The Korea Society of Computer and Information, 29(2), 43-50. doi: 10.9708/jksci.2024.29.02.043
Soon-chan Kwon; Dong-Hee Lee; BEAKCHEOL JANG. Zero-shot Korean Sentiment Analysis with Large Language Models: Comparison with Pre-trained Language Models. Journal of The Korea Society of Computer and Information. 2024; 29(2) 43-50. doi: 10.9708/jksci.2024.29.02.043
Soon-chan Kwon, Dong-Hee Lee, BEAKCHEOL JANG. Zero-shot Korean Sentiment Analysis with Large Language Models: Comparison with Pre-trained Language Models. 2024; 29(2), 43-50. Available from: doi:10.9708/jksci.2024.29.02.043
Soon-chan Kwon, Dong-Hee Lee and BEAKCHEOL JANG. "Zero-shot Korean Sentiment Analysis with Large Language Models: Comparison with Pre-trained Language Models" Journal of The Korea Society of Computer and Information 29, no.2 (2024) : 43-50. doi: 10.9708/jksci.2024.29.02.043