@article{ART002649847,
author={Euhee Kim},
title={The Ability of L2 LSTM Language Models to Learn the Filler-Gap Dependency},
journal={Journal of The Korea Society of Computer and Information},
issn={1598-849X},
year={2020},
volume={25},
number={11},
pages={27-40},
doi={10.9708/jksci.2020.25.11.027}
}
TY - JOUR
AU - Euhee Kim
TI - The Ability of L2 LSTM Language Models to Learn the Filler-Gap Dependency
JO - Journal of The Korea Society of Computer and Information
PY - 2020
VL - 25
IS - 11
PB - The Korean Society Of Computer And Information
SP - 27
EP - 40
SN - 1598-849X
AB - In this paper, we investigate the correlation between the amount of English input that Korean learners of English (L2ers) are exposed to and their sentence processing patterns by examining what Long Short-Term Memory (LSTM) language models (LMs) can learn about an implicit syntactic relationship: the filler-gap dependency. The filler-gap dependency is the relationship between a (wh-)filler, a wh-phrase such as 'what' or 'who' that appears overtly in clause-peripheral position, and its gap in clause-internal position, an invisible, empty syntactic position that must be filled by the (wh-)filler for proper interpretation. To model L2ers' English learning, we build LSTM LMs that learn a subset of the known restrictions on the filler-gap dependency from English sentences in an L2 corpus of the kind that L2ers can potentially encounter. Examining the LSTM LMs' behavior on controlled sentences constructed around the filler-gap dependency, we characterize L2ers' sentence processing with the information-theoretic metric of surprisal, which quantifies violations of the filler-gap dependency, that is, wh-licensing interaction effects. Furthermore, comparing the L2ers' LMs with a native speakers' LM on the filler-gap dependency, we not only find that both types of LM can track the abstract syntactic structures involved in the dependency, but also show, using linear mixed-effects regression models, that significant differences exist between them in processing it.
KW - LSTM language model;English sentence processing;filler-gap dependency;surprisal;linear mixed-effects regression model;wh-licensing interaction effects
DO - 10.9708/jksci.2020.25.11.027
ER -
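The abstract's key metric, surprisal, and the wh-licensing interaction derived from it can be made concrete with a short sketch. The Python code below is a minimal, hypothetical illustration in PyTorch: the architecture, layer sizes, and function names are assumptions for exposition, not the paper's actual implementation; only the definitions (surprisal(w_t) = -log2 P(w_t | w_1..w_{t-1}), and the interaction as a difference-in-differences over a 2x2 filler-by-gap design) follow the abstract.

import math
import torch
import torch.nn as nn

# Hypothetical LSTM LM for illustration; not the paper's architecture,
# vocabulary, or training setup.
class LSTMLanguageModel(nn.Module):
    def __init__(self, vocab_size, embed_dim=64, hidden_dim=128):
        super().__init__()
        self.embed = nn.Embedding(vocab_size, embed_dim)
        self.lstm = nn.LSTM(embed_dim, hidden_dim, batch_first=True)
        self.out = nn.Linear(hidden_dim, vocab_size)

    def forward(self, token_ids):          # token_ids: (batch, seq_len)
        hidden, _ = self.lstm(self.embed(token_ids))
        return self.out(hidden)            # next-token logits at each position

def sentence_surprisal(model, token_ids):
    # Total surprisal in bits: sum over t of -log2 P(w_t | w_1..w_{t-1}).
    with torch.no_grad():
        logits = model(token_ids[:, :-1])
        log_probs = torch.log_softmax(logits, dim=-1)
        target_log_probs = log_probs.gather(
            -1, token_ids[:, 1:].unsqueeze(-1)).squeeze(-1)
        return (-target_log_probs.sum() / math.log(2)).item()

# Wh-licensing interaction: a difference-in-differences over the 2x2
# (filler presence x gap presence) design described in the abstract.
def wh_licensing_interaction(s_nofiller_gap, s_filler_gap,
                             s_nofiller_nogap, s_filler_nogap):
    return ((s_nofiller_gap - s_filler_gap)
            - (s_nofiller_nogap - s_filler_nogap))

Under this (assumed) sign convention, a large positive interaction indicates that the presence of a filler lowers surprisal specifically where a gap occurs, i.e., the LM has learned that fillers license gaps.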
Euhee Kim. (2020). The Ability of L2 LSTM Language Models to Learn the Filler-Gap Dependency. Journal of The Korea Society of Computer and Information, 25(11), 27-40.
Euhee Kim. 2020, "The Ability of L2 LSTM Language Models to Learn the Filler-Gap Dependency", Journal of The Korea Society of Computer and Information, vol.25, no.11, pp.27-40. Available from: doi:10.9708/jksci.2020.25.11.027
Euhee Kim "The Ability of L2 LSTM Language Models to Learn the Filler-Gap Dependency" Journal of The Korea Society of Computer and Information 25.11 pp.27-40 (2020) : 27.
Euhee Kim. The Ability of L2 LSTM Language Models to Learn the Filler-Gap Dependency. Journal of The Korea Society of Computer and Information. 2020; 25(11): 27-40. Available from: doi:10.9708/jksci.2020.25.11.027
Euhee Kim. "The Ability of L2 LSTM Language Models to Learn the Filler-Gap Dependency" Journal of The Korea Society of Computer and Information 25, no.11 (2020) : 27-40.doi: 10.9708/jksci.2020.25.11.027
Euhee Kim. (2020). The Ability of L2 LSTM Language Models to Learn the Filler-Gap Dependency. Journal of The Korea Society of Computer and Information, 25(11), 27-40. doi: 10.9708/jksci.2020.25.11.027
Euhee Kim. The Ability of L2 LSTM Language Models to Learn the Filler-Gap Dependency. Journal of The Korea Society of Computer and Information. 2020; 25(11): 27-40. doi: 10.9708/jksci.2020.25.11.027