@article{Shuang:2019:0952-813X:455,
  title = "A word-building method based on neural network for text classification",
  journal = "Journal of Experimental & Theoretical Artificial Intelligence",
  parent_itemid = "infobike://tandf/teta",
  publishercode = "tandf",
  year = "2019",
  volume = "31",
  number = "3",
  publication date = "2019-05-04T00:00:00",
  pages = "455-474",
  itemtype = "ARTICLE",
  issn = "0952-813X",
  eissn = "1362-3079",
  url = "https://www.ingentaconnect.com/content/tandf/teta/2019/00000031/00000003/art00006",
  doi = "10.1080/0952813X.2019.1572654",
  keyword = "word-building method, text classification, structure information, long short-term memory, convolutional neural network",
  author = "Shuang and Guo and Zhang and Loo and Su",
  abstract = "Text classification is a foundational task in many natural language processing applications. Traditional text classifiers take words as the basic units and, as a first step, run a pre-training process (such as word2vec) to generate word vectors directly. However, none of them considers the information contained in word structure, which has been shown to be helpful for text classification. In this paper, we propose a word-building method based on a neural network model that decomposes a Chinese word into a sequence of radicals and learns structure information from these radical-level features, which is a key difference from existing models. A convolutional neural network is then applied to extract structure information of words from the radical sequence and generate a word vector, and a long short-term memory network is applied to generate the sentence vector used for prediction. The experimental results show that our model outperforms other existing models on the Chinese dataset. Our model is also applicable to English, where a word can be decomposed to the character level, which demonstrates the excellent generalisation ability of our model. The experimental results show that our model also outperforms others on the English dataset.",
}
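
The abstract describes a two-stage architecture: words are decomposed into radical (or character) sequences, a convolutional network pools the radical embeddings into a word vector, and an LSTM over the word vectors yields a sentence vector for classification. The following is a minimal illustrative sketch of that pipeline, not the authors' released code; all layer sizes, names, and the radical-decomposition step are assumptions made for the example.

# Minimal sketch (assumed hyperparameters, hypothetical class names) of the
# radical-CNN word encoder + LSTM sentence encoder pipeline the abstract outlines.
import torch
import torch.nn as nn

class RadicalCNNWordEncoder(nn.Module):
    def __init__(self, n_radicals, radical_dim=32, word_dim=128, kernel_size=3):
        super().__init__()
        self.embed = nn.Embedding(n_radicals, radical_dim, padding_idx=0)
        self.conv = nn.Conv1d(radical_dim, word_dim, kernel_size, padding=1)

    def forward(self, radical_ids):
        # radical_ids: (batch, n_words, max_radicals_per_word) integer indices
        b, w, r = radical_ids.shape
        x = self.embed(radical_ids.reshape(b * w, r))   # (b*w, r, radical_dim)
        x = self.conv(x.transpose(1, 2))                # (b*w, word_dim, r)
        x = torch.max(x, dim=2).values                  # max-over-time pooling
        return x.reshape(b, w, -1)                      # one vector per word

class RadicalCNNLSTMClassifier(nn.Module):
    def __init__(self, n_radicals, n_classes, word_dim=128, hidden_dim=128):
        super().__init__()
        self.word_encoder = RadicalCNNWordEncoder(n_radicals, word_dim=word_dim)
        self.lstm = nn.LSTM(word_dim, hidden_dim, batch_first=True)
        self.out = nn.Linear(hidden_dim, n_classes)

    def forward(self, radical_ids):
        words = self.word_encoder(radical_ids)          # word vectors built from radicals
        _, (h, _) = self.lstm(words)                    # final hidden state as sentence vector
        return self.out(h[-1])                          # class logits

# Toy usage: batch of 2 sentences, 5 words each, up to 4 radicals per word.
model = RadicalCNNLSTMClassifier(n_radicals=300, n_classes=4)
ids = torch.randint(1, 300, (2, 5, 4))
logits = model(ids)                                     # shape (2, 4)

For English text the same sketch applies by feeding character indices instead of radical indices, which mirrors the character-level decomposition the abstract mentions.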