from stanfordcorenlp import StanfordCoreNLP
# NOTE(review): `nlp` is never referenced in this file's visible code — presumably
# kept for interactive use or a later processing step; confirm before removing.
nlp = StanfordCoreNLP(r'/Users/yuronan/develop/code/python/stanford-corenlp-4.3.2', lang='en')
# Module-level output handle for the merged lexicon written by combineProcess().
# NOTE(review): opened in 'w+' mode and never explicitly closed — relies on
# interpreter-exit flushing; consider a `with` block instead.
f = open('./data/sentic_hownet.txt', 'w+')
def combineProcess(senticnet_path='./data/senticnet_word.txt',
                   hownet_path='./data/hownet.txt',
                   output_path='./data/sentic_hownet.txt'):
    """Merge the SenticNet and HowNet word lists into a single lexicon file.

    Every SenticNet line is kept as-is. A HowNet line is appended only when
    its headword (the first comma-separated field) does not already appear
    as a SenticNet headword, so SenticNet entries take precedence.

    Args:
        senticnet_path: path to the SenticNet word file (one CSV-ish line per word).
        hownet_path: path to the HowNet word file, same line format.
        output_path: path the merged lexicon is written to (one entry per line).

    Returns:
        None. Writes the merged entries to ``output_path``.

    Raises:
        OSError: if any of the files cannot be opened.
    """
    merged = []
    seen_words = set()  # set gives O(1) membership tests vs. O(n) on a list

    # Single pass over SenticNet: collect entries and index their headwords.
    with open(senticnet_path) as senticnet_file:
        for line in senticnet_file:
            entry = line.strip()
            merged.append(entry)
            seen_words.add(entry.split(',')[0])

    # HowNet entries are kept only when their headword is new.
    with open(hownet_path) as hownet_file:
        for line in hownet_file:
            entry = line.strip()
            if entry.split(',')[0] not in seen_words:
                merged.append(entry)

    # Write and close the output explicitly instead of leaking a handle.
    with open(output_path, 'w') as out_file:
        out_file.writelines(entry + '\n' for entry in merged)
if __name__ == '__main__':
    # Guard the entry point so importing this module does not trigger the
    # file-merging side effect; behavior when run as a script is unchanged.
    combineProcess()