# encoding=utf-8

import jieba_fast as jieba
import os

def read_from_file(file_name):
    """Return the entire contents of *file_name* decoded as UTF-8 text."""
    with open(file_name, "r", encoding='utf-8') as handle:
        return handle.read()


def stop_words():
    """Load the stop-word file, run it through jieba, and return a set of tokens.

    Tries ``../data/stopword.txt`` first and falls back to
    ``../../data/stopword.txt`` so the module works from either of two
    expected working directories.
    """
    path = "../data/stopword.txt"
    if not os.path.exists(path):
        path = "../../data/stopword.txt"
    raw_text = read_from_file(path)
    # jieba.cut yields tokens lazily; collapse them into a set for O(1) lookup.
    return set(jieba.cut(raw_text))


_stop_words = stop_words()


def dict(text):
    """Segment *text* and return a {term: occurrence count} mapping.

    Terms found in the module-level stop-word set are skipped.
    Uses jieba's search-engine mode (finer-grained segmentation) with HMM
    enabled for out-of-vocabulary words.

    NOTE: the function name shadows the builtin ``dict``; it is kept
    unchanged for backward compatibility with existing callers.
    """
    terms = jieba.cut_for_search(text, HMM=True)
    # Alternative segmentation mode kept for reference:
    # terms = jieba.cut(text, cut_all=False, HMM=True)
    counts = {}
    for term in terms:
        if term in _stop_words:
            continue
        # Single lookup with a default instead of the old
        # map.keys().__contains__(...) test-then-index pattern.
        counts[term] = counts.get(term, 0) + 1
    return counts


if __name__ == '__main__':
    # Quick manual check: load and display the stop-word set.
    loaded = stop_words()
    print(loaded)
