from collections import Counter
import CONST
import string

from persistence import sqlUtil
from persistence.searchKeyDao import db_name


def word_frequency_count(path=None):
    """
    Count word frequencies in the segmented (post-cut) word file; used to
    update the important words in the lexicon.

    :param path: path to the tab-separated word file; defaults to
                 ``CONST.AFTER_CUT_DATA`` when omitted (backward compatible).
    :return: list of words sorted by frequency, descending
    """
    if path is None:
        path = CONST.AFTER_CUT_DATA
    counter = Counter()
    # Stream the file line by line instead of reading it all into memory.
    with open(path, 'r', encoding='utf-8') as processing_data:
        for line in processing_data:
            # Strip only the trailing newline. The previous `line[:-1]`
            # clobbered the last character of a final line without '\n'.
            line = line.rstrip('\n')
            # Skip blank entries.
            if not line:
                continue
            # Split on tabs and drop empty fields produced by repeated tabs.
            counter.update(word for word in line.split('\t') if word)
    # most_common() sorts (word, count) pairs by count descending with the
    # same stable tie ordering as sorted(..., key=count, reverse=True).
    return [word for word, _ in counter.most_common()]


def renovation_word_index():
    """
    Refresh the word-table index used by CONST.

    Reads the id of the first row of each of the 27 tables
    (word_a .. word_z plus word_other) and collects them in order.

    :return: list of first-row ids, one per table queried successfully
             (may be shorter than 27 if a query fails)
    """
    word_index = []
    # Table suffixes: 'a'..'z' plus the catch-all 'other' bucket.
    local_list = list(string.ascii_lowercase) + ['other']
    sql = 'select id from word_{} limit 0,1'
    # Pre-bind so the except/finally clauses are safe even when
    # sqlUtil.get_cur itself raises (previously this caused a NameError
    # that masked the original error).
    conn = cur = None
    try:
        conn, cur = sqlUtil.get_cur(db_name)
        # Read the first row's id from each of the 27 tables.
        for alpha in local_list:
            cur.execute(sql.format(alpha))
            results = cur.fetchall()
            word_index.append(results[0][0])
    except Exception as e:
        if conn is not None:
            conn.rollback()
        print('事务处理失败', e)
    finally:
        if conn is not None:
            sqlUtil.close_db(conn, cur)
    print(word_index)
    return word_index


# Script entry point: refresh and print the word-table index.
if __name__ == '__main__':
    renovation_word_index()
