from elasticsearch import Elasticsearch
import json
import os
import pandas as pd
import pathlib
import sys

# Make the project root importable so that `settings` and `utility` resolve
# when this script is run directly (the file sits three levels below the root).
project_path = pathlib.Path(os.path.abspath(__file__)).parent.parent.parent
sys.path.append(str(project_path))
from settings import es_host,es_setting_sheet,excel_path,es_config_path,table_cols_sheet,es_stopword_sheet,es_synonym_sheet
from utility.import_log import flog
# Module-level logger tagged with this file's name.
log = flog(__file__)

def _write_terms(path, terms):
    """Write one term per line to *path* (UTF-8), truncating any existing file."""
    with open(path, 'w', encoding="utf-8") as term_f:
        for term in terms:
            term_f.write(term)
            term_f.write('\n')


# Each row of the sheet holds: column 0 = index name, column 1 = JSON settings.
es_setting = pd.read_excel(excel_path, sheet_name=es_setting_sheet)
# BUG FIX: es_host was imported from settings but never used — the client
# previously always connected to the default localhost:9200.
es = Elasticsearch(es_host)
log.info(f'成功读取Excel文件{es_setting_sheet}页配置信息')
try:
    common_stopwords_df = pd.read_excel(excel_path, sheet_name=es_stopword_sheet)
    common_synonyms_df = pd.read_excel(excel_path, sheet_name=es_synonym_sheet)
    log.info(f'成功加载停用词,同义词信息')
    for _, row in es_setting.iterrows():
        # .iloc avoids the deprecated positional-label lookup on a Series
        # (row[0] warns/raises on pandas >= 2).
        index_name = row.iloc[0]
        settings = json.loads(row.iloc[1])

        # Synonym/stopword files must live under the ES config directory.
        if not os.path.exists(es_config_path):
            raise Exception('es config path is not Setting!')

        analysis_filter = settings['settings']['analysis']['filter']
        synonyms_path = es_config_path + analysis_filter['synonym']['synonyms_path']
        stopwords_path = es_config_path + analysis_filter['stopword']['stopwords_path']
        if index_name == 'kgqa_attr':
            # kgqa_attr additionally carries index-time synonym/stopword filters.
            synonyms_index_path = es_config_path + analysis_filter['synonym_index']['synonyms_path']
            stopwords_index_path = es_config_path + analysis_filter['stopword_index']['stopwords_path']
        for each in (synonyms_path, stopwords_path):
            # exist_ok avoids the race between an exists() check and makedirs().
            os.makedirs(pathlib.Path(each).parent, exist_ok=True)

        if index_name == 'kgqa_value':
            common_synonyms = common_synonyms_df['字段值搜索同义词'][common_synonyms_df['字段值搜索同义词'].notnull()].tolist()
            common_stopwords = common_stopwords_df['字段值搜索停用词'][common_stopwords_df['字段值搜索停用词'].notnull()].tolist()
        elif index_name == 'kgqa_attr':
            common_synonyms = common_synonyms_df['字段名搜索同义词'][common_synonyms_df['字段名搜索同义词'].notnull()].tolist()
            common_stopwords = common_stopwords_df['字段名搜索停用词'][common_stopwords_df['字段名搜索停用词'].notnull()].tolist()
            index_synonyms = common_synonyms_df['字段名索引同义词'][common_synonyms_df['字段名索引同义词'].notnull()].tolist()
            index_stopwords = common_stopwords_df['字段名索引停用词'][common_stopwords_df['字段名索引停用词'].notnull()].tolist()
        else:
            # BUG FIX: an unrecognized index name previously raised NameError on
            # the first iteration, or silently reused the term lists from the
            # preceding row. Skip such rows explicitly instead.
            log.error(f'未知索引{index_name},跳过处理')
            continue

        log.info(f'搜索同义词开始写入{synonyms_path}')
        _write_terms(synonyms_path, common_synonyms)
        log.info(f'搜索停用词开始写入{stopwords_path}')
        _write_terms(stopwords_path, common_stopwords)
        log.info('停用词,同义词处理成功')
        if index_name == 'kgqa_attr':
            log.info(f'索引同义词开始写入{synonyms_index_path}')
            _write_terms(synonyms_index_path, index_synonyms)
            log.info(f'索引停用词开始写入{stopwords_index_path}')
            _write_terms(stopwords_index_path, index_stopwords)
            log.info('停用词,同义词处理成功')

        # Create the index only when missing; settings of an existing index
        # are left untouched.
        if not es.indices.exists(index=index_name):
            es.indices.create(index=index_name, body=settings)
            log.info(f'{index_name} created')
except Exception as ex:
    log.error('停用词,同义词加载失败!失败信息如下:')
    log.error(str(ex))
