# encoding: utf-8
"""
数据库交互模组
"""
import json
import uuid
import time
import datetime

import psycopg2
import pandas as pd

from ..config.db import POSTGRESQL_INFO

# Lifecycle states stored in the task_status column of df_training_task.
QUEUE_TASK_STATUS = 'QUEUING'
RUN_TASK_STATUS = 'RUNNING'
SUCCESS_TASK_STATUS = 'SUCCESS'
FAIL_TASK_STATUS = 'FAIL'


def _get_from_postgresql(query, cur=None, label='all', db_infos=POSTGRESQL_INFO):
    if cur is None:
        cnx = psycopg2.connect(**db_infos)
        cur = cnx.cursor()
        cur.execute(query)
    if label == 'all':
        return cur.fetchall()
    else:
        return cur.fetchone()


class SourceDB(object):
    """Static helpers for reading and updating model/training state in PostgreSQL.

    Every method either reuses a caller-supplied connection/cursor or opens a
    fresh connection from ``POSTGRESQL_INFO``.  All values are passed to the
    driver as query parameters (never interpolated into the SQL text) to
    prevent SQL injection — the original code built several statements with
    ``%`` string formatting.
    """

    @staticmethod
    def get_default_info():
        """Fetch basic information about the default model (row id '0').

        :return: dict with keys default_model_id, task_name, model_status,
                 update_time
        """
        query = "select default_model_id, task_name, model_status, update_time from init_model_param where id='0'"
        data = _get_from_postgresql(query, label='one')
        return {
            'default_model_id': data[0],
            'task_name': data[1],
            'model_status': data[2],
            'update_time': data[3],
        }

    @staticmethod
    def update_model_status(model_status, id_='0', cnx=None):
        """Set ``model_status`` on the ``init_model_param`` row ``id_``.

        :param model_status: new status value
        :param id_: primary key of the row to update (defaults to '0')
        :param cnx: optional open connection; a new one is created when None
        """
        if cnx is None:
            cnx = psycopg2.connect(**POSTGRESQL_INFO)
        cur = cnx.cursor()
        # Parameterized to avoid SQL injection (was %-interpolated).
        query = "update init_model_param set model_status=%s where id=%s"
        cur.execute(query, [model_status, id_])
        cnx.commit()

    @staticmethod
    def update_model_default(default_model_id, id_='0', cnx=None):
        """Point row ``id_`` at a new default model and bump update_time."""
        if cnx is None:
            cnx = psycopg2.connect(**POSTGRESQL_INFO)
        cur = cnx.cursor()
        query = "update init_model_param set default_model_id=%s, update_time=%s where id=%s"
        cur.execute(query, [default_model_id, datetime.datetime.now(), id_])
        cnx.commit()

    @staticmethod
    def update_server_heartbeat(server_id, cnx=None):
        """Refresh ``heartbeat_time`` for ``server_id`` in ``model_servers``."""
        if cnx is None:
            cnx = psycopg2.connect(**POSTGRESQL_INFO)
        cur = cnx.cursor()
        query = "update model_servers set heartbeat_time = %s where server_id = %s"
        cur.execute(query, [datetime.datetime.now(), server_id])
        cnx.commit()

    @staticmethod
    def delete_server_info(heartbeat_delete_time, cnx=None):
        """Delete server rows whose heartbeat is older than
        ``heartbeat_delete_time`` seconds (computed server-side)."""
        if cnx is None:
            cnx = psycopg2.connect(**POSTGRESQL_INFO)
        cur = cnx.cursor()
        query = "DELETE FROM model_servers where extract(epoch from now() - heartbeat_time) > %s"
        cur.execute(query, [heartbeat_delete_time])
        cnx.commit()

    @staticmethod
    def update_server_load_model_info(server_id, load_model_label, load_model_error, cnx=None):
        """Record the model-loading outcome (label/error) for ``server_id``."""
        if cnx is None:
            cnx = psycopg2.connect(**POSTGRESQL_INFO)
        cur = cnx.cursor()
        query = "update model_servers set load_model_label=%s, load_model_error=%s where server_id=%s"
        cur.execute(query, [load_model_label, load_model_error, server_id])
        cnx.commit()

    @staticmethod
    def get_train_info(train_task_id, cur=None):
        """Fetch training-task information by task id.

        :param train_task_id: id of the row in ``df_training_task``
        :param cur: optional open cursor; a new connection is opened when None
        :return: dict with base_on_model_id, task_status, training_params and
                 error_msg; when the task is missing only error_msg is set
        """
        # Parameterized to avoid SQL injection (was %-interpolated).  The
        # query is executed here directly so a caller-supplied cursor works.
        query = "select base_on_model_id, task_status, training_params " \
                "from df_training_task where id=%s"
        if cur is None:
            cnx = psycopg2.connect(**POSTGRESQL_INFO)
            cur = cnx.cursor()
        cur.execute(query, [train_task_id])
        rows = cur.fetchall()
        train_info_result = {}
        if not rows:
            train_info_result['error_msg'] = '无法从数据库中获取对应训练任务：%s的信息' % train_task_id
        else:
            base_on_model_id, task_status, training_params = rows[0]
            train_info_result['base_on_model_id'] = str(base_on_model_id)
            train_info_result['task_status'] = task_status
            # training_params is stored as a JSON string.
            train_info_result['training_params'] = json.loads(training_params)
            train_info_result['error_msg'] = None
        return train_info_result

    @staticmethod
    def get_queuing_train_task(cnx):
        """Return the ids of all training tasks whose status is QUEUING."""
        query = "select id from df_training_task where task_status='QUEUING'"
        cur = cnx.cursor()
        # Bug fix: the query was previously handed to a helper along with a
        # fresh, never-executed cursor, so fetchall() had nothing to fetch.
        cur.execute(query)
        return [row[0] for row in cur.fetchall()]

    @staticmethod
    def get_dataset_with_task(train_task_id, cnx=None, db_infos=None, correct_contract=True):
        """Load and assemble the labelled dataset for a training task.

        :param train_task_id: id of the training task
        :param cnx: optional open connection
        :param db_infos: connection settings (defaults to POSTGRESQL_INFO)
        :param correct_contract: when True, prepend the level-0 head clause
            to each non-head term and shift span offsets accordingly
        :return: list of dicts with keys id/text/info_list/schema
        :raises Exception: when no labelled data exists for the task
        """
        result_data = []
        mark_dataset = {}
        if cnx is None:
            if db_infos is None:
                db_infos = POSTGRESQL_INFO
            cnx = psycopg2.connect(**db_infos)
        # Parameterized to avoid SQL injection (was %-interpolated).
        query = '''
        select c.element_content, c.element_name, c.element_position, 
        c.term_text, c.term_position, c.data_mark_id, c.term_level 
        FROM df_training_task_item a, df_dataset_item b, df_data_mark_result c
        where a.task_id = %s
        and b.mark_result_id is not null
        and a.dataset_id = b.dataset_id
        and b.mark_result_id = c.id'''
        data = pd.read_sql(query, cnx, params=[train_task_id])
        if len(data) == 0:
            raise Exception('对应训练任务：%s的标注数据为空' % train_task_id)

        # Head clauses (term_level == 0) keyed by data_mark_id.  Bug fix: the
        # original selected the 'term_text' column twice, which breaks
        # DataFrame.to_dict on non-unique columns.
        head_clauses = data.loc[data['term_level'] == 0, ['term_text', 'data_mark_id']].drop_duplicates()
        head_clauses = head_clauses.set_index('data_mark_id').to_dict('index')
        for head in head_clauses.values():
            # Length of "<head clause>\n" — used to shift span offsets below.
            head['len_text'] = len(head['term_text']) + len('\n')

        for _, row in data.iterrows():
            term_text = row['term_text']
            element_content = row['element_content']
            element_name = row['element_name']
            data_mark_id = row['data_mark_id']
            offset = [int(p) for p in row['element_position'].split(',')]
            actual_term_text = term_text
            actual_offset = offset
            # Sanity check: the annotated span must match the stored content;
            # mismatching rows are logged and skipped.
            mark_element_content = term_text[offset[0]: offset[1]]
            if element_content != mark_element_content:
                print(data_mark_id, element_content, mark_element_content)
                continue
            if correct_contract and row['term_level'] != 0 and data_mark_id in head_clauses:
                head_clause = head_clauses[data_mark_id]
                actual_term_text = head_clause['term_text'] + '\n' + term_text
                actual_offset = [p + head_clause['len_text'] for p in offset]
            unique_key = data_mark_id + '_' + str(row['term_position'])
            if unique_key not in mark_dataset:
                mark_dataset[unique_key] = {
                    "id": str(uuid.uuid1()),
                    "text": actual_term_text,
                    "info_list": [],
                    "schema": set()
                }
            # Each span is wrapped in a single-element list (downstream format).
            mark_dataset[unique_key]['info_list'].append([{
                "type": element_name,
                "span": element_content,
                "offset": actual_offset
            }])
            mark_dataset[unique_key]['schema'].add(element_name)

        # Normalize: schema set -> dict of keys, info_list sorted by span start.
        for value in mark_dataset.values():
            value['schema'] = dict((name, None) for name in value['schema'])
            value['info_list'] = sorted(value['info_list'], key=lambda x: x[0]['offset'][0])
            result_data.append(value)
        return result_data

    @staticmethod
    def update_train_task_start(train_task_id, cnx=None):
        """Mark a training task as RUNNING and stamp its start time."""
        if cnx is None:
            cnx = psycopg2.connect(**POSTGRESQL_INFO)
        cur = cnx.cursor()
        # Parameterized to avoid SQL injection (was %-interpolated).
        query = "update df_training_task set start_time =%s, task_status=%s where id=%s"
        cur.execute(query, [datetime.datetime.now(), RUN_TASK_STATUS, train_task_id])
        cnx.commit()

    @staticmethod
    def get_model_server(cnx=None):
        """Return every row of ``model_servers`` as a pandas DataFrame."""
        if cnx is None:
            cnx = psycopg2.connect(**POSTGRESQL_INFO)
        query = "select * from model_servers"
        return pd.read_sql(query, cnx)

    @staticmethod
    def update_train_queue_task_start(train_task_id, cnx=None):
        """Mark a training task as QUEUING and stamp its start time."""
        # cnx now defaults to None, matching the body's existing None check.
        if cnx is None:
            cnx = psycopg2.connect(**POSTGRESQL_INFO)
        cur = cnx.cursor()
        # Parameterized to avoid SQL injection (was %-interpolated).
        query = "update df_training_task set start_time =%s, task_status=%s where id=%s"
        cur.execute(query, [datetime.datetime.now(), QUEUE_TASK_STATUS, train_task_id])
        cnx.commit()

    @staticmethod
    def update_train_task_end(train_task_id, training_infos, trained_infos, cnx):
        """Finalize a training task and, on success, register the new model.

        Updates the ``df_training_task`` row with end time, status, model id
        and result metrics; when ``trained_infos['return_code'] == 0`` also
        inserts a row into ``df_trained_model``.

        :param train_task_id: id of the task row
        :param training_infos: dict with base_on_model_id and training_params
        :param trained_infos: dict with return_code and, on success,
            model_id and metric_span
        :param cnx: open connection (required)
        """
        cur = cnx.cursor()
        query_task = "update df_training_task set end_time = %s, task_status=%s, model_id=%s, " \
                     "training_result_params=%s where id=%s "

        query_model = "insert into df_trained_model(id, base_on_model_id, create_time, name, training_params, training_result_params) " \
                      "values(%s, %s, %s, %s, %s, %s)"

        end_time = datetime.datetime.now()
        succeeded = trained_infos['return_code'] == 0
        if succeeded:
            task_status = SUCCESS_TASK_STATUS
            model_id = trained_infos['model_id']
            training_result_params = json.dumps(trained_infos['metric_span'])
        else:
            task_status = FAIL_TASK_STATUS
            model_id = None
            training_result_params = None
        cur.execute(query_task, [end_time, task_status, model_id, training_result_params, train_task_id])

        if succeeded:
            # The model's display name defaults to its id; its creation time
            # matches the task's end time.
            cur.execute(query_model, [model_id, training_infos['base_on_model_id'], end_time,
                                      model_id, json.dumps(training_infos['training_params']),
                                      training_result_params])
        cnx.commit()

    @staticmethod
    def create_server_info(server_id, default_model_id, now_model_id, pid, cnx=None):
        """Register a new model-server row; create/update/heartbeat times all
        start at the same timestamp."""
        if cnx is None:
            cnx = psycopg2.connect(**POSTGRESQL_INFO)
        cur = cnx.cursor()
        query = "insert into model_servers(server_id, default_model_id, now_model_id, " \
                "pid, create_time, update_time, heartbeat_time) values(%s, %s, %s, %s, %s, %s, %s)"
        now = datetime.datetime.now()  # one timestamp for all three columns
        cur.execute(query, [server_id, default_model_id, now_model_id, pid, now, now, now])
        cnx.commit()

    @staticmethod
    def update_server_info_with_load(server_id, default_model_id, now_model_id,
                                     load_model_label, load_model_error, cnx=None):
        """Update a server row's model ids, load outcome, and update_time."""
        if cnx is None:
            cnx = psycopg2.connect(**POSTGRESQL_INFO)
        cur = cnx.cursor()
        query = "update model_servers set default_model_id=%s, now_model_id=%s, " \
                "update_time=%s, load_model_label=%s, load_model_error=%s where server_id=%s"
        cur.execute(query, [default_model_id, now_model_id, datetime.datetime.now(),
                            load_model_label, load_model_error, server_id])
        cnx.commit()

def write_to_json(data, path):
    """Serialize each record of *data* to *path* as one JSON document,
    separated by carriage returns; non-ASCII characters are written as-is.
    """
    with open(path, 'w', encoding='utf8') as handle:
        handle.writelines(json.dumps(record, ensure_ascii=False) + '\r'
                          for record in data)


# Logging cache: buffers log lines and flushes them to the database in batches.
class LogCache(object):
    """Buffers training-log lines and periodically flushes them to
    ``df_training_task.log_text``.

    A flush happens once ``update_count_interval`` new lines have accumulated
    since the last flush, or once ``update_time_interval`` seconds have
    passed since it.
    """

    def __init__(self, cnx, train_task_id):
        self.cnx = cnx
        self.train_task_id = train_task_id

        # Bookkeeping for the flush heuristics.
        self.update_count = 0            # buffered line count at the last flush
        self.update_time = time.time()   # wall-clock time of the last flush
        self.logging_msgs = []           # every line buffered so far

        self.update_count_interval = 3   # flush after this many new lines...
        self.update_time_interval = 2    # ...or after this many seconds

    def add_log_line(self, line, header=False):
        """Append one log line; with ``header`` a timestamped INFO prefix is
        prepended, mimicking a standard logging format."""
        if header:
            actual_line = str(datetime.datetime.now()) + ' - custom - INFO ' + line
        else:
            actual_line = line
        self.logging_msgs.append(actual_line)
        self.judge_update_log_to_db()

    def judge_update_log_to_db(self):
        """Flush the buffer to the database when either the line-count or the
        elapsed-time threshold has been exceeded."""
        now_time = time.time()
        now_count = len(self.logging_msgs)
        if (now_count - self.update_count >= self.update_count_interval
                or now_time - self.update_time >= self.update_time_interval):
            self.update_count = now_count
            self.update_time = now_time
            self.update_log_to_db()

    def update_log_to_db(self):
        """Write the complete buffered log to the task row.

        The SQL is parameterized: the original interpolated the log text into
        the statement (injectable, and broken by '%' characters in the log),
        working around quotes by rewriting them to double quotes — which
        corrupted the stored text.  Parameters make that workaround
        unnecessary.
        """
        if self.logging_msgs:
            log_text = '\n'.join(self.logging_msgs)
            query = "update df_training_task set log_text = %s where id = %s"
            cur = self.cnx.cursor()
            cur.execute(query, [log_text, self.train_task_id])
            self.cnx.commit()

