import sys
from datetime import datetime, timedelta

import psycopg2


class ReadYatLog:
    """Parse a YAT scheduler log file and accumulate run statistics.

    After ``read_yat_log`` runs, the instance holds:
      * schedule-level data: start/end/run time and pass/fail/block/total
        counters (``schd_*`` attributes);
      * ``case_info`` — one SQL value-tuple string per test case, ready to
        be joined into an ``INSERT ... VALUES`` statement;
      * ``schd_info`` — a single SQL value-tuple string summarising the
        whole schedule.
    """

    def __init__(self):
        # Schedule-level timing, filled in by read_yat_log().
        self.schd_start_time = ''
        self.schd_run_time = ''
        self.schd_end_time = ''
        # Per-status case counters.
        self.schd_pass_num = 0
        self.schd_fail_num = 0
        self.schd_block_num = 0
        self.schd_total_num = 0
        # SQL value-tuple strings: one per case / one per schedule.
        self.case_info = list()
        self.schd_info = ''
        # Maps a schedule name to its test owner; values are filled in
        # elsewhere (all empty here), lookup falls back to '' anyway.
        self.tester = {'FU_AC_JDBC_PY': '',
                       'FU_AC_ODBC_PY': '',
                       'FU_AC_GDBC_PY': '',
                       'FU_AC_PDBC_PY': '',
                       'FU_AC_PLUGINS_PY': '',
                       'FU_AC_PLUGINS_SQL': '',
                       'FU_SQ_GRAM_PY_1': '',
                       'FU_SQ_GRAM_PY_2': '',
                       'FU_SQ_GRAM_PY_3': '',
                       'FU_SQ_GRAM_PY_4': '',
                       'FU_SQ_GRAM_SQL': '',
                       'FU_SQ_PROC_PY': '',
                       'FU_SQ_PROC_SQL': '',
                       'FU_SQ_SYS_PY': '',
                       'FU_SQ_PERF_PY_1': '',
                       'FU_SQ_PERF_PY_2': '',
                       'FU_SQ_KEYWORD_PY': '',
                       'FU_SQ_KEYWORD_SQL_1': '',
                       'FU_SQ_KEYWORD_SQL_2': '',
                       'FU_SQ_PARALLEL_QUERY_PY': '',
                       'FU_ST_INDEX_PY': '',
                       'FU_ST_INDEX_SQL': '',
                       'FU_ST_PART_PY': '',
                       'FU_ST_PART_SQL': '',
                       'FU_ST_MOT_PY': '',
                       'FU_ST_MATER_SQL': '',
                       'FU_ST_FULLINDEX_PY': '',
                       'FU_ST_FULLINDEX_SQL': '',
                       'FU_ST_FLASHBACK_SQL': '',
                       'FU_ST_USTORE_SQL_0': '',
                       'FU_ST_USTORE_SQL_1': '',
                       'FU_ST_USTORE_SQL_2': '',
                       'FU_ST_USTORE_SQL_3': '',
                       'FU_ST_USTORE_SQL_4': '',
                       'FU_ST_ROWCOMPRESS_SQL': '',
                       'DF_MA_DROPNODE_PY_1': '',
                       'DF_MA_DROPNODE_PY_2': '',
                       'DF_MA_UPGRADE_PY': '',
                       'DF_MA_TOOLS_PY_1': '',
                       'DF_MA_TOOLS_PY_2': '',
                       'DF_MA_TOOLS_PY_3': '',
                       'DF_MA_TOOLS_PY_4': '',
                       'DF_MA_TOOLS_PY_5': '',
                       'DF_MA_TOOLS_PY_6': '',
                       'DF_MA_TOOLS_PY_7': '',
                       'DF_MA_TOOLS_PY_8': '',
                       'DF_MA_OM_PY': '',
                       'DF_MA_OM_SQL': '',
                       'DF_SE_IDENTITY_PY': '',
                       'DF_SE_VISIT_PY': '',
                       'DF_SE_VISIT_SQL': '',
                       'DF_SE_THREE_PY_1': '',
                       'DF_SE_THREE_PY_2': '',
                       'DF_SE_AUDIT_PY': '',
                       'DF_SE_AUDIT_SQL': '',
                       'DF_SE_ENCRYPT_PY': '',
                       'DF_SE_MASK_PY': '',
                       'DF_SE_EQUALITY_PY': '',
                       'DF_HA_BACKUP_PY_1': '',
                       'DF_HA_BACKUP_PY_2': '',
                       'DF_HA_BACKUP_PY_3': '',
                       'DF_HA_DELAYBACK_PY_1': '',
                       'DF_HA_DELAYBACK_PY_2': '',
                       'DF_HA_DUMP_PY': '',
                       'DF_HA_COPY_PY': '',
                       'DF_HA_GUC_PY_1': '',
                       'DF_HA_GUC_PY_2': '',
                       'DF_HA_GUC_PY_3': '',
                       'DF_HA_GUC_PY_4': '',
                       'DF_HA_GUC_PY_5': '',
                       'DF_HA_GUC_PY_6': '',
                       'DF_HA_GUC_PY_7': '',
                       'DF_HA_GUC_PY_8': '',
                       'DF_HA_GUC_PY_9': '',
                       'DF_HA_GUC_PY_10': '',
                       'DF_HA_GUC_PY_11': '',
                       'DF_HA_GUC_PY_12': '',
                       'DF_HA_GUC_PY_13': '',
                       'DF_HA_GUC_SQL_1': '',
                       'DF_HA_GUC_SQL_2': '',
                       'DF_HA_GUC_SQL_3': '',
                       'DF_HA_GUC_SQL_4': '',
                       'DF_HA_PUB_SUB_PY_1': '',
                       'DF_HA_PUB_SUB_PY_2': '',
                       'DF_HA_GSC_PY': '',
                       'DF_SE_MODIFY_PY': '',
                       'DF_SE_MODIFY_SQL': '',
                       'EC_MY_PLUGIN_PY': '',
                       'EC_MY_GRAMMAR_SQL': ''}

    def read_yat_log(self, **kwargs):
        """Parse the log at ``kwargs['path']`` and fill the instance fields.

        Required keys in ``kwargs``: path, schd_name, run_os, ip_host,
        console_url, log_url, core_num.

        Lines are classified by their whitespace-token count (YAT layout —
        TODO confirm against the log producer):
          * 4 tokens  -> schedule start line (tokens 1-2 = date, time)
          * 11 tokens -> schedule summary line (token 6 = ISO-8601 style
            duration such as ``PT10M``; tokens -3..-2 = end timestamp)
          * 9/10 tokens -> one test-case result line
        """
        t_format = "%Y-%m-%d %H:%M:%S"
        # Duration-unit suffix -> timedelta keyword argument.
        unit_kwarg = {'h': 'hours', 'm': 'minutes', 's': 'seconds'}
        with open(kwargs['path'], 'r', encoding='utf-8', errors='ignore') as f:
            for line in f:
                tokens = line.split()
                token_num = len(tokens)
                if token_num == 4:
                    self.schd_start_time = ' '.join(tokens[1:3])
                elif token_num == 11:
                    self.schd_end_time = ' '.join(tokens[-3:-1])
                    # Drop the leading "PT" of the ISO-8601 duration.
                    self.schd_run_time = tokens[6].strip('PT')
                elif token_num in (9, 10):
                    # e.g. token 6 = "suite/path/case_name".
                    case_name = tokens[6].split('/')[-1]
                    case_status = tokens[-1]
                    # Bracketed fields: "[YYYY-mm-dd HH:MM:SS]" and "[N u]".
                    case_start_time = ' '.join(tokens[1:3]).strip(
                        '[').strip(']')
                    case_run_time = ''.join(tokens[3:5]).strip(
                        '[').strip(']')

                    # Compute the case end time from start + duration.
                    # Original code converted the duration to float before
                    # checking the unit, so an unknown unit crashed instead
                    # of reaching the fallback; check the unit first.
                    unit = unit_kwarg.get(case_run_time[-1:])
                    if unit:
                        start_dt = datetime.strptime(case_start_time,
                                                     t_format)
                        delta = timedelta(
                            **{unit: float(case_run_time[:-1])})
                        case_end_time = (start_dt + delta).strftime(t_format)
                    else:
                        # Unknown/missing unit: reuse the start time.
                        case_end_time = case_start_time
                    tester = self.tester.get(kwargs['schd_name'], '')

                    self.case_info.append(f"('{kwargs['run_os']}', "
                                          f"'{kwargs['schd_name']}', "
                                          f"'{case_name}', "
                                          f"'{case_status}', "
                                          f"'{case_start_time}', "
                                          f"'{case_run_time}', "
                                          f"'用例问题', "
                                          f"'{tester}', "
                                          f"'未解决', "
                                          f"'{kwargs['ip_host']}', "
                                          f"'{kwargs['console_url']}', "
                                          f"'{case_end_time}', "
                                          f"'{kwargs['log_url']}')")

                    # 'er' = failed, 'to' = timed out (blocked), rest pass.
                    if case_status == 'er':
                        self.schd_fail_num += 1
                    elif case_status == 'to':
                        self.schd_block_num += 1
                    else:
                        self.schd_pass_num += 1
        self.schd_total_num = sum(
            [self.schd_pass_num, self.schd_fail_num, self.schd_block_num])

        self.schd_info += f"('{kwargs['run_os']}', '{kwargs['schd_name']}', " \
            f"'{self.schd_total_num}', '{self.schd_pass_num}', " \
            f"'{self.schd_fail_num}', '{self.schd_block_num}', " \
            f"'{self.schd_start_time}', interval '{self.schd_run_time}', " \
            f"'{kwargs['ip_host']}', '{kwargs['core_num']}')"


class DbOperate:
    """Minimal psycopg2 helper: one connection, one shared cursor."""

    def __init__(self, **dbinfo):
        # Expected keys: db_name, db_user, db_password, db_host, port.
        self.db_info = dbinfo
        self.conn = None
        self.cur = None

    def conn_db(self):
        """Open the connection (autocommit, utf-8) and create the cursor."""
        info = self.db_info
        self.conn = psycopg2.connect(
            database=info['db_name'],
            user=info['db_user'],
            password=info['db_password'],
            host=info['db_host'],
            port=info['port'],
        )
        self.conn.set_client_encoding('utf-8')
        autocommit = psycopg2.extensions.ISOLATION_LEVEL_AUTOCOMMIT
        self.conn.set_isolation_level(autocommit)
        self.cur = self.conn.cursor()

    def exec_dml_sql(self, sql_cmd):
        """Echo the statement, execute it, and commit."""
        print(sql_cmd)
        self.cur.execute(sql_cmd)
        self.conn.commit()

    def exec_dql_sql(self, sql_cmd):
        """Execute a query and return all fetched rows."""
        self.cur.execute(sql_cmd)
        return self.cur.fetchall()

    def close_conn(self):
        """Close the underlying connection."""
        self.conn.close()


if __name__ == '__main__':
    # Positional arguments (sys.argv[1..16]):
    #   1 schd_name        2 yat_log_path    3 run_os          4 ip_host
    #   5 core_num         6 console_url     7 run_model_table
    #   8 run_info_table   9 his_model_table 10 his_info_table
    #  11 report_db_name  12 report_db_user 13 report_db_password
    #  14 report_db_host  15 report_port    16 log_url
    Usage = f"Usage:input as 'python3 {sys.argv[0]} schd_name yat_log_path " \
        f"run_os ip_host core_num console_url run_model_table " \
        f"run_info_table his_model_table his_info_table " \
        f"report_db_name report_db_user report_db_password report_db_host " \
        f"report_port log_url' "
    # The script indexes sys.argv up to [16], so 17 entries (argv[0]
    # included) are required; the old "< 10" check still allowed an
    # IndexError for invocations with 10..16 arguments.
    if len(sys.argv) < 17:
        print(Usage)
        sys.exit(0)

    tem_dict = {
        'schd_name': sys.argv[1],
        'path': sys.argv[2],
        'run_os': sys.argv[3],
        'ip_host': sys.argv[4],
        'core_num': sys.argv[5],
        'console_url': sys.argv[6],
        'run_model_table': sys.argv[7],
        'run_info_table': sys.argv[8],
        'his_model_table': sys.argv[9],
        'his_info_table': sys.argv[10],
        'log_url': sys.argv[16],
    }
    read_res = ReadYatLog()
    read_res.read_yat_log(**tem_dict)
    print(read_res.schd_info)

    db_info = {'db_name': sys.argv[11],
               'db_user': sys.argv[12],
               'db_password': sys.argv[13],
               'db_host': sys.argv[14],
               'port': sys.argv[15]}
    my_db = DbOperate(**db_info)
    my_db.conn_db()
    # NOTE(review): table names and values are interpolated straight into
    # SQL text; this is only safe while argv and the parsed log come from
    # a trusted CI job.
    # noinspection PyBroadException
    try:
        # Delete history rows older than one month; vacuum both history
        # tables afterwards (the old code overwrote vacuum_sql inside the
        # loop, so only the last history table was ever vacuumed).
        delete_his_sql = ""
        vacuum_sql = ""
        for his_t in [tem_dict['his_model_table'], tem_dict['his_info_table']]:
            delete_his_sql += f"delete from \"{his_t}\" as n " \
                f"where n.start_time < to_timestamp(substring(to_char(now(), " \
                f"'yyyy-MM-DD hh24:MI:ss') from 1 for 10), 'yyyy-MM-dd') " \
                f"- interval '30d';"
            vacuum_sql += f"vacuum full \"{his_t}\";"

        # Move this schedule's rows from the model table to the history
        # table.
        trans_model_sql = f"insert into \"{tem_dict['his_model_table']}\"(" \
            f"run_os, model_name, total_num, pass_num, fail_num, " \
            f"block_num, start_time, run_time, ip_name, core_num) " \
            f"select run_os, model_name, total_num, pass_num, fail_num, " \
            f"block_num, start_time, run_time, ip_name, core_num " \
            f"from \"{tem_dict['run_model_table']}\" " \
            f"where model_name='{tem_dict['schd_name']}' " \
            f"and run_os='{tem_dict['run_os']}';"

        # Delete the archived model rows.
        delete_model_sql = f"delete from \"{tem_dict['run_model_table']}\" " \
            f"where model_name='{tem_dict['schd_name']}' " \
            f"and run_os='{tem_dict['run_os']}';"
        vacuum_sql += f"vacuum full \"{tem_dict['run_model_table']}\";"

        # Move this schedule's rows from the info table to the history
        # table.
        trans_info_sql = f"insert into \"{tem_dict['his_info_table']}\"(" \
            f"run_os, model_name, tc_name, run_result, start_time, " \
            f"run_time, failed_reason, solution, problem_type, " \
            f"issue_no, develop_owner, test_owner, status, " \
            f"ip_name, jenkins_link, end_time, log_link) " \
            f"select run_os, model_name, tc_name, run_result, start_time, " \
            f"run_time, failed_reason, solution, problem_type, " \
            f"issue_no, develop_owner, test_owner, status, " \
            f"ip_name, jenkins_link, end_time, log_link " \
            f"from \"{tem_dict['run_info_table']}\" " \
            f"where model_name='{tem_dict['schd_name']}' " \
            f"and run_os='{tem_dict['run_os']}';"

        # Delete the archived info rows.
        delete_info_sql = f"delete from \"{tem_dict['run_info_table']}\" " \
            f"where model_name='{tem_dict['schd_name']}' " \
            f"and run_os='{tem_dict['run_os']}';"
        vacuum_sql += f"vacuum full \"{tem_dict['run_info_table']}\";"

        # Insert the fresh schedule summary into the model table.
        insert_model_sql = f"insert into \"{tem_dict['run_model_table']}\"(" \
            f"run_os, model_name, total_num, pass_num, fail_num, " \
            f"block_num, start_time, run_time, ip_name, core_num) " \
            f"values {read_res.schd_info}; "

        # Insert the fresh per-case rows; skip when the log yielded no
        # cases, otherwise "values ;" would be invalid SQL.
        insert_info_sql = ""
        if read_res.case_info:
            insert_info_sql = f"insert into \"{tem_dict['run_info_table']}\"(" \
                f"run_os, model_name, tc_name, run_result, start_time, " \
                f"run_time, problem_type, test_owner, status, ip_name, " \
                f"jenkins_link, end_time, log_link) " \
                f"values {','.join(read_res.case_info)};"

        # Bug fix: trans_model_sql / trans_info_sql used to be built but
        # never executed, so rows were deleted without ever reaching the
        # history tables. Run each archive insert before its matching
        # delete, all inside one transaction.
        exec_sql = f"begin; {delete_his_sql} " \
            f"{trans_model_sql} {delete_model_sql} " \
            f"{trans_info_sql} {delete_info_sql} " \
            f"{insert_model_sql} {insert_info_sql} end;"
        my_db.exec_dml_sql(exec_sql)
        # VACUUM FULL cannot run inside a transaction block, so issue the
        # vacuums one statement at a time after the commit.
        for s in vacuum_sql.split(';'):
            if len(s.strip()) > 0:
                my_db.exec_dml_sql(f'{s};')
    except Exception as e:
        print(str(e))
    finally:
        my_db.close_conn()
