# -*- coding: UTF-8 -*-
# Runs as a scheduled check every 10 minutes.

import os
import sys

# Extend sys.path with the external dependency directory so modules under
# <DEPENDENCY_DIR>/python_task/ can be imported below.
# NOTE(review): os.environ.get returns None when DEPENDENCY_DIR is unset,
# which makes the concatenation below raise TypeError — confirm the variable
# is always set in the runtime environment (the crash may be intended
# fail-fast behavior).
env_dist = os.environ.get('DEPENDENCY_DIR')
sys.path.append(env_dist + "/python_task/")
print(sys.path)

import re
import time
from time import strftime
import datetime
import traceback
import subprocess
import logging
import json
import hashlib
import ssl
from urllib2 import Request, urlopen

# Configure the root logger: timestamped messages, DEBUG level and up.
LOG_FORMAT = "%(asctime)s - %(levelname)s - %(message)s"
logging.basicConfig(level=logging.DEBUG, format=LOG_FORMAT)


def executeCheckTask(paramters):
    """Execute a single SLA check task described by one config row.

    paramters is a row from app_dqc_check_task_config (column order as in
    fetchAllTask). Only task_type == 'sla_check' is handled: the real data
    produce time is taken from the scheduler API when available, otherwise
    from the newest timestamp in an `hdfs dfs -ls` listing of the check
    path. The result is buffered for the mid-result table and checked for
    an SLA breach.
    """
    print("start execute task:" + str(paramters))
    taskConfigId = paramters[0]
    taskType = paramters[2]
    if taskType != 'sla_check':
        return

    checkPath = paramters[12]
    produce_time = paramters[10]
    dp_scheduler_id = paramters[19]

    real_produce_time = ""
    task_exec_result = get_task_exec_result(dp_scheduler_id)
    print("task_exec_result :" + str(task_exec_result))
    if task_exec_result['is_valid']:
        min_end_time = task_exec_result['min_end_time']
        if min_end_time is not None:
            real_produce_time = min_end_time
            print('call api get real_produce_time:' +
                  str(real_produce_time))
    else:
        # Fallback: list the HDFS path, sort by date+time columns and take
        # the time column ($7) of the newest entry.
        newCheckPath = handleCmdDate(checkPath)
        print("newCheckPath:" + newCheckPath)
        cmd = """hdfs dfs -ls """ + newCheckPath + \
            """ |grep -v 'Found' |sort -r -k6,7 |head -1 |awk '{print ""$7""}'"""
        result = executeCMD(cmd)
        real_produce_time = result[0] if result else ""

    print(real_produce_time)
    data = {
        'real_produce_time': real_produce_time,
        'config_produce_time': produce_time
    }
    execResult = json.dumps(data)
    print("execResult is :" + execResult)
    # Buffer the check result for the mid-result table.
    buildValues(taskConfigId, taskType, "", execResult)

    # Check for an SLA breach and notify the resolve group if needed.
    check_notify(data, paramters)
    return


def check_notify(execResult, paramters):
    """Raise an on-duty alert when the SLA check result indicates a breach.

    execResult holds 'real_produce_time' and 'config_produce_time' (HH:MM
    strings); a breach is an empty real time or a real time later than the
    configured one.
    """
    if paramters[2] != 'sla_check':
        return

    table_name = paramters[3]
    owner = paramters[15]
    group_id = paramters[13]
    real = execResult['real_produce_time']
    expected = execResult['config_produce_time']

    # String comparison works because both are zero-padded HH:MM values.
    breached = real == "" or real > expected
    print(" sla check result:" + str(breached))
    if breached:
        subject = "SLA检查异常"
        description = "表{check_table}数据没有在配置时间{config_produce_time}内生成".format(
            check_table=table_name,
            config_produce_time=expected)
        notifyResolveGroup(subject, description, owner, group_id)


def notifyResolveGroup(subject, description, owner_name, resolve_group_id):
    """Create an on-duty platform event via its addEvent HTTP API (curl).

    Returns the command's output lines as produced by executeCMD.
    """
    format_args = {
        'subject': subject,
        'description': description,
        'owner_name': owner_name,
        'resolve_group_id': resolve_group_id,
    }
    cmd = """
    curl -L -X POST 'http://app-onduty-zp.58corp.com/onduty/app/v1/api/addEvent' \
        -H 'Content-Type: application/json' \
        -d '{{
            "agentId": "cc7cafe1-c906-4f6b-ae14-a4aa503d89ff",
            "agentSecret": "a90e2ea2-8d96-4579-b976-1c0476cc16b9",
            "endpoint": "offline-checker",
            "resolvegroupId": "{resolve_group_id}",
            "ctiId": "20211114204556146u7obtt",
            "eventType":"softwareFault",
            "severity": 2,
            "isNotifyRg":false,
            "subject": "{subject}",
            "description": "{description}",
            "responser":  "{owner_name}"
        }}'
    """.format(**format_args)
    output = executeCMD(cmd)
    print("cmd is ：" + cmd)
    print("notifyResolveGroup result :" + str(output))
    return output


def handleCmdDate(checkSql):
    """Replace ${#date(a,b,days):fmt#} placeholders with a concrete date.

    The third argument of date(...) is interpreted as a day offset from
    today; each placeholder is replaced with that date rendered as YYYYMMDD.
    Returns the input string with all placeholders substituted.

    NOTE(review): the captured format token (e.g. 'yyyyMMdd') is ignored and
    '%Y%m%d' is always used — confirm whether other formats ever occur.
    """
    # Raw string avoids invalid-escape warnings; compile once before the loop.
    pattern = re.compile(r'\$\{#date\(-?\d+,-?\d+,(-?\d+)\):([\w-]+)#\}')
    for match in pattern.finditer(checkSql):
        day_offset = int(match.group(1))
        target = datetime.datetime.now() + datetime.timedelta(days=day_offset)
        finalDate = target.strftime('%Y%m%d')
        print(finalDate)
        checkSql = checkSql.replace(match.group(), finalDate)
    print(checkSql)
    return checkSql


# Buffered VALUES tuples for the mid-result insert; flushed by insertToMidResult.
values = []


def buildValues(taskId, taskType, checkCmd, execResult):
    """Buffer one result row for the mid-result table insert.

    Normalizes single quotes to double quotes and escapes the double quotes
    so the strings survive embedding in a single-quoted SQL literal inside a
    double-quoted `hive -e` command.
    """
    global values
    checkCmd = checkCmd.replace("'", "\"")
    # Bug fix: this previously replaced "'" again (a no-op after the line
    # above); it must escape double quotes, mirroring the execResult path.
    checkCmd = checkCmd.replace("\"", "\\\"")
    execResult = execResult.replace("'", "\"")
    execResult = execResult.replace("\"", "\\\"")
    values.append("(null,'" + taskId + "', '" + taskType + "', '" + checkCmd +
                  "', '" + execResult + "', 0, 0, null, null)")


def insertToMidResult():
    """Flush buffered rows into the partitioned mid-result Hive table.

    Uses today's date (YYYYMMDD) as the partition value. Returns the hive
    command output lines, or None when nothing is buffered.
    """
    global values
    if not values:
        return
    finalDate = datetime.datetime.now().strftime('%Y%m%d')
    sql = "INSERT INTO hdp_lbg_supin_zplisting.dwd_zp_rock_dqc_sla_check_mid_result partition(dt = '" + finalDate + "') VALUES " + ",".join(
        values)
    return executeHiveInsertSql(sql)


def executeHiveInsertSql(sql):
    """Run an INSERT statement via the hive CLI and return stdout rows.

    Each returned element is one stdout line split on tabs. Failures are
    logged and swallowed, yielding an empty list (deliberate best-effort).
    """
    # (Removed a no-op `sql = sql` assignment.)
    cmd = 'hive -e """set mapreduce.job.queuename=root.offline.hdp_lbg_supin.normal;SET hive.cli.print.header=false;' + sql + '"""'
    print(cmd)
    lines = []
    try:
        output = subprocess.check_output(cmd, shell=True)
        resultStr = output.decode('UTF-8')
        for line in resultStr.splitlines():
            lines.append(line.split('\t'))
    except Exception as e:
        print("message is:%s" % (str(e)))
        traceback.print_exc()
    print("hive result is:" + str(lines))
    return lines


def executeHiveSql(sql):
    """Run a Hive query via the CLI and return stdout rows.

    Double quotes in the query are rewritten to single quotes so the
    statement survives embedding in the double-quoted `hive -e` wrapper.
    Each returned element is one stdout line split on tabs. Failures are
    logged and swallowed, yielding an empty list (deliberate best-effort).
    """
    # (Removed a no-op `sql = sql` assignment.)
    cmd = 'hive -e """set mapreduce.job.queuename=root.offline.hdp_lbg_supin.normal;SET hive.cli.print.header=false;' + \
        sql.replace('"', "\'") + '"""'
    print(cmd)
    lines = []
    try:
        output = subprocess.check_output(cmd, shell=True)
        resultStr = output.decode('UTF-8')
        for line in resultStr.splitlines():
            lines.append(line.split('\t'))
    except Exception as e:
        print("message is:%s" % (str(e)))
        traceback.print_exc()
    print("hive result is:" + str(lines))
    return lines


def executeCMD(cmd):
    """Run a shell command and return its stdout as a list of lines.

    Failures (non-zero exit, missing binary) are logged and swallowed,
    returning an empty list (deliberate best-effort).
    """
    print("cmd is :" + cmd)
    lines = []
    try:
        output = subprocess.check_output(cmd, shell=True)
        lines = output.decode('UTF-8').splitlines()
    except Exception as e:
        print("message is:%s" % (str(e)))
        traceback.print_exc()
    return lines


def call_dp_api(url, postData):
    """POST a JSON payload to a DP openapi endpoint and return the parsed reply.

    Authentication: token = md5(shared secret + millisecond timestamp),
    sent alongside the timestamp in the request headers.
    NOTE(review): TLS certificate verification is deliberately disabled below.
    """
    ts = str(int(time.time() * 1000))
    token = hashlib.md5(('Pg0KCLPZadpSqAI6' + ts).encode('utf-8')).hexdigest()
    headers = {
        'client-user': 'xn_hrg_star',
        'token': token,
        'ts': ts,
        'Content-Type': 'application/json'
    }
    body = json.dumps(postData)

    # SSL context with hostname checking and certificate validation off.
    ssl_ctx = ssl.create_default_context()
    ssl_ctx.check_hostname = False
    ssl_ctx.verify_mode = ssl.CERT_NONE

    reply = urlopen(Request(url, data=body, headers=headers), context=ssl_ctx)
    parsed = json.loads(reply.read())
    print('call dp api result is:' + str(parsed))

    return parsed


def get_task_exec_result(scheduler_id):
    """Query today's finished runs of a scheduler job and summarize them.

    Asks the DP exec-log API for today's records of the given scheduler id
    (job_state 400, schedule types 1 and 5) and returns get_min_end_time's
    summary of the reply.
    """
    url = 'https://58dp.58corp.com/openapi/team/job/list-exec-logs'
    day = datetime.date.today().strftime('%Y-%m-%d')
    postData = {
        "req": {
            "start_time_args": day + " 00:00:00",
            "end_time_args": day + " 24:00:00",
            "scheduler_id": scheduler_id,
            "job_state": [400],
            "schedule_type": [1, 5]
        },
        "page": {
            "current": 1,
            "size": 1000
        }
    }
    return get_min_end_time(call_dp_api(url, postData))


def get_min_end_time(json_data):
    """Extract the earliest job end time (as HH:MM) from a DP exec-log reply.

    Returns {'is_valid': bool, 'min_end_time': str or None}: is_valid is
    False when the API code is non-zero; min_end_time stays None when no
    records were returned.
    """
    summary = {'is_valid': False, 'min_end_time': None}
    if json_data['code'] != 0:
        return summary
    summary['is_valid'] = True
    records = json_data['data']['records']
    if records:
        earliest = min(rec['end_time'] for rec in records)
        parsed = datetime.datetime.strptime(earliest, '%Y-%m-%d %H:%M:%S')
        summary['min_end_time'] = parsed.strftime('%H:%M')
    return summary


def fetchAllTask():
    """Fetch enabled sla_check task configs due in the last 10 minutes.

    Selects rows from the task config table whose produce_time falls inside
    the trailing 10-minute window, returning executeHiveSql's row list.
    """
    query = """SELECT  id
            ,task_name
            ,task_type
            ,check_table
            ,compare_table
            ,is_open
            ,join_field
            ,check_field
            ,check_value
            ,field_check_rule
            ,produce_time
            ,check_sql
            ,check_path
            ,resolve_group_id
            ,priority
            ,owner_id
            ,action
            ,update_time
            ,create_time
            ,dp_scheduler_id
        FROM hdp_lbg_supin_zplisting.app_dqc_check_task_config
        WHERE action in(0, 1)
        AND is_open = 1
        AND task_type= 'sla_check'
        AND produce_time BETWEEN from_unixtime(unix_timestamp()-600, 'HH:mm') AND from_unixtime(unix_timestamp(), 'HH:mm')
        """
    return executeHiveSql(query)


def checkTasks(results):
    """Run every fetched check task, then flush the buffered results."""
    for task_row in results:
        executeCheckTask(task_row)
    insertToMidResult()


def main():
    """Entry point: fetch the due SLA check tasks and execute them."""
    checkTasks(fetchAllTask())


# Script entry point; intended to be invoked by a scheduler every 10 minutes.
if __name__ == '__main__':
    main()
