import datetime
import decimal
import json
import math
import re

from time import strftime, gmtime

import paramiko
import requests

from django.core.paginator import Paginator
from django.db import connection
from django.db.models import Count, Q
from django.shortcuts import render, HttpResponse
from django.views.decorators.csrf import csrf_exempt
from django.views.decorators.http import require_http_methods

from ReportApp.models import Tier2FuncOs, Tier2FuncInfo, Tier3FuncInfo, \
    ReliaInfo, SteadyInfo, \
    IssuesCategory, IssuesInfo, ReliaModel, HisTier2FuncInfo, \
    HisReliaInfo, HisTier3FuncInfo, HisMasFuncInfo, MasFuncInfo, MasFuncModel


@require_http_methods(["GET"])
def get_sum_data(request):
    """
    Data source for the report summary section.

    Aggregates per-OS totals (pass/fail/block counts, earliest start,
    summed run time, core counts) from the model table selected by the
    ``feature`` and ``tier`` query parameters.
    :param request: GET with 'feature' ('function' or other) and 'tier'.
    :return: HttpResponse with {'total': n, 'rows': [...]} as JSON.
    """
    feature = request.GET.get('feature')
    tier = request.GET.get('tier')
    if feature == "function":
        feature_name = '功能'
        func_tables = {'2': "ReportApp_tier2funcmodel",
                       '3': "ReportApp_tier3funcmodel"}
        t_name = func_tables.get(tier, "ReportApp_masfuncmodel")
    else:
        feature_name = '可靠性'
        t_name = "ReportApp_reliamodel"

    # Inner query collapses per-host rows, outer query collapses hosts
    # into one row per run_os.
    # NOTE(review): the derived table carries no alias — PostgreSQL would
    # reject this; presumably the target DB accepts it. Confirm.
    with connection.cursor() as cursor:
        row_query = f'select run_os, sum(total_num) as total_num, ' \
            f'sum(pass_num) as pass_num, sum(fail_num) as fail_num, ' \
            f'sum(block_num) as block_num, min(start_time) as start_time, ' \
            f'max(run_time) as run_time, sum(core_num) as core_num ' \
            f'from (select run_os, sum(total_num) as total_num, ' \
            f'sum(pass_num) as pass_num, sum(fail_num) as fail_num, ' \
            f'sum(block_num) as block_num, min(start_time) as start_time, ' \
            f'sum(run_time) as run_time, max(core_num) as core_num ' \
            f'from "{t_name}" ' \
            f'group by run_os, ip_name) group by run_os order by run_os;'
        cursor.execute(row_query)
        data_row = cursor.fetchall()

    rows = []
    for (run_os, total_num, pass_num, fail_num, block_num,
         start_time, run_time, core_num) in data_row:
        rows.append({
            'feature_name': feature_name,
            'run_os': run_os,
            'total_num': int(total_num),
            'pass_num': int(pass_num),
            'fail_num': int(fail_num),
            'block_num': int(block_num),
            'success_rate': cal_rate(pass_num, total_num),
            'start_time': start_time.strftime('%Y-%m-%d %H:%M:%S'),
            # run_time is a timedelta: render as "<days>天 HH:MM:SS".
            'run_time': str(run_time.days) + '天 ' + strftime(
                "%H:%M:%S", gmtime(run_time.seconds)),
            'core_num': int(core_num)})
    response_data = {'total': len(data_row), 'rows': rows}

    return HttpResponse(
        json.dumps(response_data, cls=DecimalEncoder, ensure_ascii=False))


@require_http_methods(["GET"])
def get_sqlperf_data(request):
    """
    Data source for the SQL-performance summary section.

    Collects the three most recent openGauss 'simple_operator' runs plus
    the pg14 baseline run (if recorded) and returns, per statement, the
    explain plan and execution time of each run.
    :param request:
    :return: HttpResponse with {'sql_num', 'rows', 'date_version'} JSON.
    """
    response_data = {'sql_num': 0,
                     'rows': [],
                     'date_version': []}
    # Column-name suffix for each of the (up to) three openGauss runs.
    num_dict = ("one", "two", "three")
    with connection.cursor() as cursor:
        # Latest three run dates / commit ids for this scene.
        cursor.execute(
            'select start_time,commit_id '
            'from "ReportApp_sqlperfcommit" '
            "where scene='simple_operator' and db_type='openGauss' "
            'order by start_time desc limit 3;')
        date_commit_row = cursor.fetchall()
    date_commit = {'date': [], 'commit': []}
    for row in date_commit_row:
        date_commit['date'].append(row[0].strftime('%Y-%m-%d'))
        date_commit['commit'].append(row[1])

    sql_row = ()
    if date_commit['date']:
        with connection.cursor() as cursor:
            # The statement list comes from the most recent run.
            cursor.execute(
                'select sql_statement, exec_wrap '
                'from "ReportApp_sqlperf" '
                'where start_time = %s '
                "and scene='simple_operator' and db_type='openGauss';",
                [date_commit['date'][0]])
            sql_row = cursor.fetchall()
        response_data['sql_num'] = len(sql_row)

    if not sql_row:
        # No runs recorded yet: return the empty skeleton.
        return HttpResponse(
            json.dumps(response_data, cls=DecimalEncoder, ensure_ascii=False))

    for id_num, row in enumerate(sql_row, start=1):
        response_data['rows'].append({
            'id_seq': id_num,
            'sql_statement': row[0],
            'exec_wrap': row[1]
        })

    for num, date in enumerate(date_commit['date']):
        response_data['date_version'].append(
            date + ' commitid: ' + date_commit['commit'][num])
        for sql_num, sql in enumerate(sql_row):
            with connection.cursor() as cursor:
                # Parameterized query: the old hand-rolled e'...' quote
                # escaping missed backslashes inside statements, which
                # broke the lookup for such rows.
                cursor.execute(
                    'select sql_explain, exe_time '
                    'from "ReportApp_sqlperf" where '
                    'start_time = %s and sql_statement = %s and '
                    "scene='simple_operator' and db_type='openGauss';",
                    [date, sql[0]])
                data_row = cursor.fetchall()

            if data_row:
                explain, exe_time = data_row[0]
            else:
                explain, exe_time = "", ""
            response_data['rows'][sql_num].update({
                f'explain_{num_dict[num]}': explain,
                f'execute_time_{num_dict[num]}': exe_time,
            })

    with connection.cursor() as cursor:
        # pg14 baseline: a single recorded run, if any.
        cursor.execute(
            'select start_time from "ReportApp_sqlperfcommit" '
            "where scene='simple_operator' and db_type='pg14';")
        sql_pg_start = cursor.fetchall()
        if sql_pg_start:
            pg_start_time = sql_pg_start[0][0].strftime('%Y-%m-%d')
            response_data['date_version'].append(pg_start_time)
            for sql_num, sql in enumerate(sql_row):
                cursor.execute(
                    'select sql_explain, exe_time '
                    'from "ReportApp_sqlperf" where '
                    'start_time = %s and sql_statement = %s and '
                    "scene='simple_operator' and db_type='pg14';",
                    [pg_start_time, sql[0]])
                pg_data_row = cursor.fetchall()

                if pg_data_row:
                    pg_explain, pg_time = pg_data_row[0]
                else:
                    pg_explain, pg_time = "", ""
                response_data['rows'][sql_num].update({
                    'explain_pg': pg_explain,
                    'execute_time_pg': pg_time,
                })

    return HttpResponse(
        json.dumps(response_data, cls=DecimalEncoder, ensure_ascii=False))


@require_http_methods(["GET"])
def get_sqlperf_data_yukon(request):
    """
    Data source for the Yukon SQL-performance summary section.

    Same shape as get_sqlperf_data but for scene='yukon'; the statement
    list carries no exec_wrap column.
    :param request:
    :return: HttpResponse with {'sql_num', 'rows', 'date_version'} JSON.
    """
    response_data = {'sql_num': 0,
                     'rows': [],
                     'date_version': []}
    # Column-name suffix for each of the (up to) three openGauss runs.
    num_dict = ("one", "two", "three")
    with connection.cursor() as cursor:
        # Latest three run dates / commit ids for this scene.
        cursor.execute(
            'select start_time,commit_id '
            'from "ReportApp_sqlperfcommit" '
            "where scene='yukon' and db_type='openGauss' "
            'order by start_time desc limit 3;')
        date_commit_row = cursor.fetchall()
    date_commit = {'date': [], 'commit': []}
    for row in date_commit_row:
        date_commit['date'].append(row[0].strftime('%Y-%m-%d'))
        date_commit['commit'].append(row[1])

    sql_row = ()
    if date_commit['date']:
        with connection.cursor() as cursor:
            # The statement list comes from the most recent run.
            cursor.execute(
                'select sql_statement from "ReportApp_sqlperf" '
                'where start_time = %s '
                "and scene='yukon' and db_type='openGauss';",
                [date_commit['date'][0]])
            sql_row = cursor.fetchall()
        response_data['sql_num'] = len(sql_row)

    if not sql_row:
        # No runs recorded yet: return the empty skeleton.
        return HttpResponse(
            json.dumps(response_data, cls=DecimalEncoder, ensure_ascii=False))

    for id_num, row in enumerate(sql_row, start=1):
        response_data['rows'].append({
            'id_seq': id_num,
            'sql_statement': row[0]
        })

    for num, date in enumerate(date_commit['date']):
        response_data['date_version'].append(
            date + ' commitid: ' + date_commit['commit'][num])
        for sql_num, sql in enumerate(sql_row):
            with connection.cursor() as cursor:
                # Parameterized query: the old hand-rolled e'...' quote
                # escaping missed backslashes inside statements.
                cursor.execute(
                    'select sql_explain, exe_time '
                    'from "ReportApp_sqlperf" where '
                    'start_time = %s and sql_statement = %s and '
                    "scene='yukon' and db_type='openGauss';",
                    [date, sql[0]])
                data_row = cursor.fetchall()

            if data_row:
                explain, exe_time = data_row[0]
            else:
                explain, exe_time = "", ""
            response_data['rows'][sql_num].update({
                f'explain_{num_dict[num]}': explain,
                f'execute_time_{num_dict[num]}': exe_time,
            })

    with connection.cursor() as cursor:
        # pg14 baseline: a single recorded run, if any.
        cursor.execute(
            'select start_time from "ReportApp_sqlperfcommit" '
            "where scene='yukon' and db_type='pg14';")
        sql_pg_start = cursor.fetchall()
        if sql_pg_start:
            pg_start_time = sql_pg_start[0][0].strftime('%Y-%m-%d')
            response_data['date_version'].append(pg_start_time)
            for sql_num, sql in enumerate(sql_row):
                cursor.execute(
                    'select sql_explain, exe_time '
                    'from "ReportApp_sqlperf" where '
                    'start_time = %s and sql_statement = %s and '
                    "scene='yukon' and db_type='pg14';",
                    [pg_start_time, sql[0]])
                pg_data_row = cursor.fetchall()

                if pg_data_row:
                    pg_explain, pg_time = pg_data_row[0]
                else:
                    pg_explain, pg_time = "", ""
                response_data['rows'][sql_num].update({
                    'explain_pg': pg_explain,
                    'execute_time_pg': pg_time,
                })

    return HttpResponse(
        json.dumps(response_data, cls=DecimalEncoder, ensure_ascii=False))


@require_http_methods(["GET"])
def get_tpch_data(request):
    """
    Data source for the TPC-H performance comparison table.

    Returns per-query explain plans and times for the three most recent
    openGauss runs (at the requested query_dop) plus the pg14 baseline.
    :param request: GET with 'querydop_val' ('query_dop_1'/'1' selects
        dop 1; anything else selects dop 64).
    :return: HttpResponse with {'sql_num', 'rows', 'date_version'} JSON.
    """
    query_send = request.GET.get('querydop_val', default='1')
    # Only two parallelism settings are recorded: dop 1 and dop 64.
    if query_send in ("query_dop_1", '1'):
        query_dop = 1
    else:
        query_dop = 64
    response_data = {'sql_num': 0,
                     'rows': [],
                     'date_version': []}
    num_dict = ("one", "two", "three")
    with connection.cursor() as cursor:
        cursor.execute(
            'select start_time,commit_id from "ReportApp_tpchperf" '
            "where db_type='openGauss' and query_dop = %s "
            'group by start_time,commit_id '
            'order by start_time desc limit 3;', [query_dop])
        date_commit_row = cursor.fetchall()
    date_commit = {'date': [], 'commit': []}
    for row in date_commit_row:
        date_commit['date'].append(row[0].strftime('%Y-%m-%d'))
        date_commit['commit'].append(row[1])

    tpch_res_row = ()
    if date_commit['date']:
        with connection.cursor() as cursor:
            cursor.execute(
                'select tpch_res from "ReportApp_tpchperf" '
                'where start_time = %s and '
                "db_type='openGauss' and query_dop = %s "
                'order by sql_id;',
                [date_commit['date'][0], query_dop])
            tpch_res_row = cursor.fetchall()
        response_data['sql_num'] = len(tpch_res_row)

    if not tpch_res_row:
        # No runs recorded yet: return the empty skeleton.
        return HttpResponse(
            json.dumps(response_data, cls=DecimalEncoder, ensure_ascii=False))

    with connection.cursor() as cursor:
        # Static statement/plan catalogue; rows are keyed by sql_id.
        cursor.execute(
            'select sql_id, sql_statement, sql_explain from '
            '"ReportApp_tpchinfo" order by sql_id;')
        sql_row = cursor.fetchall()
    for row in sql_row:
        response_data['rows'].append({
            'id_seq': row[0],
            'sql_statement': row[1],
            'sql_explain': row[2]
        })

    for num, date in enumerate(date_commit['date']):
        response_data['date_version'].append(
            date + ' commitid: ' + date_commit['commit'][num])
        with connection.cursor() as cursor:
            cursor.execute(
                'select sql_id, explain, tpch_res '
                'from "ReportApp_tpchperf" where '
                "start_time = %s and db_type='openGauss' "
                'and query_dop = %s order by sql_id;', [date, query_dop])
            data_row = cursor.fetchall()

        for sql_ind, explain, tpch_res in data_row:
            # Skip sql_ids with no matching "ReportApp_tpchinfo" row,
            # which previously raised IndexError.
            if not 1 <= sql_ind <= len(response_data['rows']):
                continue
            response_data['rows'][sql_ind - 1].update({
                f'explain_{num_dict[num]}': explain,
                f'execute_time_{num_dict[num]}': tpch_res,
            })

    with connection.cursor() as cursor:
        # pg14 baseline: a single recorded run, if any.
        cursor.execute(
            'select start_time from "ReportApp_tpchperf" '
            "where db_type='pg14' and query_dop = %s "
            'group by start_time,commit_id;', [query_dop])
        sql_pg_start = cursor.fetchall()
    if sql_pg_start:
        pg_start_time = sql_pg_start[0][0].strftime('%Y-%m-%d')
        response_data['date_version'].append(pg_start_time)
        with connection.cursor() as cursor:
            cursor.execute(
                'select sql_id, explain, tpch_res '
                'from "ReportApp_tpchperf" where '
                "start_time = %s and db_type='pg14' "
                'and query_dop = %s order by sql_id;',
                [pg_start_time, query_dop])
            data_row = cursor.fetchall()

        for sql_ind, explain, tpch_res in data_row:
            if not 1 <= sql_ind <= len(response_data['rows']):
                continue
            response_data['rows'][sql_ind - 1].update({
                'explain_pg': explain,
                'execute_time_pg': tpch_res,
            })
    return HttpResponse(
        json.dumps(response_data, cls=DecimalEncoder, ensure_ascii=False))


@require_http_methods(["GET"])
def get_migrate_perf_data(request):
    """
    Data source for the migration performance comparison table.

    Returns per-scene migration speeds for the three most recent
    openGauss runs of the requested migrate_type.
    :param request: GET with 'migrate_type'.
    :return: HttpResponse with {'sql_num', 'rows', 'date_version'} JSON.
    """
    migrate_type = request.GET.get('migrate_type')
    response_data = {'sql_num': 0,
                     'rows': [],
                     'date_version': []}

    num_serial = ("one", "two", "three")
    with connection.cursor() as cursor:
        # Parameterized: migrate_type comes straight from the query
        # string and must not be interpolated into the SQL text (SQL
        # injection; the old f-string was also missing a space before
        # "group by").
        cursor.execute(
            'select start_time,commit_id '
            'from "ReportApp_migrateperf" '
            "where db_type='openGauss' "
            'and migrate_type = %s '
            'group by start_time,commit_id '
            'order by start_time desc limit 3;', [migrate_type])
        date_commit_row = cursor.fetchall()
    date_commit = {'date': [], 'commit': []}
    for row in date_commit_row:
        date_commit['date'].append(row[0].strftime('%Y-%m-%d'))
        date_commit['commit'].append(row[1])

    for num, date in enumerate(date_commit['date']):
        response_data['date_version'].append(
            date + ' commitid: ' + date_commit['commit'][num])
        with connection.cursor() as cursor:
            cursor.execute(
                'select migrate_scene, speed from '
                '"ReportApp_migrateperf" where '
                "db_type='openGauss' and start_time = %s "
                'and migrate_type = %s '
                'order by migrate_scene;', [date, migrate_type])
            migrate_res_row = cursor.fetchall()

        for mig_ind, migrate_row in enumerate(migrate_res_row):
            if num == 0:
                # The newest run defines the row set; older runs only
                # add their speed column to the existing rows.
                response_data['rows'].append({
                    'scene_type': migrate_row[0],
                    f'speed_{num_serial[num]}': migrate_row[1]
                })
            else:
                response_data['rows'][mig_ind].update({
                    f'speed_{num_serial[num]}': migrate_row[1]
                })
    return HttpResponse(
        json.dumps(response_data, cls=DecimalEncoder, ensure_ascii=False))


@require_http_methods(["GET"])
def get_info_data(request):
    """
    Data source for the detailed-results table.

    Reads paging/sorting/filter parameters from the query string,
    selects the backing Info model (by 'feature', 'tier' and
    'date_type'), applies the filters and returns one page of rows.
    :param request: GET with 'limit'/'offset' (paging), 'sort'/'order'
        (sorting) and per-column filter parameters.
    :return: HttpResponse with {'total': count, 'rows': [...]} as JSON.
    """
    limit = 20
    offset = 0
    sort_column = ''
    order = ''
    conditions = {}  # Django ORM filter kwargs built from the request
    t_name = ''  # manager of the selected Info model
    feature_name = '功能'
    # NOTE: tuple order matters — 'feature' is processed before
    # 'date_type', which uses feature_name to pick the table.
    params = ('limit', 'offset', 'sort', 'order', 'feature', 'run_os',
              'model_name', 'tc_name', 'run_result', 'failed_reason',
              'solution', 'problem_type', 'issue_no', 'develop_owner',
              'tester', 'status', 'date_type', 'create_time', 'end_time')
    for par in params:
        val = request.GET.get(par)
        if par == 'limit':
            limit = val
        elif par == 'offset':
            offset = val
        elif par == 'sort':
            sort_column = val
        elif par == 'order':
            order = val
        elif par == 'feature':
            if val and val == 'function':
                feature_name = '功能'
            else:
                feature_name = '可靠性'
        elif par == 'date_type':
            # 'his' selects the historical snapshot tables; anything
            # else (including a missing parameter) selects the current
            # tables.
            if val and val == 'his':
                if feature_name == '功能':
                    if request.GET.get('tier') == "2":
                        t_name = HisTier2FuncInfo.objects
                    elif request.GET.get('tier') == "3":
                        t_name = HisTier3FuncInfo.objects
                    else:
                        t_name = HisMasFuncInfo.objects
                else:
                    t_name = HisReliaInfo.objects
            else:
                if feature_name == '功能':
                    if request.GET.get('tier') == "2":
                        t_name = Tier2FuncInfo.objects
                    elif request.GET.get('tier') == "3":
                        t_name = Tier3FuncInfo.objects
                    else:
                        t_name = MasFuncInfo.objects
                else:
                    t_name = ReliaInfo.objects
        elif par == 'tester':
            # 'tester' filters the test_owner column; comma-separated
            # values become an exact multi-match.
            if val and val != 'all':
                conditions['test_owner__in'] = val.split(',')
        elif par in ['model_name', 'tc_name']:
            if val and val != 'all':
                conditions[f'{par}__icontains'] = val
        elif par == 'create_time' and val:
            # create_time/end_time bound a range on start_time.
            f_time = datetime.datetime.strptime(val, "%Y-%m-%d %H:%M:%S")
            conditions[f'start_time__gte'] = f_time
        elif par == 'end_time' and val:
            f_time = datetime.datetime.strptime(val, "%Y-%m-%d %H:%M:%S")
            conditions[f'start_time__lte'] = f_time
        elif par == 'failed_reason' and val:
            # The literal '空' ("empty") selects rows whose
            # failed_reason is NULL.
            if val == '空':
                conditions[f'{par}__isnull'] = True
            else:
                conditions[f'{par}__icontains'] = val
        else:
            # Remaining columns: case-insensitive substring match,
            # 'all' disables the filter.
            if val and val != 'all':
                conditions[f'{par}__icontains'] = val
    all_records = t_name.filter(**conditions).order_by("run_os",
                                                       "model_name",
                                                       "tc_name")

    # An explicit sort column overrides the default ordering.
    if sort_column:
        if order == 'desc':
            sort_column = '-%s' % sort_column
        all_records = all_records.order_by(sort_column)

    all_records_count = all_records.count()

    if not offset:
        offset = 0
    if not limit:
        limit = 20
    pageinator = Paginator(all_records, limit)

    # Convert offset/limit paging into a 1-based page number.
    page = int(int(offset) / int(limit) + 1)
    response_data = {'total': all_records_count,
                     'rows': []}

    for info in pageinator.page(page):
        # run_result codes: 'ok' -> success, 'er' -> failure,
        # anything else -> blocked.
        if info.run_result == 'ok':
            run_result = '成功'
        elif info.run_result == 'er':
            run_result = '失败'
        else:
            run_result = '阻塞'

        response_data['rows'].append({
            "id": info.id,
            "feature_name": feature_name,
            "run_os": info.run_os if info.run_os else "",
            "model_name": info.model_name if info.model_name else "",
            "tc_name": info.tc_name if info.tc_name else "",
            "run_result": run_result,
            "start_time": info.start_time.strftime(
                "%Y-%m-%d %H:%M:%S") if info.start_time else "",
            "run_time": info.run_time if info.run_time else "",
            "failed_reason": info.failed_reason if info.failed_reason else "",
            "solution": info.solution if info.solution else "",
            "problem_type": info.problem_type if info.problem_type else "",
            "issue_no": info.issue_no if info.issue_no else "",
            "develop_owner": info.develop_owner if info.develop_owner else "",
            "test_owner": info.test_owner,
            "status": info.status if info.status else "",
            "jenkins_link": info.jenkins_link if info.jenkins_link else "",
            "log_link": info.log_link if info.log_link else "",
        })

    return HttpResponse(json.dumps(response_data))


def tier2(request):
    """Report entry point: render the tier-2 report page."""
    return render(request, 'tier2.html')


def tier3(request):
    """Report entry point: render the tier-3 report page."""
    return render(request, 'tier3.html')


def master(request):
    """Report entry point: render the master-branch report page."""
    return render(request, 'master.html')


def reliability(request):
    """Report entry point: render the reliability report page."""
    return render(request, 'reliability.html')


def steady(request):
    """
    Report entry point: render the steady-state test page.

    Combines SteadyInfo rows from the database with live SQLsmith job
    status scraped from Jenkins.
    :param request:
    :return: rendered 'steady.html'.
    """
    data_list = list(SteadyInfo.objects.all().order_by('id'))
    for data in data_list:
        # NOTE(review): x86_openEuler status is unconditionally forced
        # to 'error' — looks like a deliberate known-bad override, but
        # worth confirming.
        if data.run_os == 'x86_openEuler':
            data.status = 'error'

    sqlsmith_url = "http://xx.xx.xx.xx:8080/job/sqlsmith_execute/" \
                   "label=sqlsmith_ubuntu_x.xx.xx.xx/lastBuild/"
    # Timeouts keep the page from hanging forever if Jenkins is down
    # (requests.get blocks indefinitely without one).
    sqlsmith_start_url = sqlsmith_url + "buildTimestamp"
    sqlsmith_start = requests.get(sqlsmith_start_url, timeout=10)
    sqlsmith_status_url = sqlsmith_url + "api/xml"
    res = requests.get(sqlsmith_status_url, timeout=10)
    # Crude scrape: pull the text of the <building> element.
    start_index = res.text.index('<building>') + len('<building>')
    end_index = res.text.index('</building>')
    sqlsmith_status = res.text[start_index: end_index]
    sqlsmith_list = {'model': 'SQLsmith',
                     'start_time': sqlsmith_start.text,
                     'status': sqlsmith_status,
                     'url': sqlsmith_url}

    data_dict = {'data_list': data_list, 'sqlsmith_list': sqlsmith_list}
    return render(request, 'steady.html', {"data_dict": data_dict})


def lcov(request):
    """
    Render the lcov coverage report summary.

    Scrapes line/function hit counts, coverage percentages and the
    report date from the generated lcov index.html on Jenkins.
    :param request:
    :return: rendered 'lcov.html'.
    """
    url = "http://xx.xx.xx.xx:8080/jenkins/job/lcov/ws/results/index.html"
    # Timeout keeps the page from hanging forever if Jenkins is down.
    res = requests.get(url, timeout=10)

    def section(header):
        # Fragment of the summary table from *header* to its row's </tr>.
        start = res.text.index(header)
        end = res.text.index('</tr>', start)
        return res.text[start:end + len('</tr>')]

    def cell(rows, line_no, split_chars="[<>]"):
        # Value between the tags on line *line_no* of a summary fragment;
        # "[<>%]" additionally strips the percent sign.
        return re.split(split_chars,
                        rows.splitlines()[line_no].strip())[2].strip()

    lines_rows = section('<td class="headerItem">Lines:</td>')
    lines_hit = int(cell(lines_rows, 1))
    lines_total = int(cell(lines_rows, 2))
    lines_coverage = float(cell(lines_rows, 3, "[<>%]"))

    functions_rows = section('<td class="headerItem">Functions:</td>')
    functions_hit = int(cell(functions_rows, 1))
    functions_total = int(cell(functions_rows, 2))
    functions_coverage = float(cell(functions_rows, 3, "[<>%]"))

    data_rows = section('<td class="headerItem">Date:</td>')
    date_time = cell(data_rows, 1)

    data_dict = {'lines': {'hit': lines_hit, 'total': lines_total,
                           'coverage': lines_coverage},
                 'functions': {'hit': functions_hit, 'total': functions_total,
                               'coverage': functions_coverage},
                 'date_time': date_time,
                 'url': url}
    return render(request, 'lcov.html', {"data_dict": data_dict})


def performance(request):
    """Report entry point: render the performance report page."""
    return render(request, 'performance.html')


def perf_get_data(request):
    """
    Return seven-day TPC-C performance series as JSON: x_data (dates,
    oldest first), y_data (tpmc per feature per day) and dot_data
    (scaled sample dots per feature per day).
    """
    feature_tuple = (
        'single_2p_no_part_1H', '1p1s_2p_part_1H', '1p1s_2p_no_part_1H',
        'single_5220_no_part_1H', 'single_4p_part_1H', 'single_2p_part_1H')
    today = datetime.datetime.today()
    # Last seven calendar days, oldest first.
    t_list = [datetime.datetime.strftime(today - datetime.timedelta(d),
                                         '%Y-%m-%d')
              for d in range(6, -1, -1)]
    y_data = []
    dot_data = []
    for f in feature_tuple:
        # Feature string encodes cluster_cpu_partitioning_duration.
        parts = f.split('_')
        t_feature = 'no_part' if parts[2] == 'no' else 'part'
        conditions = (f"cpu_feature = '{parts[1]}' "
                      f"and cluster = '{parts[0]}' "
                      f"and t_feature = '{t_feature}' ")
        tmp_y_data = []
        tmp_dot_data = []
        for t in t_list:
            # Newest run of the day, if any; conditions are built from
            # the fixed feature tuple above (no user input).
            raw_query = f'set time zone \'PRC\'; ' \
                f'select substring(start_time, 0, 11) as ' \
                f'new_time, tpmc, dot_res from "ReportApp_perfinfo" ' \
                f'where {conditions} and new_time=\'{t}\' ' \
                f'order by start_time desc limit 1;'
            with connection.cursor() as cursor:
                cursor.execute(raw_query)
                rows = cursor.fetchall()
            if rows:
                tmp_y_data.append(rows[0][1])
                dot_res = rows[0][2]
                if dot_res:
                    # Each raw dot sample is scaled by 0.45 for the plot.
                    tmp_dot_data.append(
                        [float(dot) * 0.45
                         for dot in dot_res.strip().split(",")])
                else:
                    tmp_dot_data.append([])
            else:
                # No run that day: zero tpmc, no dots.
                tmp_y_data.append(0)
                tmp_dot_data.append([])
        y_data.append(tmp_y_data)
        dot_data.append(tmp_dot_data)

    data_dict = {'x_data': t_list, 'y_data': y_data, 'dot_data': dot_data}

    return HttpResponse(
        json.dumps(data_dict, cls=DecimalEncoder, ensure_ascii=False))


def perf_resource(request):
    """Return today's latest 64 CPU samples as [[us...], [sy...]] JSON."""
    start_time = datetime.datetime.now().strftime('%Y-%m-%d')

    # start_time is generated locally (today's date), not user input.
    raw_query = f'select us,sy ' \
                f'from "ReportApp_perfcpu" ' \
                f'where start_time like \'%{start_time}%\' ' \
                f'order by id desc limit 64;'
    with connection.cursor() as cursor:
        cursor.execute(raw_query)
        rows = cursor.fetchall()
    cpu_data = [[row[0] for row in rows],
                [row[1] for row in rows]]
    return HttpResponse(
        json.dumps(cpu_data, cls=DecimalEncoder, ensure_ascii=False))


@require_http_methods(["POST"])
@csrf_exempt
def update_info(request):
    """
    Edit issue-tracking fields on one or more test result rows.

    POST params: 'id' (single row) and/or 'ids' (comma-separated list)
    select the rows; 'feature'/'tier' select the backing table; the
    remaining params are the field values to write.
    :param request:
    :return: HttpResponse {'status': 0, 'result': 'success'} as JSON.
    """
    t_id = request.POST.get('id', '')
    ids = request.POST.get('ids', '')
    feature = request.POST.get('feature')
    tier = request.POST.get('tier')
    if feature == "function":
        if tier == '2':
            t_name = Tier2FuncInfo.objects
        elif tier == '3':
            t_name = Tier3FuncInfo.objects
        else:
            t_name = MasFuncInfo.objects
    else:
        t_name = ReliaInfo.objects

    # Build the field set once; problem_type and status fall back to
    # defaults when missing or empty.
    fields = {
        'solution': request.POST.get('solution'),
        'failed_reason': request.POST.get('failed_reason'),
        'status': request.POST.get('status') or '未解决',
        'develop_owner': request.POST.get('develop_owner'),
        'test_owner': request.POST.get('test_owner'),
        'problem_type': request.POST.get('problem_type') or '用例问题',
        'issue_no': request.POST.get('issue_no'),
    }

    # Collect all target ids and update them in a single query instead
    # of the old per-id loop (same result, one statement).
    id_list = []
    if t_id:
        id_list.append(t_id)
    if ids:
        id_list.extend(ids.split(','))
    if id_list:
        t_name.filter(id__in=id_list).update(**fields)
    return HttpResponse(json.dumps({
        "status": 0,
        "result": "success"
    }))


def test(request):
    """Report entry point: render the test page."""
    return render(request, 'test.html')


def analyse(request):
    """Report entry point: render the analysis page."""
    return render(request, 'analyse.html')


@require_http_methods(["GET"])
def get_bug_data(request):
    """
    Bug data source for the analysis page.

    Aggregates failed cases typed as 'bug' from all four result tables
    and joins issue metadata from ReportApp_issuesinfo on issue_no.

    :param request: Django HttpRequest (GET)
    :return: JSON ``{'total': N, 'rows': [...]}``
    """
    response_data = {'total': 0,
                     'rows': []}
    for t in ('ReportApp_tier2funcinfo', 'ReportApp_tier3funcinfo',
              'ReportApp_masfuncinfo', 'ReportApp_reliainfo'):
        # The feature label depends only on the table, so compute it
        # once per table instead of once per result row.
        if 'tier2func' in t:
            feature_name = '功能-2.0.0'
        elif 'tier3func' in t:
            feature_name = '功能-3.0.0'
        elif 'masfunc' in t:
            feature_name = '功能-master'
        else:
            feature_name = '可靠性'
        raw_query = f"select a.run_os, a.model_name, count(a.*) as er_num, " \
            f"a.ip_name , a.issue_no, coalesce(b.title, a.failed_reason), " \
            f"b.priority, b.creater, b.assignee, b.state, b.create_date " \
            f"from \"{t}\" a " \
            f"left join \"ReportApp_issuesinfo\" b " \
            f"on a.issue_no=b.issue_id " \
            f"where a.run_result<>'ok' and a.problem_type='bug' " \
            f"group by a.run_os, a.model_name, a.ip_name, a.issue_no, " \
            f"b.title, b.priority, b.creater, b.assignee, b.state, " \
            f"b.create_date, a.failed_reason " \
            f"order by er_num, a.issue_no, a.model_name;"
        with connection.cursor() as cursor:
            cursor.execute(raw_query)
            rows = cursor.fetchall()

        for row in rows:
            # row[10] is b.create_date; NULL when no issue matched.
            if row[10]:
                create_date = datetime.datetime.strftime(row[10],
                                                         '%Y-%m-%d %H:%M:%S')
            else:
                create_date = ''
            response_data['rows'].append({
                'feature_name': feature_name,
                'run_os': row[0] if row[0] else '',
                'schd_name': row[1] if row[1] else '',
                'er_num': row[2] if row[2] else '',
                'ip_name': row[3] if row[3] else '',
                'issue_no': row[4] if row[4] else '',
                'title': row[5] if row[5] else '',
                'priority': row[6] if row[6] else '',
                'creater': row[7] if row[7] else '',
                'assignee': row[8] if row[8] else '',
                'status': row[9] if row[9] else '',
                'create_date': create_date})
    response_data['total'] = len(response_data['rows'])

    return HttpResponse(json.dumps(response_data))  # serialize as JSON


@require_http_methods(["GET"])
def get_core_data(request):
    """
    Core-dump statistics: every model run (master function suite plus
    reliability suite) whose core_num is non-zero.

    :param request: Django HttpRequest (GET)
    :return: JSON ``{'total': N, 'rows': [...]}``
    """
    core_info_data = list(MasFuncModel.objects.filter(~Q(core_num=0)))
    core_info_data.extend(ReliaModel.objects.filter(~Q(core_num=0)))
    # Model names starting with these prefixes belong to the master
    # function suite; everything else is treated as reliability.
    # (Constant hoisted out of the loop; also fixes the 'prifix' typo.)
    schd_prefix_tuple = ('FU', 'DF')
    response_data = {'total': len(core_info_data),
                     'rows': []}
    for data in core_info_data:
        if data.model_name.split('_')[0] in schd_prefix_tuple:
            feature_name = '功能-master'
        else:
            feature_name = '可靠性'
        response_data['rows'].append({
            'feature_name': feature_name,
            'run_os': data.run_os,
            'model_name': data.model_name,
            'start_time': datetime.datetime.strftime(data.start_time,
                                                     '%Y-%m-%d %H:%M:%S'),
            'run_time': datetime.time.strftime(data.run_time, '%H:%M:%S'),
            'ip_name': data.ip_name,
            'core_num': data.core_num})

    return HttpResponse(json.dumps(response_data))  # serialize as JSON


@require_http_methods(["GET"])
def get_re_er_data(request):
    """
    Repeated-failure report: for each currently failing case, count how
    many historical runs failed with the same result.

    :param request: Django HttpRequest (GET)
    :return: JSON ``{'total': N, 'rows': [...]}``
    """
    response_data = {'total': 0,
                     'rows': []}
    table_pairs = [["ReportApp_masfuncinfo", "ReportApp_hismasfuncinfo"],
                   ["ReportApp_reliainfo", "ReportApp_hisreliainfo"]]
    for t in table_pairs:
        raw_query = f"select count(b.*) as num, a.model_name, a.tc_name, " \
            f"a.test_owner from (select distinct(tc_name), model_name, " \
            f"test_owner, run_result from \"{t[0]}\" " \
            f"where run_result<>'ok') a, \"{t[1]}\" b " \
            f"where a.run_result=b.run_result and a.tc_name=b.tc_name " \
            f"group by a.model_name, a.tc_name, a.test_owner " \
            f"order by num desc, a.model_name, a.tc_name;"
        with connection.cursor() as cursor:
            cursor.execute(raw_query)
            result_rows = cursor.fetchall()
        for num, model_name, tc_name, test_owner in result_rows:
            # Reliability cases are identified by their tc_name.
            response_data['rows'].append({
                'feature_name': ('可靠性' if 'Reliability' in tc_name
                                 else '功能-master'),
                'num': num,
                'model_name': model_name,
                'tc_name': tc_name,
                'test_owner': test_owner})
    response_data['total'] = len(response_data['rows'])
    return HttpResponse(json.dumps(response_data))  # serialize as JSON


def cal_rate(dividends, divisor):
    """
    Compute a pass-rate percentage with display-friendly rounding.

    Rates strictly between 99 and 100 are truncated (not rounded) to
    two decimal places so a not-quite-perfect rate never displays as
    100; an exact 100 is returned as the integer 100; everything else
    is rounded to two decimals.

    :param dividends: numerator, e.g. passed-case count
    :param divisor: denominator, e.g. total-case count
    :return: percentage as ``int`` (exactly 100 or zero divisor) or ``float``
    """
    if not divisor:
        # DB sums can legitimately be 0; avoid ZeroDivisionError and
        # report an empty run as a 0% rate.
        return 0
    origin_rate = dividends / divisor * 100
    if math.floor(origin_rate) < 100 and math.ceil(origin_rate) == 100:
        # Truncate via the decimal string so e.g. 99.996 stays 99.99.
        spot_index = str(origin_rate).index('.')
        rate = float(str(origin_rate)[:spot_index + 3])
    elif math.floor(origin_rate) == 100 and math.ceil(origin_rate) == 100:
        rate = round(origin_rate)
    else:
        rate = round(origin_rate, 2)
    return rate


def get_images_data(request):
    """
    Chart data for the analysis page.

    Builds, for both the tier-2 and master function suites: pies of
    failed cases by problem type and by status, 7-day pass-rate lines
    per OS, and per-day problem-type bar charts.

    :param request: Django HttpRequest
    :return: HttpResponse with a JSON payload of all chart series
    """
    conditions = dict()
    t_list = list()
    pie_data_list = list()
    pie_data_list_2 = list()
    total_num_list = list()
    today = datetime.datetime.today()
    # Last 7 calendar days as 'YYYY-MM-DD'; oldest-first after reverse().
    for i in range(7):
        t_tmp = today - datetime.timedelta(i)
        t_list.append(datetime.datetime.strftime(t_tmp, '%Y-%m-%d'))
    t_list.reverse()
    # The pies and bars only look at failed ('er') cases.
    conditions['run_result'] = 'er'

    # Index 0 of each *_list below is the tier-2 suite, index 1 master.
    for tb in (Tier2FuncInfo.objects, MasFuncInfo.objects):
        # Failed-case counts grouped by problem_type (NULL -> '其他').
        pie_data_list.append([{'value': i['num'],
                               'name': i['problem_type'] if i[
                                   'problem_type'] else '其他'}
                              for i in
                              tb.filter(**conditions).values(
                                  'problem_type').annotate(
                                  num=Count('run_result')).values(
                                  'num', 'problem_type')])

        # Failed-case counts grouped by resolution status.
        info_data = [i for i in
                     tb.filter(**conditions).values(
                         'status').annotate(
                         num=Count('run_result')).values(
                         'num', 'status')]
        pie_data_list_2.append(
            [{'value': i['num'], 'name': i['status'] if i['status'] else '其他'}
             for i in info_data])

        # NOTE(review): when a suite has NO failed cases the first
        # Tier2FuncOs total is appended — even on the master iteration —
        # and when failures exist a 0 is appended.  Both the
        # inverted-looking condition and the use of Tier2FuncOs for the
        # master suite look suspicious; confirm intent with the chart
        # consumer before changing.
        if not info_data:
            total_num_list.append(Tier2FuncOs.objects.first().total_num)
        else:
            total_num_list.append(0)

    # Pass-rate line charts: x axis is 'MM-DD', one y series per OS.
    line_x_data = [i[5:] for i in t_list]
    line_y_data = list()
    line_x_data_master = [i[5:] for i in t_list]
    line_y_data_master = list()
    for run_os in ('x86_CentOS', 'x86_openEuler'):
        tmp_list = list()
        tmp_master_list = list()
        for t in t_list:
            # Index 6 is today (t_list is oldest-first): today's data
            # lives in the live tables, earlier days in history tables.
            if t_list.index(t) == 6:
                t_name = "ReportApp_tier2funcmodel"
                t_name_master = "ReportApp_masfuncmodel"
            else:
                t_name = "ReportApp_histier2funcmodel"
                t_name_master = "ReportApp_hismasfuncmodel"
            for tb in (t_name, t_name_master):
                with connection.cursor() as cursor:
                    # Interpolated values are internal constants (table
                    # names, fixed OS names, generated dates), not user
                    # input, so this f-string SQL is not injectable here.
                    row_query = f'''select sum(total_num) as total_num, 
                        sum(pass_num) as pass_num 
                        from "{tb}" 
                        where run_os='{run_os}' and start_time like '{t}%' 
                        group by run_os;'''
                    cursor.execute(row_query)
                    row = cursor.fetchall()
                if row:
                    if tb == t_name:
                        tmp_list.append(cal_rate(row[0][1], row[0][0]))
                    else:
                        tmp_master_list.append(cal_rate(row[0][1], row[0][0]))
                else:
                    # No runs for that day/OS: plot the rate as 0.
                    if tb == t_name:
                        tmp_list.append(0)
                    else:
                        tmp_master_list.append(0)
        line_y_data.append(tmp_list)
        line_y_data_master.append(tmp_master_list)

    # Stacked bar charts: failed-case counts per problem type per day.
    bar_dimensions = ['日期', 'bug', '用例问题', '其他用例影响', '研发代码变更', '环境问题', '其他']
    bar_source = list()
    bar_source_master = list()
    for t in t_list:
        media_dict = dict.fromkeys(bar_dimensions, 0)
        media_dict_master = dict.fromkeys(bar_dimensions, 0)
        for tb in ("ReportApp_histier2funcinfo", "ReportApp_hismasfuncinfo"):
            with connection.cursor() as cursor:
                row_query = f'''select count(id) as num, 
                    coalesce(problem_type, '其他') 
                    from "{tb}"
                    where run_result='er' and start_time like '%{t}%' 
                    group by problem_type order by problem_type;'''
                cursor.execute(row_query)
                date_row = cursor.fetchall()
            if not date_row:
                if tb == "ReportApp_histier2funcinfo":
                    media_dict.update({'日期': t[5:]})
                    # NOTE(review): 'error' is not in bar_dimensions, so
                    # this adds a key the chart may never read — confirm.
                    media_dict.update({'error': 0})
                else:
                    media_dict_master.update({'日期': t[5:]})
                    media_dict_master.update({'error': 0})
            else:
                if tb == "ReportApp_histier2funcinfo":
                    for item in date_row:
                        media_dict.update({'日期': t[5:]})
                        media_dict.update({item[-1]: item[0]})
                else:
                    for item in date_row:
                        media_dict_master.update({'日期': t[5:]})
                        media_dict_master.update({item[-1]: item[0]})
        bar_source.append(media_dict)
        bar_source_master.append(media_dict_master)

    response_data = {'pie_data_1': pie_data_list[0],
                     'pie_data_2': pie_data_list_2[0],
                     'pie_data_1_master': pie_data_list[1],
                     'pie_data_2_master': pie_data_list_2[1],
                     'tier2_total_num': total_num_list[0],
                     'master_total_num': total_num_list[1],
                     'line_x_data': line_x_data,
                     'line_y_data': line_y_data,
                     'line_x_data_master': line_x_data_master,
                     'line_y_data_master': line_y_data_master,
                     'bar_dimensions': bar_dimensions,
                     'bar_source': bar_source,
                     'bar_dimensions_master': bar_dimensions,
                     'bar_source_master': bar_source_master}

    return HttpResponse(json.dumps(response_data))  # serialize as JSON


def issues(request):
    """Issues entry point: render the issues page template."""
    return render(request, 'issues.html')


@require_http_methods(["GET"])
def get_bugs_data(request):
    """
    Issue/bug statistics for the issues page: issue-category pie,
    bug-state pie, open-bug priority pie and the DI score.

    :param request: Django HttpRequest (GET)
    :return: JSON payload for the three pies plus 'di' and a timestamp
    """
    # Issue pie data.  flat=True yields scalar counts — without it,
    # values_list returns 1-tuples and each pie 'value' would serialize
    # as a one-element list instead of a number.
    # NOTE(review): this unpacking assumes IssuesCategory always holds
    # exactly six rows in this order — confirm against the data loader.
    task_count, requirement_count, bug_count, consulting_count, \
    feature_count, openproject_count = \
        IssuesCategory.objects.all().values_list('issue_count', flat=True)

    # Bug-state pie data.
    backlog_count = IssuesInfo.objects.filter(state='待办的').count()
    confirmed_count = IssuesInfo.objects.filter(state='已确认').count()
    suspend_count = IssuesInfo.objects.filter(state='挂起').count()
    fixing_count = IssuesInfo.objects.filter(state='修复中').count()
    done_count = IssuesInfo.objects.filter(state='已完成').count()
    accepted_count = IssuesInfo.objects.filter(state='已验收').count()
    canceled_count = IssuesInfo.objects.filter(state='已取消').count()

    # Priority pie over bugs that are still open.
    conditions = {
        'state__in': ['待办的', '已确认', '挂起', '修复中']
    }
    unset_bug_priority_count = IssuesInfo.objects.filter(**conditions). \
        values('priority').annotate(count=Count('issue_id')). \
        values('priority', 'count')
    unset_bug_dict = {el['priority']: el['count']
                      for el in unset_bug_priority_count}
    serious_count = unset_bug_dict.get("严重", 0)
    main_count = unset_bug_dict.get("主要", 0)
    secondary_count = unset_bug_dict.get("次要", 0)
    unimportant_count = unset_bug_dict.get("不重要", 0)
    not_specified_count = unset_bug_dict.get("无优先级", 0)

    # DI (defect index): severity-weighted sum of open bugs.
    di = serious_count * 10 + main_count * 3 + secondary_count * 1 \
         + unimportant_count * 0.1

    response_data = {
        'issue_pie': [{'name': '任务', 'value': task_count},
                      {'name': '需求', 'value': requirement_count},
                      {'name': '缺陷', 'value': bug_count},
                      {'name': '咨询', 'value': consulting_count},
                      {'name': '特性', 'value': feature_count},
                      {'name': '开源项目', 'value': openproject_count}],
        'bug_pie': [{'name': '待办的', 'value': backlog_count},
                    {'name': '已确认', 'value': confirmed_count},
                    {'name': '挂起', 'value': suspend_count},
                    {'name': '修复中', 'value': fixing_count},
                    {'name': '已完成', 'value': done_count},
                    {'name': '已验收', 'value': accepted_count},
                    {'name': '已取消', 'value': canceled_count}],
        'bug_priority_pie': [{'name': '严重', 'value': serious_count},
                             {'name': '主要', 'value': main_count},
                             {'name': '次要', 'value': secondary_count},
                             {'name': '不重要', 'value': unimportant_count},
                             {'name': '无优先级', 'value': not_specified_count}],
        'di': di,
        'last_update_time':
            datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S")
    }

    return HttpResponse(json.dumps(response_data))


@require_http_methods(["GET"])
def get_owner_data(request):
    """
    Owner-detail table endpoint.

    Returns, for each Huawei-side tester, the number of bugs filed, the
    number not yet verified, and the number of non-standard reports,
    sorted by bug count and paginated.

    :param request: GET with optional 'limit' (default 5) and 'offset'
    :return: JSON ``{'total': ..., 'rows': [...]}``
    """
    limit = request.GET.get('limit')
    offset = request.GET.get('offset')

    owner_list = ['裴琳倩', '郝梦', '张欢', '宁雅黎', '侯少龙', '屈敏',
                  '郭维彪', '仇颖', '李岩松', '张翱', '姚康', '白小丽',
                  '成嘉琦', '王珂', '李卓', '雷帅', '邹佳良', '夏琦',
                  '李鑫', '雷彦斌', '李鸿基', '宋晶', '程瑶', '张航',
                  '李春龙', '杨若兰', '杨毅翔']
    # Map owner -> row index once, instead of an O(n) list.index per row.
    owner_index = {name: idx for idx, name in enumerate(owner_list)}
    tester_datas = [
        {'owner': name, 'bugs_count': 0, 'no_verified_count': 0,
         'non_standard_count': 0} for name in owner_list
    ]

    conditions1 = {
        'creater__in': owner_list
    }
    # Total bugs filed by each owner.
    bugs_counts = IssuesInfo.objects.filter(**conditions1). \
        values('creater').annotate(count=Count('issue_id')). \
        values('creater', 'count')
    for el in bugs_counts:
        tester_datas[owner_index[el['creater']]]['bugs_count'] \
            = el['count']
    # Bugs not yet verified (regressed) per owner: states 待办的 /
    # 已确认 / 挂起 / 修复中 / 已完成.
    conditions2 = {
        'creater__in': conditions1['creater__in'],
        'state__in': ['待办的', '已确认', '挂起', '修复中', '已完成']
    }
    no_verified_counts = IssuesInfo.objects.filter(**conditions2). \
        values('creater').annotate(count=Count('issue_id')). \
        values('creater', 'count')
    for el in no_verified_counts:
        tester_datas[owner_index[el['creater']]]['no_verified_count'] \
            = el['count']
    # Non-standard reports per owner:
    #   -- priority not set ('无优先级')
    #   -- not linked to a repository (empty category)
    # A report missing both counts twice (original behavior preserved).
    for owner in owner_list:
        query_set = IssuesInfo.objects.all().filter(creater=owner)
        non_standard_count = 0
        for query in query_set:
            if query.priority == '无优先级':
                non_standard_count += 1
            if query.category == '':
                non_standard_count += 1
        tester_datas[owner_index[owner]]['non_standard_count'] \
            = non_standard_count

    # Sort descending by total bug count.
    tester_datas.sort(key=lambda el: el['bugs_count'], reverse=True)
    if not offset:
        offset = 0
    if not limit:
        limit = 5
    paginator = Paginator(tester_datas, limit)

    # Convert a row offset into a 1-based page number.
    page = int(int(offset) / int(limit) + 1)
    response_data = {'total': len(owner_list),
                     'rows': []}

    for info in paginator.page(page):
        response_data['rows'].append({
            "owner": info['owner'],
            "bugs_count": info['bugs_count'],
            "no_verified_count": info['no_verified_count'],
            "non_standard_count": info['non_standard_count']
        })

    return HttpResponse(json.dumps(response_data))


@require_http_methods(["GET"])
def get_issues_detail_data(request):
    """
    Paginated, filterable issue-detail listing.

    GET parameters (all optional): limit, offset, sort, order,
    create_user (comma-separated), owner (substring match), issue_id
    (comma-separated), select_bug_status, select_bug_priority,
    create_time / end_time ('%Y-%m-%d %H:%M:%S').

    :return: JSON ``{'total': ..., 'rows': [...]}``
    """
    limit = request.GET.get('limit')
    offset = request.GET.get('offset')
    sort_column = request.GET.get('sort')
    order = request.GET.get('order')
    create_user = request.GET.get('create_user')
    owner = request.GET.get('owner')
    issue_id = request.GET.get('issue_id')
    select_bug_status = request.GET.get('select_bug_status')
    select_bug_priority = request.GET.get('select_bug_priority')
    # Creation-time window defaults to ["2020-05-01 00:00:00", now].
    create_time_str = request.GET.get('create_time')
    if create_time_str:
        create_time = datetime.datetime.strptime(create_time_str,
                                                 "%Y-%m-%d %H:%M:%S")
    else:
        create_time = datetime.datetime.strptime("2020-05-01 00:00:00",
                                                 "%Y-%m-%d %H:%M:%S")
    end_time_str = request.GET.get('end_time')
    if end_time_str:
        end_time = datetime.datetime.strptime(end_time_str,
                                              "%Y-%m-%d %H:%M:%S")
    else:
        end_time = datetime.datetime.now()
    conditions = {}
    # Truthiness guards: request.GET.get returns None for missing
    # parameters, and the original len(...) calls raised TypeError then.
    if create_user:
        conditions['creater__in'] = create_user.split(',')
    if owner:
        conditions['assignee__contains'] = owner
    if select_bug_status:
        conditions['state__in'] = select_bug_status.split(',')
    if select_bug_priority:
        conditions['priority__in'] = select_bug_priority.split(',')
    # create_time and end_time are always set above, so both filters
    # always apply.
    conditions['create_date__gt'] = create_time
    conditions['create_date__lt'] = end_time
    if issue_id:
        conditions['issue_id__in'] = issue_id.split(',')

    all_records = IssuesInfo.objects.filter(**conditions).order_by(
        "-create_date")
    if sort_column:
        if order == 'desc':
            sort_column = '-%s' % sort_column
        all_records = all_records.order_by(sort_column)

    all_records_count = all_records.count()

    if not offset:
        offset = 0
    if not limit:
        limit = 20
    paginator = Paginator(all_records, limit)

    # Convert a row offset into a 1-based page number.
    page = int(int(offset) / int(limit) + 1)
    response_data = {'total': all_records_count,
                     'rows': []}

    for info in paginator.page(page):
        response_data['rows'].append({
            "issue_no": info.issue_id,
            "project": info.category,
            "title": info.title,
            "priority": info.priority,
            "di": info.di,
            "creator": info.creater,
            "assignee": info.assignee,
            "link": info.link,
            "state": info.state,
            "create_time": info.create_date.strftime(
                "%Y-%m-%d %H:%M:%S") if info.create_date else "",
        })

    return HttpResponse(json.dumps(response_data))


class DecimalEncoder(json.JSONEncoder):
    """JSON encoder that serializes decimal.Decimal values as floats."""

    def default(self, o):
        """Return a float for Decimal; otherwise defer to the base class
        (which raises TypeError for unsupported types)."""
        if isinstance(o, decimal.Decimal):
            return float(o)
        # Must propagate the base implementation's result/exception;
        # the original dropped this return value.
        return super(DecimalEncoder, self).default(o)