# coding:utf8
from flask import Blueprint, render_template, jsonify, request
from utilities.mysql_helper import getresult, executesql
from config import mysql_database
import requests, json
import copy, datetime
from utilities.hbase_helper import get_graph_list

report = Blueprint('report', __name__)

# Module-level caches of Kylin REST metadata, populated lazily by
# data_report_kylin_get_cubes and reused by data_report_kylin_get_result.
cubes = []
models = []
projects = []

# ECharts option skeletons keyed by chart type.  They are deep-copied per
# request in data_report_kylin_change_type and then filled with series,
# axis and title data before being returned to the browser.
type_templates = {
    'pie': {
        'title': [],  # one title entry is appended per sub-pie
        'tooltip': {
            'trigger': 'item',
            'formatter': "{a}<br/>{b} : {c} ({d}%)"
        },
        'series': []
    },
    'bar': {
        # Horizontal slider plus a y-axis slider for zooming large charts.
        'dataZoom': [
            {
                'show': True,
                'start': 0,
                'end': 100
            },
            {
                'type': 'slider',
                'yAxisIndex': 0,
                'filterMode': 'empty'
            },
        ],
        'tooltip': {
            'confine': True,
            'trigger': 'item',
            'enterable': True,
        },
        'grid': {
            'top': '2%',
            'left': '3%',
            'right': '4%',
            'bottom': '7%',
            'containLabel': True
        },
        'xAxis': [],  # category axis appended at request time
        'yAxis': [{
            'type': 'value'
        }],
        'series': []
    },
    'line': {
        'dataZoom': [
            {
                'show': True,
                'start': 0,
                'end': 100
            },
            {
                'type': 'slider',
                'yAxisIndex': 0,
                'filterMode': 'empty'
            },
        ],
        'tooltip': {
            'confine': True,
            # NOTE(review): 'xAxis' is not a standard ECharts tooltip
            # trigger ('item'/'axis'/'none') -- confirm intended value.
            'trigger': 'xAxis',
            'enterable': True,
        },
        'grid': {
            'top': '2%',
            'left': '3%',
            'right': '4%',
            'bottom': '7%',
            'containLabel': True
        },
        'xAxis': [],
        'yAxis': [{
            'type': 'value'
        }],
        'series': []
    },
}


@report.route('/data_report_hive')
def data_query_hive():
    """Serve the Hive data-report page."""
    template_name = 'data_report/data_report_hive.html'
    return render_template(template_name)


@report.route('/test')
def test():
    """Serve the test page."""
    template_name = 'data_report/test.html'
    return render_template(template_name)


@report.route('/get_report_dbs')
def get_report_dbs():
    """Return the distinct database names tracked in ods_update_count.

    Response JSON: {'data': [[database_name], ...]}.
    """
    sql = 'select distinct database_name from ods_update_count'
    dataset = getresult(sql, (), mysql_database['portal'])
    # Materialize the result rows directly instead of a manual append loop.
    return jsonify(data=list(dataset))


@report.route('/get_report_tables')
def get_report_tables():
    """Return the distinct (table_name, database_name) pairs for a database.

    Query args:
        database: database name to filter on (untrusted user input).

    Response JSON: {'data': [[table_name, database_name], ...]}.
    """
    database = request.args['database']
    # Parameterized query instead of string concatenation -- `database`
    # comes straight from the request.  Assumes getresult forwards the
    # params tuple to a MySQLdb-style driver using %s placeholders
    # (every other call site in this file passes an empty tuple) --
    # TODO confirm against utilities.mysql_helper.
    sql = ('select distinct table_name,database_name from ods_update_count '
           'where database_name=%s')
    dataset = getresult(sql, (database,), mysql_database['portal'])
    return jsonify(data=list(dataset))


@report.route('/get_report_pie_data')
def get_report_pie_data():
    """Build an ECharts pie-chart option of row-count shares for a period.

    Query args:
        database_type: 'all' for a per-database breakdown, otherwise a
            database name for a per-table breakdown.
        date_type: 'year' | 'month' | 'day' | 'season' | 'week'.
        date_txt: human-readable period label (e.g. u'2018年3月'), parsed
            positionally around the CJK unit characters.

    Response JSON: {'data': <echarts pie option dict>}.
    """
    database_type = request.args['database_type']
    date_type = request.args['date_type']
    date_txt = request.args['date_txt']
    # Subquery resolving the period label to the latest snapshot date in
    # that period; the 'day' branch replaces it with the literal date.
    sql_days = 'select max(ods_update_count.date) from ods_update_count join dim_date on ods_update_count.date=dim_date.date where '
    sql = ''
    title = ''
    if date_type == 'year':
        sql_days = sql_days + 'year="' + date_txt[:-1] + '"'
    elif date_type == 'month':
        sql_days = sql_days + 'year="' + date_txt[:date_txt.index(
            u'年')] + '" and month="' + str(
                int(date_txt[date_txt.index(u'年') + 1:-1])) + '"'
    elif date_type == 'day':
        sql_days = '"' + date_txt + '"'
    elif date_type == 'season':
        sql_days = sql_days + 'year="' + date_txt[:date_txt.index(
            u'年')] + '" and quarter="' + date_txt[date_txt.index(u'年') + 1:
                                                  date_txt.index(u'季度')] + '"'
    elif date_type == 'week':
        sql_days = sql_days + 'year="' + date_txt[:date_txt.index(
            u'年')] + '" and week_of_year="' + str(
                int(date_txt[date_txt.index(u'年') + 1:date_txt.index(
                    u'周')])) + '"'
    # NOTE(review): SQL assembled by concatenating request arguments is
    # injectable; parameterizing would be safer but is left unchanged here
    # to avoid altering the dynamic subquery structure.
    if database_type == 'all':
        title = date_txt + u' 各库总量占比情况'
        sql = 'select DISTINCT(a.database_name),IFNULL(b.sumall,0) from ods_update_count as a LEFT JOIN (' + 'select database_name,sum(all_count) as sumall from ods_update_count where date = (' + sql_days + ') group by database_name) as b on a.database_name=b.database_name'
    else:
        title = date_txt + ' ' + database_type + u'库各表总量占比情况'
        sql = 'select table_name,all_count from ods_update_count where date = (' + sql_days + ') and database_name="' + database_type + '"'

    data = []
    names = []
    dataset = getresult(sql, (), mysql_database['portal'])

    # (Removed a dead pre-loop that summed row[1] into an unused local
    # `count`; besides being unused, it would exhaust `dataset` first if
    # getresult ever returned a one-shot cursor.)
    for row in dataset:
        names.append(row[0])
        data.append({'name': row[0], 'value': row[1]})
    option = {
        'title': {
            'text': title,
            'x': 'center'
        },
        'tooltip': {
            'trigger': 'item',
            'formatter': "{a} <br/>{b} : {c} ({d}%)"
        },
        'legend': {
            'orient': 'vertical',
            'left': 'left',
            'data': names
        },
        'series': [{
            'name': '数据详情：',
            'type': 'pie',
            'radius': '55%',
            'center': ['50%', '50%'],
            'data': data,
            'itemStyle': {
                'emphasis': {
                    'shadowBlur': 10,
                    'shadowOffsetX': 0,
                    'shadowColor': 'rgba(0, 0, 0, 0.5)'
                }
            }
        }]
    }
    return jsonify(data=option)


@report.route('/get_report_bar_data')
def get_report_bar_data():
    """Build bar(increment)+line(total) chart data over a date range.

    Query args:
        start_date_pick / end_date_pick: period labels such as u'2018年',
            u'2018年3月', '2018-03-01', u'2018年12周', u'2018年1季度',
            parsed positionally around the CJK unit characters.
        database_table_type: database name or 'all'.
        database_table: table name or 'all'.
        period_type: 'year' | 'month' | 'day' | 'week' | 'season'.

    Returns JSON with ECharts series ('data'), x-axis 'labels', chart
    'title', legend 'names' and a 'source' axis caption.
    """
    start_date_pick = request.args['start_date_pick']
    end_date_pick = request.args['end_date_pick']
    database_table_type = request.args['database_table_type']
    database_table = request.args['database_table']
    period_type = request.args['period_type']
    sql_date = ''
    sql_count = ''
    sql_max_date = ''
    sql_add = ''
    source = ''
    title = ''
    # Pick the per-snapshot totals query; '%s' is later filled with a
    # '","'-joined list of snapshot dates.
    # NOTE(review): SQL built by concatenating request args is injectable.
    if database_table == 'all':
        if database_table_type == 'all':
            sql_count = 'select date,sum(all_count),sum(update_count) from ods_update_count where date in ("%s") group by date'
            title = start_date_pick + u'-' + end_date_pick + u' 所有数据变化情况'
        else:
            sql_count = 'select date,sum(all_count),sum(update_count) from ods_update_count where date in ("%s") and database_name="' + database_table_type + '" group by date'
            title = start_date_pick + u'-' + end_date_pick + u' ' + database_table_type + u'库数据变化情况'
    else:
        sql_count = 'select date,all_count,update_count from ods_update_count where table_name="' + database_table + '" and date in("%s")'
        title = start_date_pick + u'-' + end_date_pick + u' ' + database_table + u'表数据变化情况'

    # Each branch below fills date_dict: snapshot_date -> [period_key1,
    # period_key2, total, increment] (index 1 is '' for year/day).
    if period_type == 'year':
        # Trailing char of the picks is u'年'; slice it off to get the year.
        sql_max_date = 'select year,max(ods_update_count.date) from ods_update_count join dim_date on ods_update_count.date=dim_date.date where year>="' + start_date_pick[:
                                                                                                                                                                           -1] + '" and year<="' + end_date_pick[:
                                                                                                                                                                                                                 -1] + '" group by year'
        sql_date = 'select DISTINCT year,ods_update_count.date from ods_update_count join dim_date on ods_update_count.date=dim_date.date where year>="' + start_date_pick[:
                                                                                                                                                                           -1] + '" and year<="' + end_date_pick[:
                                                                                                                                                                                                                 -1] + '"'
        if database_table_type == 'all':
            sql_add = 'select b.year,sum(a.update_count) from ods_update_count a join (' + sql_date + ') b on a.date=b.date group by b.year'
        elif database_table == 'all':
            sql_add = 'select b.year,sum(a.update_count) from ods_update_count a join (' + sql_date + ') b on a.date=b.date where a.database_name="' + database_table_type + '" group by b.year'
        else:
            sql_add = 'select b.year,sum(a.update_count) from ods_update_count a join (' + sql_date + ') b on a.date=b.date where a.database_name="' + database_table_type + '" and a.table_name="' + database_table + '" group by b.year'
        dataset = getresult(sql_max_date, (), mysql_database['portal'])
        date_dict = {}
        for row in dataset:
            date_dict[row[1]] = [
                row[0],
                '',
            ]
        result = getresult(sql_count % '","'.join(date_dict.keys()), (),
                           mysql_database['portal'])
        for row in result:
            date_dict[row[0]].append(row[1])
        dataset = getresult(sql_add, (), mysql_database['portal'])
        for row in dataset:
            for key in date_dict:
                if row[0] == date_dict[key][0]:
                    date_dict[key].append(row[1])
                    break
        source = u'年'
    elif period_type == 'month':
        # Strip the CJK unit chars to get comparable 'YYYYMM'-style keys.
        start_year_month = start_date_pick.replace(u'年', '').replace(u'月', '')
        end_year_month = end_date_pick.replace(u'年', '').replace(u'月', '')
        sql_max_date = 'select year,month,max(ods_update_count.date) from ods_update_count join dim_date on ods_update_count.date=dim_date.date where concat(year,lpad(month,2,"0"))>="' + start_year_month + '" and concat(year,lpad(month,2,"0"))<="' + end_year_month + '" group by year,month'
        sql_date = 'select DISTINCT year,month,ods_update_count.date from ods_update_count join dim_date on ods_update_count.date=dim_date.date where concat(year,lpad(month,2,"0"))>="' + start_year_month + '" and concat(year,lpad(month,2,"0"))<="' + end_year_month + '"'
        if database_table_type == 'all':
            sql_add = 'select b.year,b.month,sum(a.update_count) from ods_update_count a join (' + sql_date + ') b on a.date=b.date group by b.year,b.month'
        elif database_table == 'all':
            sql_add = 'select b.year,b.month,sum(a.update_count) from ods_update_count a join (' + sql_date + ') b on a.date=b.date where a.database_name="' + database_table_type + '" group by b.year,b.month'
        else:
            sql_add = 'select b.year,b.month,sum(a.update_count) from ods_update_count a join (' + sql_date + ') b on a.date=b.date where a.database_name="' + database_table_type + '" and a.table_name="' + database_table + '" group by b.year,b.month'
        dataset = getresult(sql_max_date, (), mysql_database['portal'])
        date_dict = {}
        for row in dataset:
            date_dict[row[2]] = [
                row[0],
                row[1],
            ]
        result = getresult(sql_count % '","'.join(date_dict.keys()), (),
                           mysql_database['portal'])
        for row in result:
            date_dict[row[0]].append(row[1])
        dataset = getresult(sql_add, (), mysql_database['portal'])
        for row in dataset:
            for key in date_dict:
                if row[0] == date_dict[key][0] and row[1] == date_dict[key][1]:
                    date_dict[key].append(row[2])
                    break
        source = u'年/月'
    elif period_type == 'day':
        # Plain date range; picks are already literal dates here.
        sql_max_date = 'select DISTINCT date from ods_update_count where date>="' + start_date_pick + '" and date<="' + end_date_pick + '"'
        dataset = getresult(sql_max_date, (), mysql_database['portal'])
        date_dict = {}
        for row in dataset:
            date_dict[row[0]] = [
                row[0],
                '',
            ]
        result = getresult(sql_count % '","'.join(date_dict.keys()), (),
                           mysql_database['portal'])
        for row in result:
            date_dict[row[0]].append(row[1])
            date_dict[row[0]].append(row[2])
        source = u''
    elif period_type == 'week':
        # Zero-pad single-digit week numbers so 'YYYYWW' strings compare
        # correctly as text.
        if len(start_date_pick[start_date_pick.index(u'年') + 1:-1]) == 1:
            start_year_week = start_date_pick[:start_date_pick.index(
                u'年')] + '0' + start_date_pick[start_date_pick.index(u'年') + 1:
                                               -1]
        else:
            start_year_week = start_date_pick[:start_date_pick.index(
                u'年')] + start_date_pick[start_date_pick.index(u'年') + 1:-1]
        if len(end_date_pick[end_date_pick.index(u'年') + 1:-1]) == 1:
            end_year_week = end_date_pick[:end_date_pick.index(
                u'年')] + '0' + end_date_pick[end_date_pick.index(u'年') + 1:-1]
        else:
            end_year_week = end_date_pick[:end_date_pick.index(
                u'年')] + end_date_pick[end_date_pick.index(u'年') + 1:-1]
        sql_max_date = 'select year,week_of_year,max(ods_update_count.date) from ods_update_count join dim_date on ods_update_count.date=dim_date.date where concat(year,lpad(week_of_year,2,"0"))>="' + start_year_week + '" and concat(year,lpad(week_of_year,2,"0"))<="' + end_year_week + '" group by year,week_of_year'
        sql_date = 'select DISTINCT year,week_of_year,ods_update_count.date from ods_update_count join dim_date on ods_update_count.date=dim_date.date where concat(year,lpad(week_of_year,2,"0"))>="' + start_year_week + '" and concat(year,lpad(week_of_year,2,"0"))<="' + end_year_week + '"'
        if database_table_type == 'all':
            sql_add = 'select b.year,b.week_of_year,sum(a.update_count) from ods_update_count a join (' + sql_date + ') b on a.date=b.date group by b.year,b.week_of_year'
        elif database_table == 'all':
            sql_add = 'select b.year,b.week_of_year,sum(a.update_count) from ods_update_count a join (' + sql_date + ') b on a.date=b.date where a.database_name="' + database_table_type + '" group by b.year,b.week_of_year'
        else:
            sql_add = 'select b.year,b.week_of_year,sum(a.update_count) from ods_update_count a join (' + sql_date + ') b on a.date=b.date where a.database_name="' + database_table_type + '" and a.table_name="' + database_table + '" group by b.year,b.week_of_year'
        dataset = getresult(sql_max_date, (), mysql_database['portal'])
        date_dict = {}
        for row in dataset:
            date_dict[row[2]] = [
                row[0],
                row[1],
            ]
        result = getresult(sql_count % '","'.join(date_dict.keys()), (),
                           mysql_database['portal'])
        for row in result:
            date_dict[row[0]].append(row[1])
        dataset = getresult(sql_add, (), mysql_database['portal'])
        for row in dataset:
            for key in date_dict:
                if row[0] == date_dict[key][0] and row[1] == date_dict[key][1]:
                    date_dict[key].append(row[2])
                    break
        source = u'年/周'
    elif period_type == 'season':
        # Drop u'年' and the trailing u'季度' to get 'YYYYQ' keys.
        start_year_season = start_date_pick[:start_date_pick.index(
            u'年')] + start_date_pick[start_date_pick.index(u'年') + 1:-2]
        end_year_season = end_date_pick[:end_date_pick.index(
            u'年')] + end_date_pick[end_date_pick.index(u'年') + 1:-2]
        sql_max_date = 'select year,quarter,max(ods_update_count.date) from ods_update_count join dim_date on ods_update_count.date=dim_date.date where concat(year,quarter)>="' + start_year_season + '" and concat(year,quarter)<="' + end_year_season + '" group by year,quarter'
        dataset = getresult(sql_max_date, (), mysql_database['portal'])
        sql_date = 'select DISTINCT year,quarter,ods_update_count.date from ods_update_count join dim_date on ods_update_count.date=dim_date.date where concat(year,quarter)>="' + start_year_season + '" and concat(year,quarter)<="' + end_year_season + '"'
        if database_table_type == 'all':
            sql_add = 'select b.year,b.quarter,sum(a.update_count) from ods_update_count a join (' + sql_date + ') b on a.date=b.date group by b.year,b.quarter'
        elif database_table == 'all':
            sql_add = 'select b.year,b.quarter,sum(a.update_count) from ods_update_count a join (' + sql_date + ') b on a.date=b.date where a.database_name="' + database_table_type + '" group by b.year,b.quarter'
        else:
            sql_add = 'select b.year,b.quarter,sum(a.update_count) from ods_update_count a join (' + sql_date + ') b on a.date=b.date where a.database_name="' + database_table_type + '" and a.table_name="' + database_table + '" group by b.year,b.quarter'
        date_dict = {}
        for row in dataset:
            date_dict[row[2]] = [
                row[0],
                row[1],
            ]
        result = getresult(sql_count % '","'.join(date_dict.keys()), (),
                           mysql_database['portal'])
        for row in result:
            date_dict[row[0]].append(row[1])
        dataset = getresult(sql_add, (), mysql_database['portal'])
        for row in dataset:
            for key in date_dict:
                if row[0] == date_dict[key][0] and row[1] == date_dict[key][1]:
                    date_dict[key].append(row[2])
                    break
        source = u'年/季度'
    # NOTE(review): an unrecognized period_type would leave date_dict
    # unbound and raise below -- confirm the client only sends the five
    # values handled above.  Also `sum` shadows the builtin here.
    labels, sum, sum_add = [], [], []
    names = ['增量', '本期总量']
    # date_dict values: [key1, key2, total, increment]; keys sorted by
    # snapshot date so the chart reads chronologically.
    for key in sorted(date_dict.keys()):
        sum.append(date_dict[key][2])
        sum_add.append(date_dict[key][3])
        if date_dict[key][1] != '':
            labels.append(date_dict[key][0] + '/' + date_dict[key][1])
        else:
            labels.append(date_dict[key][0])

    data = [{
        'name': '增量',
        'type': 'bar',
        'data': sum_add,
        'label': {
            'normal': {
                'show': True,
                'position': 'top'
            },
        }
    }, {
        'name': '本期总量',
        'type': 'line',
        'data': sum,
        'label': {
            'normal': {
                'show': True,
                'position': 'top'
            },
        }
    }]

    return jsonify(
        data=data, labels=labels, title=title, names=names, source=source)


@report.route('/data_report_kylin')
def data_report_kylin():
    """Serve the Kylin data-report page."""
    template_name = 'data_report/data_report_kylin.html'
    return render_template(template_name)


@report.route('/data_report_kylin_get_cubes')
def data_report_kylin_get_cubes():
    """Describe a Kylin cube (dimensions, measures, tables) for the UI.

    With no query args, (re)loads the module-level cubes/models/projects
    caches from the Kylin REST API and describes the first cube; with a
    'cube_name' arg, reuses the caches and describes that cube.
    Returns JSON status 'ok' with the cube description, or
    'request error' when no READY cube is available.
    """
    global cubes, models, projects
    cubes_ready = []
    measures = []
    dimensions = {}  # table name -> list of column (or derived-column) names
    tables = []
    # Hard-coded Basic auth token (base64 of ADMIN:KYLIN).
    # NOTE(review): credentials and host should live in config.
    header = {
        'Authorization': 'Basic QURNSU46S1lMSU4=',
        'Content-Type': 'application/json;charset=UTF-8'
    }
    cube_name = ''
    if request.args.keys():
        # Cached path: list READY cubes from the module cache.
        for item in cubes:
            if item['status'] == 'READY':
                cubes_ready.append(item['name'])
        cube_name = request.args['cube_name']
    else:
        url_cubes = 'http://172.19.100.23:7070/kylin/api/cubes'
        url_models = 'http://172.19.100.23:7070/kylin/api/models'
        url_project = 'http://172.19.100.23:7070/kylin/api/projects'

        result_models = requests.get(url_models, headers=header)
        if result_models.status_code == 200:
            models = json.loads(result_models.content)
        result_project = requests.get(url_project, headers=header)
        if result_project.status_code == 200:
            projects = json.loads(result_project.content)
        result = requests.get(url_cubes, headers=header)
        if result.status_code == 200:
            cubes = json.loads(result.content)
            for item in cubes:
                if item['status'] == 'READY':
                    cubes_ready.append(item['name'])
        # NOTE(review): raises IndexError if the cube list is empty or the
        # request above failed and the cache is still [] -- confirm.
        cube_name = cubes[0]['name']
    if cubes_ready:
        url_cube = 'http://172.19.100.23:7070/kylin/api/cube_desc/' + cube_name
        result = requests.get(url_cube, headers=header)
        fact_table, dimensions_tables = '', ''
        if result.status_code == 200:
            cube_desc = json.loads(result.content)
            # Group dimension columns by table; derived dimensions are
            # flattened into a single comma-joined entry.
            for item in cube_desc[0]['dimensions']:
                if item['table'] in dimensions.keys():
                    if item['derived'] is None:
                        dimensions[item['table']].append(item['column'])
                    else:
                        dimensions[item['table']].append(
                            ','.join(item['derived']))
                else:
                    if item['derived'] is None:
                        dimensions[item['table']] = [item['column']]
                    else:
                        dimensions[item['table']] = [','.join(item['derived'])]
                tables.append(item['table'])
            # Keep only column-typed measure parameters (skip constants).
            for item in cube_desc[0]['measures']:
                if item['function']['parameter']['type'] == 'column':
                    measures.append(item['function']['parameter']['value'])
            # Resolve fact table and lookup-table summary from the model.
            for model in models:
                if model['name'] == cube_desc[0]['model_name']:
                    fact_table = model['fact_table']
                    dimensions_tables = ' | '.join([
                        item['table'] + '(' + item['join']['type'] + ')'
                        for item in model['lookups']
                    ])
                    break
            return jsonify(
                status='ok',
                cubes=cubes_ready,
                measures=list(set(measures)),
                dimensions=dimensions,
                tables=list(set(tables)),
                fact_table=fact_table,
                dimensions_tables=dimensions_tables)
    return jsonify(status='request error')


@report.route('/data_report_kylin_get_result')
def data_report_kylin_get_result():
    """Run a query against Kylin and return table-ready rows.

    Either executes a raw 'sql' query arg, or assembles a
    SELECT/JOIN/WHERE/GROUP BY/ORDER BY statement from the compute,
    measure, dimensions, tables, filter and orderby args, then posts it
    to the Kylin query API for the project owning the chosen cube.
    Relies on the module-level projects/models caches populated by
    data_report_kylin_get_cubes; returns None (HTTP 200 empty) when
    those caches are empty.
    """
    # Pool of table aliases; popped once per joined table.
    allias = ['a', 'b', 'c', 'd', 'e', 'f', 'g', 'h']
    global projects, models, cubes
    header = {
        'Authorization': 'Basic QURNSU46S1lMSU4=',
        'Content-Type': 'application/json;charset=UTF-8'
    }
    if projects and models:
        project_name = None
        args = request.args
        current_cube_name = args['cube_name']
        url_cube = 'http://172.19.100.23:7070/kylin/api/cube_desc/' + current_cube_name
        result = requests.get(url_cube, headers=header)
        current_model_name = ""
        if result.status_code == 200:
            current_model_name = json.loads(result.content)[0]['model_name']
        # Find the project that contains the cube's model.
        for item in projects:
            if current_model_name in item['models']:
                project_name = item['name']
                break
        current_model_json = None
        for model in models:
            if current_model_name == model['name']:
                current_model_json = model
                break
        url = 'http://172.19.100.23:7070/kylin/api/query'
        if 'sql' in args:
            # Raw SQL passthrough from the UI.
            json_data = {
                'sql': args['sql'],
                'project': project_name,
                'offset': 0
            }
            result = requests.post(url, headers=header, json=json_data)
        else:
            compute = args['compute']
            measure = '"' + args['measure'] + '"'
            dimensions = []
            # Quote each dimension column; bare column names inherit the
            # table of the previous qualified dimension (temp_table).
            if args['dimensions'] != '':
                temp_table = ''
                for item in args['dimensions'].split(','):
                    if item.find('.') > -1:
                        dimensions.append(item[:item.rindex('.') + 1] + u'"' +
                                          item[item.rindex('.') + 1:] + u'"')
                        start = item.rindex('.')
                        temp_table = item[:item.index('.', start)]
                    else:
                        dimensions.append(temp_table + '.' + '"' + item + '"')
            filter_condition = args['filter']
            orderby_condition = args['orderby']
            join_tables = set(args['tables'].split(','))
            fact_table = current_model_json['fact_table']
            if fact_table in join_tables:
                join_tables.remove(fact_table)
            # One alias per lookup table, plus one for the fact table.
            join_tables_dict = dict([(item, allias.pop())
                                     for item in list(join_tables)])
            fact_table_dict = dict([(fact_table, allias.pop())])

            if compute == 'COUNT_DISTINCT':
                compute = 'count(distinct '
            else:
                compute = compute + '('
            if dimensions:
                # Rewrite table prefixes to their aliases.
                dimensions_ = dimensions[:]
                dimensions = []
                for item in dimensions_:
                    if item[:item.rindex('.')] in join_tables_dict.keys():
                        dimensions.append(
                            item.replace(item[:item.rindex('.')],
                                         join_tables_dict[item[:item.rindex(
                                             '.')]]))
                    else:
                        dimensions.append(
                            item.replace(fact_table, fact_table_dict[
                                fact_table]))
                sql = ''.join([
                    'select ', compute, fact_table_dict[fact_table], '.',
                    measure, ') as "result",', ','.join(dimensions), ' from ',
                    fact_table, ' ', fact_table_dict[fact_table]
                ])
            else:
                sql = ''.join([
                    'select ', compute, fact_table_dict[fact_table], '.',
                    measure, ')', ' from ', fact_table, ' ',
                    fact_table_dict[fact_table]
                ])
            # join
            join_sql = ''
            for lookup in current_model_json['lookups']:
                if join_tables and lookup['table'] in join_tables:
                    temp_sql = ''
                    # First PK/FK pair is quoted; subsequent pairs are not
                    # -- NOTE(review): looks inconsistent, confirm intended.
                    for i, item in enumerate(lookup['join']['primary_key']):
                        if i == 0:
                            temp_sql = join_tables_dict[lookup['table']] + '."' + item + '"=' + fact_table_dict[fact_table] + '."' + lookup['join']['foreign_key'][i] + '"'
                        else:
                            temp_sql += ' and ' + join_tables_dict[lookup['table']] + '.' + item + '=' + \
                                        fact_table_dict[fact_table] + '.' + lookup['join'][
                                            'foreign_key'][i]
                    join_sql += ''.join([
                        ' ', lookup['join']['type'], ' join ', lookup['table'],
                        ' ', join_tables_dict[lookup['table']], ' on ',
                        temp_sql
                    ])
            sql = sql + join_sql
            # where
            if filter_condition != '':
                if filter_condition.strip().find('where') == 0:
                    sql += ' ' + filter_condition
                else:
                    sql += ' where ' + filter_condition
            # group
            if dimensions:
                sql += ' group by ' + ','.join(dimensions)
            # order by
            if orderby_condition != '':
                if orderby_condition.strip().find('order by') == 0:
                    sql += ' ' + orderby_condition
                else:
                    sql += ' order by ' + orderby_condition
            # Strip the DEFAULT schema prefix Kylin does not need here.
            sql = sql.replace('DEFAULT.', '') if 'DEFAULT.' in sql else sql
            json_data = {'sql': sql, 'project': project_name, 'offset': 0}
            result = requests.post(url, headers=header, json=json_data)
        if result.status_code == 200:
            json_result = json.loads(result.content)
            # Column headers for the front-end table component.
            columns = []
            for i, item in enumerate(json_result['columnMetas']):
                columns.append({'title': item['label']})
            rows = json_result['results']
            return jsonify(
                status='ok', columns=columns, rows=rows, data=json_result)
        else:
            print 'error:' + str(result.status_code)
            return jsonify(status='error:' + str(result.status_code))


@report.route('/data_report_kylin_change_type', methods=['POST', 'GET'])
def data_report_kylin_change_type():
    """Convert a Kylin query result into a pie/bar/line ECharts option.

    Form args:
        type: 'pie_' | 'bar_' | 'line_' chart type selector.
        data: JSON string of the Kylin result (columnMetas + results),
            where column 0 is the measure and columns 1.. are dimensions.

    Supports 1 or 2 dimensions; returns status 'error' otherwise.
    """
    global type_templates
    type_txt = request.form['type'].strip()
    data_txt = request.form['data'].strip()
    json_data = json.loads(data_txt)
    if len(json_data['columnMetas']) == 1:
        return jsonify(status='error', message='no dimensions!')
    else:
        # Dimension column labels (skip the measure at index 0).
        cate_group = []
        for i, item in enumerate(json_data['columnMetas']):
            if i > 0:
                cate_group.append(item['label'])
        # Map each dimension label to its distinct values, and order the
        # dimensions by cardinality (fewest distinct values first).
        cate_group_dict = {}
        cate_group_ordered = []
        for i, key in enumerate(cate_group):
            cate_group_dict[key] = list(
                set([item[i + 1] for item in json_data['results']]))
            cate_group_ordered.append((key, len(cate_group_dict[key])))
        # Python 2 cmp-style sort by cardinality ascending.
        cate_group_ordered = sorted(
            cate_group_ordered, cmp=lambda x, y: cmp(x[1], y[1]))
        if len(cate_group) > 2:
            return jsonify(
                status='error', message='only support less than 2 dimensions!')
        if type_txt == "pie_":
            type_templates_pie = copy.deepcopy(type_templates['pie'])
            if len(cate_group) == 1:
                # Single dimension: one pie of value/name pairs.
                type_data = {
                    'name':
                    '',
                    'type':
                    'pie',
                    'radius':
                    '50%',
                    'center': ['50%', '50%'],
                    'data': [{
                        'value': item[0],
                        'name': item[1]
                    } for item in json_data['results']]
                }
                type_templates_pie['series'].append(type_data)
                return jsonify(status='ok', data=type_templates_pie, width=100)
            elif len(cate_group) == 2:
                # Two dimensions: one pie per value of the lower-cardinality
                # dimension, laid out horizontally (start_x tracks the x
                # offset; width grows 400px per pie).
                start_x = 200
                index = cate_group.index(cate_group_ordered[0][0]) + 1
                index2 = 1 if index == 2 else 2
                for item in cate_group_dict[cate_group_ordered[0][0]]:
                    type_data = {
                        'name': item,
                        'type': 'pie',
                        'radius': '100',
                        'center': [str(start_x + 50), '300'],
                        'data': []
                    }
                    type_title = {'text': item, 'x': str(start_x)}

                    for r_l in json_data['results']:
                        if r_l[index] == item:
                            type_data['data'].append({
                                'value': r_l[0],
                                'name': r_l[index2]
                            })
                    type_templates_pie['series'].append(type_data)
                    type_templates_pie['title'].append(type_title)
                    start_x += 400
                return jsonify(
                    status='ok', data=type_templates_pie, width=start_x - 200)
        elif type_txt == 'bar_':
            type_templates_bar = copy.deepcopy(type_templates['bar'])
            if len(cate_group) == 1:
                # Single dimension: one bar series over the dimension values.
                type_data = {
                    'name': '',
                    'type': 'bar',
                    'data': [item[0] for item in json_data['results']]
                }
                type_title = {
                    'type': 'category',
                    'data': [item[1] for item in json_data['results']]
                }
                type_templates_bar['xAxis'].append(type_title)
                type_templates_bar['series'].append(type_data)
                return jsonify(status='ok', data=type_templates_bar, width=100)
            else:
                # Two dimensions: stacked bars, x-axis from the
                # lower-cardinality dimension; '-' marks empty slots.
                index = cate_group.index(cate_group_ordered[0][0]) + 1
                index2 = 1 if index == 2 else 2

                type_title = {
                    'type':
                    'category',
                    'data': [
                        item
                        for item in cate_group_dict[cate_group_ordered[0][0]]
                    ]
                }
                for r_l in json_data['results']:
                    type_data = {
                        'name': '',
                        'type': 'bar',
                        'stack': 'test',
                        'data': []
                    }
                    type_data['name'] = r_l[index2]
                    for item in cate_group_dict[cate_group_ordered[0][0]]:
                        if r_l[index] == item:
                            type_data['data'].append(r_l[0])
                        else:
                            type_data['data'].append('-')
                    type_templates_bar['series'].append(type_data)
                type_templates_bar['xAxis'].append(type_title)
                return jsonify(status='ok', data=type_templates_bar, width=100)
        elif type_txt == 'line_':
            type_templates_line = copy.deepcopy(type_templates['line'])
            if len(cate_group) == 1:
                type_data = {
                    'name': 'value',
                    'type': 'line',
                    'data': [item[0] for item in json_data['results']]
                }
                type_title = {
                    'type': 'category',
                    'data': [item[1] for item in json_data['results']]
                }
                type_templates_line['xAxis'].append(type_title)
                type_templates_line['series'].append(type_data)
                return jsonify(
                    status='ok', data=type_templates_line, width=100)
            else:
                # Two dimensions: one line series per result row, mirroring
                # the stacked-bar layout above.
                index = cate_group.index(cate_group_ordered[0][0]) + 1
                index2 = 1 if index == 2 else 2

                type_title = {
                    'type':
                    'category',
                    'data': [
                        item
                        for item in cate_group_dict[cate_group_ordered[0][0]]
                    ]
                }
                for r_l in json_data['results']:
                    type_data = {'name': '', 'type': 'line', 'data': []}
                    type_data['name'] = r_l[index2]
                    for item in cate_group_dict[cate_group_ordered[0][0]]:
                        if r_l[index] == item:
                            type_data['data'].append(r_l[0])
                        else:
                            type_data['data'].append('-')
                    type_templates_line['series'].append(type_data)
                type_templates_line['xAxis'].append(type_title)
                return jsonify(
                    status='ok', data=type_templates_line, width=100)
    # Fallback for an unrecognized chart type.
    return jsonify(status='ok')


@report.route('/data_report_data_quality')
def data_report_data_quality():
    """Render the data-quality report list page.

    Loads one row per report table (name plus latest insert time) and
    hands the template a DataTables-style {columns, dataset} payload.
    """
    sql = 'select tablename,inserttime from data_quality_management GROUP by tablename ORDER by inserttime desc'
    rows = getresult(sql, (), mysql_database['portal'])
    # Leading None is a placeholder for the row-number column the
    # client renders itself.
    dataset = []
    for row in rows:
        dataset.append((None, row[0], str(row[1])))
    titles = ['序号', '报表项目名', '生成时间', '查看', '删除']
    payload = {
        'columns': [{'title': t} for t in titles],
        'dataset': dataset,
    }
    return render_template(
        'data_report/data_report_data_quality.html',
        project_info=json.dumps(payload))


@report.route('/data_report_data_quality_delete_report')
def data_report_data_quality_delete_report():
    """Delete every data-quality row for one report table.

    Query args:
        projectname: table name whose rows are removed.

    Returns the literal string 'ok'.
    """
    tablename = request.args['projectname']
    # Parameterized query: the previous %-interpolation was SQL-injectable
    # since `projectname` comes straight from the request.
    sql = 'delete from data_quality_management where tablename=%s'
    executesql(sql, (tablename,), mysql_database['portal'])
    return 'ok'


@report.route('/data_report_data_quality_get_report')
def data_report_data_quality_get_report():
    """Return the data-quality report rows for one table (or all) as JSON.

    Query args:
        tablename: report table to filter on; the literal 'all' selects
            every row.

    Returns a JSON body {'data': {'columns': [...], 'dataset': [...]}}
    shaped for the DataTables widget.
    """
    table_name = request.args['tablename']
    sql_select = 'select * from data_quality_report'
    params = ()
    if table_name != 'all':
        # Parameterized filter: the original concatenated the raw request
        # value into the SQL string (injection risk).
        sql_select += ' where tablename=%s'
        params = (table_name,)
    result_report = getresult(sql_select, params, mysql_database['portal'])

    titles = [
        '项目', '字段', '数据类型', '行数', 'NULL行数', '空字符行数',
        '唯一值行数', '众数', '少数', '分位数', '最大值', '最小值', '均值',
        '类型转换失败行数'
    ]
    result = {
        'columns': [{'title': t} for t in titles],
        'dataset': result_report
    }
    return jsonify(data=result)


def get_dataquality_table_config():
    """Assemble the data-quality project table payload for the UI.

    Runs four SELECTs in a single multi-statement batch and joins the
    result sets in Python.

    Returns:
        (result, config): ``result`` is a {'columns', 'dataset'} dict for
        the DataTables view; ``config`` maps config id -> [name, col2, col3].

    NOTE(review): the positional indices below (item[0]..item[9]) encode
    the `select *` column order of each table, which is not visible from
    this file -- confirm against the schema before changing anything here.
    """
    sql_project_config = 'select * from data_quality_management_project_config;'
    sql_field_operation_config = 'select * from data_quality_management_field_operation_config;'
    sql_field_config = 'select * from data_quality_management_field_config;'
    sql_config = 'select * from data_quality_management_config;'
    # One round trip; getresult apparently yields one result set per
    # statement, in statement order (see the indexed access below).
    results = getresult(''.join([
        sql_field_operation_config, sql_project_config, sql_config,
        sql_field_config
    ]), (), mysql_database['portal'])
    # Statement order: 0=field-operation links, 1=projects,
    # 2=config lookup, 3=field configs.
    project_config = results[1]
    config = results[2]
    field_config = results[3]
    field_operation_config = results[0]
    # config id -> [name, col2, col3]; [0] (the name) is what gets shown.
    config = dict([(item[0], [item[1], item[2], item[3]]) for item in config])

    # (field id, compute-operation name) pairs.
    field_operation_config = [(item[1], config[item[2]][0])
                              for item in field_operation_config]
    # Reordered field rows; a NULL warn-rule becomes the literal "{}".
    field_config = [(item[3], item[0], item[1], config[item[2]][0], item[4]
                     if item[4] is not None else "{}")
                    for item in field_config]
    # Group field configs by their project key (field[0]).
    table_field_config = {}
    for field in field_config:
        if field[0] in table_field_config:
            table_field_config[field[0]].append([
                field[2], field[3], [
                    item[1] for item in field_operation_config
                    if item[0] == field[1]
                ], field[4]
            ])
        else:
            table_field_config[field[0]] = [[
                field[2], field[3], [
                    item[1] for item in field_operation_config
                    if item[0] == field[1]
                ], field[4]
            ]]
    # One display row per project; the two None slots line up with the
    # '编辑'/'删除' action columns rendered client-side.
    project_config = [(item[0], item[1], config[item[2]][0], config[item[3]][0]
                       if item[3] is not None else None, item[4], item[5],
                       table_field_config[item[0]], None, None, str(item[7]),
                       str(item[8]), str(item[9]), item[6])
                      for item in project_config]
    result = {
        'columns': [{
            'title': '序号'
        }, {
            'title': '项目名称'
        }, {
            'title': '加载类型'
        }, {
            'title': '数据源类型'
        }, {
            'title': '数据源'
        }, {
            'title': '其他(分隔符)'
        }, {
            'title': '字段配置'
        }, {
            'title': '编辑'
        }, {
            'title': '删除'
        }, {
            'title': '添加'
        }, {
            'title': '更新'
        }, {
            'title': '最近计算'
        }, {
            'title': '重新计算'
        }],
        'dataset':
        project_config
    }
    return result, config


@report.route('/data_report_data_quality_get_config')
def data_report_data_quality_get_config():
    """Return the project config table and the raw config lookup as JSON."""
    table_payload, config_lookup = get_dataquality_table_config()
    return jsonify(data=table_payload, config=config_lookup)


def get_config():
    """Return a mapping of config name (column 1) -> config id (column 0)."""
    rows = getresult('select * from data_quality_management_config', (),
                     mysql_database['portal'])
    return {row[1]: row[0] for row in rows}


@report.route('/data_report_data_quality_update_config', methods=['POST'])
def data_report_data_quality_update_config():
    """Create, edit, or delete a data-quality project configuration.

    Dispatches on the posted ``status`` field:
      * ``Delete`` -- drop the project row identified by ``project_id``.
      * ``Add``    -- insert a project row, one field-config row per posted
        column, and the field/compute link rows.
      * ``Edit``   -- update the project row, delete its old field configs,
        and re-insert them from the posted columns.

    Returns the literal string 'ok' (the Delete branch returns a small
    JSON status body instead).

    NOTE(review): every statement here %-interpolates request data into
    SQL (injection risk), and new-row ids are read back via a batched
    ``select max(id)`` (race-prone under concurrent requests). Prefer
    parameterized queries and LAST_INSERT_ID() when this is reworked.
    """
    status = request.form['status']
    if status == 'Delete':
        project_id = request.form['project_id']
        executesql(
            'delete from data_quality_management_project_config where id=%s' %
            project_id, (), mysql_database['portal'])
        return jsonify(status='Delete', state='ok')
    elif status == 'Add':
        # Name -> id lookup for data types and compute operations.
        configs = get_config()
        project_info = json.loads(request.form['project'])
        cols = json.loads(request.form['cols'])
        sql_insert_project = 'insert into data_quality_management_project_config(projectname,dataloadtype,datasourcetype,datasource,otherinfo,inserttime) values ("%s","%s","%s","%s","%s","%s")' % (
            project_info['name'], project_info['dataloadtypeid'],
            project_info['datasourcetypeid'], project_info['datasource'],
            project_info['otherinfo'],
            datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S"))
        sql_select_max_projectid = 'select max(id) from data_quality_management_project_config'
        # Insert + max(id) batched together; project_id[1][0][0] is the
        # id of the row just inserted.
        project_id = getresult(
            sql_insert_project + ";" + sql_select_max_projectid, (),
            mysql_database['portal'])
        sql_insert = 'insert into data_quality_management_field_config(fieldname,datatypeid,projectid,warn_rule) values("%s","%s","%s",\'%s\');select max(id) from data_quality_management_field_config;'
        sql_insert_compute = 'insert into data_quality_management_field_operation_config(fieldid,computeid) values(%s,%s)'
        for col in cols:
            filedname = col['name']  # sic: original misspelling of "fieldname" kept
            datatype = col['datatype']
            computes = col['computes'].split(',')
            warn_rules = col['warn_rules']
            # Map compute names to ids, skipping blanks from the split.
            computes = [configs[c] for c in computes if c != '']
            # id[1][0][0] is the freshly inserted field-config id.
            id = getresult(sql_insert % (filedname, configs[datatype],
                                         project_id[1][0][0], warn_rules), (),
                           mysql_database['portal'])
            if computes:
                insert_computes = [
                    sql_insert_compute % (id[1][0][0], c) for c in computes
                ]
                executesql(';'.join(insert_computes), (),
                           mysql_database['portal'])
    elif status == 'Edit':
        configs = get_config()
        project_info = json.loads(request.form['project'])
        cols = json.loads(request.form['cols'])
        sql_update = 'update data_quality_management_project_config set projectname="%s",dataloadtype="%s",datasourcetype="%s",datasource="%s",otherinfo="%s" where id=%s' % (
            project_info['name'], project_info['dataloadtypeid'],
            project_info['datasourcetypeid'], project_info['datasource'],
            project_info['otherinfo'], project_info['projectid'], )
        executesql(sql_update, (), mysql_database['portal'])
        # Rebuild field configs from scratch: delete them all, then
        # re-insert from the posted columns (same flow as the Add branch).
        sql_delete = 'delete from data_quality_management_field_config where projectid="%s"' % project_info[
            'projectid']
        executesql(sql_delete, (), mysql_database['portal'])
        sql_insert = 'insert into data_quality_management_field_config(fieldname,datatypeid,projectid,warn_rule) values("%s","%s","%s",\'%s\');select max(id) from data_quality_management_field_config;'
        sql_insert_compute = 'insert into data_quality_management_field_operation_config(fieldid,computeid) values(%s,%s)'
        for col in cols:
            filedname = col['name']
            datatype = col['datatype']
            computes = col['computes'].split(',')
            warn_rules = col['warn_rules']
            computes = [configs[c] for c in computes if c != '']
            id = getresult(sql_insert % (filedname, configs[datatype],
                                         project_info['projectid'],
                                         warn_rules), (),
                           mysql_database['portal'])
            if computes:
                insert_computes = [
                    sql_insert_compute % (id[1][0][0], c) for c in computes
                ]
                executesql(';'.join(insert_computes), (),
                           mysql_database['portal'])
    return 'ok'


@report.route('/data_report_data_quality_submit_compute')
def data_report_data_quality_submit_compute():
    """Launch the Spark data-quality job for one project.

    Marks the project active (isactive=2), then spawns spark-submit and
    scans its stdout for 'RUNNING'.

    Returns 'ok' when the job reports RUNNING, otherwise an error string.

    Fixes over the original: the Popen call passed a single command
    string with shell=False (cannot execute on POSIX), and the output
    loop iterated the characters of one line so 'RUNNING' could never
    match a single char.
    """
    try:
        project_name = request.args['project_name']
        # Parameterized update -- the value comes straight from the request.
        # NOTE(review): the column is `project_name` here but `projectname`
        # in the insert/update statements elsewhere in this file -- confirm
        # against the schema.
        executesql(
            'update data_quality_management_project_config set isactive=2 where project_name=%s',
            (project_name,), mysql_database['portal'])
        # Argument list + shell=False: no shell parsing, no injection via
        # project_name.
        command = [
            'spark-submit',
            '--name', 'dataquality_' + project_name,
            '--class', 'com.ppcredit.dp.dataquality.CaculateModelNew',
            '--master', 'yarn',
            '--queue', 'ml',
            '--deploy-mode', 'cluster',
            '--driver-memory', '2g',
            '--executor-memory', '4g',
            '--num-executors', '8',
            '--executor-cores', '4',
            'hdfs:///tmp/data_quality/data_quality/DataQuality-jar-with-dependencies.jar',
            project_name,
        ]
        import subprocess
        subp = subprocess.Popen(command, shell=False, stdout=subprocess.PIPE)
        # Scan stdout line by line for the YARN RUNNING state.
        for line in subp.stdout:
            if 'RUNNING' in line:
                return 'ok'
        return 'error: spark-submit did not report RUNNING'
    except Exception as e:
        return 'error' + str(e)


def get_data_report_log_report():
    """Return the daily source-vs-Hive row-count comparison as JSON.

    Query args ``start`` / ``end`` bound the date range (inclusive);
    both default to yesterday.

    Fixes over the original: ``datetime.now()`` raised AttributeError
    because this module imports the ``datetime`` module (not the class),
    and the dates were %-interpolated into the SQL (injection risk).
    """
    yesterday = (datetime.datetime.now() -
                 datetime.timedelta(days=1)).strftime('%Y-%m-%d')
    start_date = request.args.get('start', yesterday)
    end_date = request.args.get('end', yesterday)
    # Parameterized range filter instead of string formatting.
    sql_select = 'select * from rpt.log_compare_daily where ds>=%s and ds<=%s'
    result_counts = getresult(sql_select, (start_date, end_date),
                              mysql_database['portal'])
    titles = ['Date', 'Table Name', 'Source Count', 'Hive Count', 'Status']
    result = {
        'columns': [{'title': t} for t in titles],
        'dataset': result_counts
    }
    return jsonify(data=result)


@report.route('/data_report_graph')
def data_report_graph():
    """Render the graph report page.

    Passes the available graph list plus the schema (labels, indices,
    property keys for vertices and edges) of the default graph.
    """
    from utilities import gremlin_helper
    graphs = get_graph_list()
    # Gremlin/Groovy script collecting label, index, and property-key
    # sets for both element types.
    scripts = '''
    result = ["vertexLabels":new HashSet(),"edgeLabels":new HashSet(),"vertexIndices":new HashSet(),"edgeIndices":new HashSet(),"vertexProperties":new HashSet(),"edgeProperties":new HashSet()]
    graph.getLabels(ElementType.VERTEX).forEachRemaining{result["vertexLabels"].add(it.label());it.propertyTypes().keySet().forEach{result["vertexProperties"].add(it)};}
    graph.getLabels(ElementType.EDGE).forEachRemaining{result["edgeLabels"].add(it.label());it.propertyTypes().keySet().forEach{result["edgeProperties"].add(it)};}
    graph.getIndices(OperationType.READ,ElementType.VERTEX).forEachRemaining{result["vertexIndices"].add(it.key().propertyKey())}
    graph.getIndices(OperationType.READ,ElementType.EDGE).forEachRemaining{result["edgeIndices"].add(it.key().propertyKey())}
    result'''
    schema = gremlin_helper.execute_gremlin(scripts, "hgraph_ppc")
    return render_template(
        'data_report/data_report_graph.html',
        graphs=graphs,
        schema=json.dumps(schema))


@report.route('/data_report_graph_change')
def data_report_graph_change():
    """Return the schema of the graph selected in the UI as JSON."""
    from utilities import gremlin_helper
    selected_graph = request.args['cube_name']
    # Same schema-collection script as the initial page render.
    scripts = '''
    result = ["vertexLabels":new HashSet(),"edgeLabels":new HashSet(),"vertexIndices":new HashSet(),"edgeIndices":new HashSet(),"vertexProperties":new HashSet(),"edgeProperties":new HashSet()]
    graph.getLabels(ElementType.VERTEX).forEachRemaining{result["vertexLabels"].add(it.label());it.propertyTypes().keySet().forEach{result["vertexProperties"].add(it)};}
    graph.getLabels(ElementType.EDGE).forEachRemaining{result["edgeLabels"].add(it.label());it.propertyTypes().keySet().forEach{result["edgeProperties"].add(it)};}
    graph.getIndices(OperationType.READ,ElementType.VERTEX).forEachRemaining{result["vertexIndices"].add(it.key().propertyKey())}
    graph.getIndices(OperationType.READ,ElementType.EDGE).forEachRemaining{result["edgeIndices"].add(it.key().propertyKey())}
    result
    '''
    schema = gremlin_helper.execute_gremlin(scripts, selected_graph)
    return jsonify(data=schema)


@report.route('/data_report_graph_query_vertex')
def data_report_graph_query_vertex():
    """Look up one vertex by property key/value.

    Returns its pretty-printed valueMap plus, when found, the vertex id
    and label for the graph widget.
    """
    from utilities import gremlin_helper
    key_type = request.args['key']
    key_type_value = request.args['value']
    table_name = request.args['table_name']
    # NOTE(review): key/value are interpolated into the Gremlin script
    # unescaped -- script-injection risk; confirm inputs are trusted.
    lookup = 'graph.traversal().V().has("' + key_type + '","' + key_type_value + '")'
    value_map = gremlin_helper.execute_gremlin(lookup + '.valueMap()',
                                               table_name)
    result = {
        'value': json.dumps(value_map, indent=2, separators=(', ', ': '))
    }
    if result['value'] == '[]':
        result['status'] = 'bad'
    else:
        result['status'] = 'ok'
        vertices = gremlin_helper.execute_gremlin(lookup, table_name)
        if len(vertices) > 0:
            first = vertices[0]
            result['name'] = first.id
            result['category'] = first.label
    return jsonify(data=result)


@report.route('/data_report_graph_query_relation')
def data_report_graph_query_relation():
    """Return the one-hop neighborhood of a vertex as ECharts graph data.

    The Gremlin script fills six parallel lists: out-vertices, their
    valueMaps, in-vertices, their valueMaps, incident edges, and the
    edge valueMaps. Vertices become `data` nodes, edges become `links`,
    and every encountered label is appended (duplicates included, as
    before) to `categories`.
    """
    from utilities import gremlin_helper

    def make_node(element, value_obj):
        # One ECharts node: vertex id as name, pretty-printed valueMap
        # as value, label as category.
        return {
            'name': str(element.id),
            'value': json.dumps(value_obj, indent=2, separators=(', ', ': ')),
            'category': element.label
        }

    result = {'data': [], 'links': [], 'categories': []}
    vertexid = request.args['vertexid']
    table_name = request.args['table_name']
    # NOTE(review): vertexid is interpolated into the script unescaped --
    # script-injection risk; confirm inputs are trusted.
    scripts = '''
    tempList=[];myList=[];bbb=%s;
    graph.traversal().V(bbb).out().each{myList+=it}
    tempList << myList;myList=[]
    graph.traversal().V(bbb).out().valueMap().each{myList+=it}
    tempList << myList;myList=[]
    graph.traversal().V(bbb).in().each{myList+=it}
    tempList << myList;myList=[]
    graph.traversal().V(bbb).in().valueMap().each{myList+=it}
    tempList << myList;myList=[]
    graph.traversal().V(bbb).bothE().each{myList+=it}
    tempList << myList;myList=[]
    graph.traversal().V(bbb).bothE().valueMap().each{myList+=it}
    tempList << myList
    ''' % ('"' + vertexid + '"')
    raw_result = gremlin_helper.execute_gremlin(scripts, table_name)
    categories = []
    # Out-neighbors live in lists 0/1, in-neighbors in lists 2/3; the
    # element list and its valueMap list are index-aligned.
    for offset in (0, 2):
        for i, item in enumerate(raw_result[offset]):
            categories.append(item.label)
            result['data'].append(make_node(item, raw_result[offset + 1][i]))
    # Incident edges (lists 4/5) become directed links.
    for i, item in enumerate(raw_result[4]):
        link = {
            'source': str(item.outV.id),
            'target': str(item.inV.id),
            'category': item.label,
            'name': item.id,
            'value': json.dumps(raw_result[5][i], indent=2,
                                separators=(', ', ': '))
        }
        categories.append(item.label)
        result['links'].append(link)
    result['categories'] = [{'name': item} for item in categories]
    return jsonify(data=result)
