import logging
import json
import time
from flask import Blueprint, render_template, request, session, jsonify, current_app, redirect
import os
import csv
import numpy as np
from util import get_conn_mysql, get_conn_pg
import pandas as pd
from itertools import takewhile, repeat
import chardet
# Holds data shared across requests (chart options built by the chart views).
class DataStore:
    # NOTE(review): class-level mutable attribute; table_operate and
    # create_chart rebind it per request through the module singleton below,
    # so it acts as process-global state (not safe across workers — confirm
    # deployment uses a single process if this matters).
    option = []

# Module-level singleton used by the table/chart views.
dataStore = DataStore()


# Register the blueprint for the table views.
table = Blueprint('table', __name__, template_folder='../templates_table', static_url_path="")


@table.route('/favicon.ico')
def get_fav():
    """Serve the site icon ('icon.png') as the favicon."""
    return current_app.send_static_file('icon.png')

@table.route('/table_operate')
def table_operate():
    """
    Reset the per-session upload state and show the table-operation page.

    :return: table_operate page when logged in, otherwise the login page.
    """
    if session.get('isLogin') != 1:
        return render_template('login.html', status='false')

    # Reset the number of uploaded tables.
    session['file_size'] = 0
    # Reset the list of uploaded file names.
    session['file_name'] = []
    # Reset the saved database connection info.
    session['conn_info'] = None

    # Reset the shared chart-option store.
    global dataStore
    dataStore.option = []

    # Unique temp-file name: 8 random lowercase letters + timestamp prefix.
    stamp = str(int(time.time()))[:7]
    rand_name = ''.join(chr(i) for i in np.random.randint(97, 123, 8))
    temp_path = 'temp_files/{}.csv'.format(rand_name + stamp)

    # Create the empty temp file; a context manager guarantees the handle
    # is closed even on error (the original open()/close() pair could leak).
    with open(temp_path, mode='w+', encoding='utf-8', newline=''):
        pass

    # Remember the temp file for this session.
    session['temp_file'] = temp_path

    return render_template('table_operate.html')


@table.route('/file_upload', methods=['POST'])
def file_upload():
    """
    Accept an uploaded csv/xlsx/xlt/xls file, convert it to a UTF-8 csv
    under temp_files/ and register it in the session.

    :return: JSON {'status': 'success'|'fail'} for POST, 'fail' otherwise.
    """
    if request.method != 'POST':
        return 'fail'

    # Unique file name: 8 random lowercase letters + timestamp prefix.
    t = str(int(time.time()))[:7]
    name = ''.join(chr(i) for i in np.random.randint(97, 123, 8))
    file_path = 'temp_files/{}.csv'.format(name + t)
    try:
        f = request.files['file']
        # Use the LAST dot-separated part so names like "a.b.csv" work
        # (the original split('.')[1] picked the wrong segment).
        suffix = f.filename.rsplit('.', 1)[-1].lower()
        if suffix == 'csv':
            cur_encoding = chardet.detect(f.read())['encoding']
            if cur_encoding == 'GB2312':
                # GBK is a superset of GB2312; chardet often under-reports.
                cur_encoding = 'GBK'
            elif cur_encoding is None:
                # 'ANSI' (original fallback) is not a registered Python
                # codec and always raised; latin-1 decodes any byte string.
                cur_encoding = 'latin-1'
            f.seek(0)
            with open(file_path, mode='wb+') as file:
                file.write(f.read().decode(cur_encoding).encode('utf-8'))
        elif suffix in ('xlsx', 'xlt', 'xls'):
            # Pass the file object itself: read_excel expects a path or
            # file-like object, not raw bytes.
            data = pd.read_excel(f, sheet_name=0)
            data.to_csv(file_path)
        else:
            # Unsupported extension: do not register a phantom file
            # (the original still bumped file_size/file_name here).
            return jsonify({'status': 'fail'})
        session['file_size'] += 1
        session['file_name'] = session.get('file_name') + [(name + t + '.csv')]
    except Exception as e:
        logging.info(e)
        return jsonify({'status': 'fail'})
    else:
        # Record when the file was saved so stale files can be purged later.
        # %M is minutes — the original %m repeated the month.
        with open(file='file_time.txt', mode='a+', encoding='utf-8', newline='\n') as file:
            file.write('temp_files/{}.csv'.format(name + t) + '\t'
                       + time.strftime('%Y-%m-%d-%H:%M:%S', time.localtime(time.time())) + '\n')
        return jsonify({'status': 'success'})


@table.route('/get_conn_db')
def get_conn_db():
    """
    Render the database-connection page, pre-filled with any connection
    info already stored in the session.

    :return: conn_db page (with or without saved connection info).
    """
    conn_info = session.get('conn_info')
    if conn_info is None or session.get('isLogin') != 1:
        # No saved info (or not logged in): render the blank form.
        return render_template('conn_db.html')

    # Stored order: type, host, port, username, password, database name.
    db_type, db_host, db_port, db_username, db_password, db_name = conn_info[:6]
    return render_template('conn_db.html', db_type=db_type, db_host=db_host, db_port=db_port,
                           db_username=db_username, db_password=db_password, db_name=db_name)


@table.route('/conn_db', methods=['POST'])
def conn_db():
    """
    Connect to the database described by the submitted form and list its
    tables.

    :return: table_select page on success, conn_db page with a tip on
             connection failure, or the login page when not logged in.
    """
    if session.get('isLogin') != 1:
        return render_template("login.html", status='false')

    data = request.form
    db_type = data['db_type']
    # Overwrite any previous connection info (the old delete-then-set
    # sequence was equivalent to a plain assignment).
    session['conn_info'] = [db_type, data['host'], data['port'], data['username'],
                            data['password'], data['db']]
    result = []
    try:
        if db_type == 'mysql':
            conn, cur = get_conn_mysql(data['host'], int(data['port']), data['username'],
                                       data['password'], data['db'])
            # Every table of the selected schema (parameterized).
            cur.execute('select table_name from information_schema.tables where table_schema=%s',
                        (data['db'],))
            result = list(cur.fetchall())
            cur.close()
            conn.close()
        elif db_type == 'pg':
            conn, cur = get_conn_pg(data['host'], int(data['port']), data['username'],
                                    data['password'], data['db'])
            # This query has no placeholder, so no parameter tuple may be
            # passed (psycopg2 raises a formatting error otherwise — the
            # original passed (data['db'],) and always failed here).
            cur.execute(
                "SELECT table_name FROM information_schema.tables WHERE table_schema ='public' AND table_type ='BASE TABLE'; ")
            result = list(cur.fetchall())
            cur.close()
            conn.close()
    except Exception as e:
        logging.info(e)
        # Connection/query failed: show the form again with a tip.
        return render_template('conn_db.html', tips="连接超时")

    # Half the table count, rounded up (used for a two-column layout):
    # 10 tables -> 5, 7 tables -> 4.
    data_half = int(len(result) / 2) if len(result) % 2 == 0 else int((len(result) + 1) / 2)

    # Re-initialize the upload counter on a fresh visit.
    if session.get('file_size') is None:
        session['file_size'] = 0

    return render_template('table_select.html', data=result, data_half=data_half, data_len=len(result),
                           file_limit=10 - session.get('file_size'))


@table.route('/select_table', methods=['POST'])
def select_table():
    """
    Export the selected database tables to csv files under temp_files/.

    :return: JSON {'status': 'success'|'fail'}
    """
    try:
        # Saved connection info: [type, host, port, user, password, db].
        conn_info = session.get('conn_info')
        db_type, db_host, db_port = conn_info[0], conn_info[1], conn_info[2]
        db_username, db_password, db_name = conn_info[3], conn_info[4], conn_info[5]

        # Open a connection/cursor for the configured database type.
        if db_type == 'mysql':
            conn, cur = get_conn_mysql(db_host, int(db_port), db_username, db_password, db_name)
        else:
            conn, cur = get_conn_pg(db_host, int(db_port), db_username, db_password, db_name)

        # Table names selected on the form.
        data = json.loads(request.data.decode())

        table_name = []
        for item in data['table_name']:
            tbl = item['name']
            # Identifiers cannot be bound as query parameters, and the name
            # comes straight from the client — reject anything that could
            # break out of the statement (SQL injection guard).
            if not tbl.replace('_', '').isalnum():
                raise ValueError('illegal table name: {!r}'.format(tbl))

            session['file_size'] += 1
            table_name.append(tbl)

            # Column names of the table (4th column of information_schema).
            cur.execute("SELECT * FROM INFORMATION_SCHEMA.columns WHERE TABLE_NAME='{}'".format(tbl))
            head = list(map(lambda x: x[3], list(cur.fetchall())))

            # Full table content.
            cur.execute('select * from {}'.format(tbl))
            result = list(cur.fetchall())

            # Unique file name: 8 random lowercase letters + timestamp.
            t = str(int(time.time()))[:7]
            name = ''.join(chr(i) for i in np.random.randint(97, 123, 8))

            # Dump the query result to temp_files/<name>.csv.
            file_path = 'temp_files/{}.csv'.format(name + t)
            with open(file_path, mode='w+', encoding='utf-8', newline='') as file:
                csv_write = csv.writer(file)
                csv_write.writerow(head)
                csv_write.writerows(result)

            # Sanity-check that the freshly written csv parses; bind to a
            # throwaway name (the original rebound `data`, shadowing the
            # request payload being iterated).
            _check = pd.read_csv(file_path)

            # Register the new file in the session's file list.
            session['file_name'] = session.get('file_name') + [(name + t + '.csv')]

            # Record the save time, with a trailing newline so entries stay
            # one per line like file_upload writes them (%M = minutes).
            with open(file='file_time.txt', mode='a+', encoding='utf-8', newline='\n') as file:
                file.write(file_path + '\t'
                           + time.strftime('%Y-%m-%d-%H:%M:%S', time.localtime(time.time())) + '\n')
        # Stage all table names exported in this request.
        session['table_name'] = table_name

        cur.close()
        conn.close()
    except Exception as e:
        logging.info(e)
        print(e)
        return jsonify({'status': 'fail'})
    else:
        return jsonify({'status': 'success'})


@table.route('/get_table_name')
def get_table_name():
    """
    Return the table names currently staged in the session.

    :return: JSON with status, plus the name list on success.
    """
    staged = session.get('table_name')
    if staged is None:
        # Nothing staged yet.
        return jsonify({'status': 'fail'})
    return jsonify({'status': 'success', 'table_name': staged})


@table.route('/clear_table_name')
def clear_table_name():
    """
    Empty the session's staged table-name list.

    :return: JSON {'status': 'success'|'fail'}
    """
    try:
        session['table_name'] = []
    except Exception as e:
        logging.info(e)
        return jsonify({'status': 'fail'})
    return jsonify({'status': 'success'})


@table.route('/delete_file/<int:file_index>')
def delete_file(file_index):
    """
    Delete the uploaded file at the given index and unregister it.

    :param file_index: index into the session's file-name list.
    :return: JSON {'status': 'success'|'fail'}
    """
    file_names = session.get('file_name')
    # No files registered: the original fell through to the bookkeeping
    # below and crashed (None - 1 / del None[...]); report failure instead.
    if not file_names:
        return jsonify({'status': 'fail'})
    try:
        # Remove the file belonging to that index.
        os.remove("temp_files/" + file_names[file_index])
    except Exception as e:
        logging.info(e)
        return jsonify({'status': 'fail'})
    else:
        # One fewer uploaded file.
        session['file_size'] -= 1
        # Rebuild and reassign the list so Flask notices the session change
        # (in-place `del` on the stored list is not reliably persisted).
        session['file_name'] = file_names[:file_index] + file_names[file_index + 1:]
        return jsonify({'status': 'success'})


@table.route('/get_table_col/<int:index>')
def get_table_col(index):
    """
    Return the column names (header row) of the uploaded file at *index*.

    :param index: index into the session's file-name list.
    :return: JSON with status and, on success, the column list.
    """
    try:
        # Open the csv behind the index and read its first (header) row.
        with open(file='temp_files/{}'.format(session.get('file_name')[index]), mode='r', encoding='utf-8') as file:
            reader = csv.reader(file)
            head_row = next(reader)
    except Exception as e:
        logging.info(e)
        # jsonify cannot serialize an exception object (the original made
        # the error handler itself raise a 500); send its message instead.
        return jsonify({'status': 'fail', 'e': str(e)})
    else:
        return jsonify({'status': 'success', 'columns': head_row})


@table.route('/test_conn', methods=['POST'])
def test_conn():
    """
    Join the selected files — inner merge on the given columns, or a
    row-wise concat when every entry is '纵向连接' — and save the result
    as a new temp csv.

    :return: JSON with status and, on success, the result file path.
    """
    try:
        # {file_index: join column or '纵向连接' (vertical), ...}
        data = json.loads(request.data.decode())['data']
        base_url = 'temp_files/'
        names = []
        cols = []
        file_name = session.get('file_name')

        # Vertical (row-wise) concat unless any real column name is given.
        is_vertical = True
        for key in data.keys():
            names.append(file_name[int(key)])
            if data[key] != '纵向连接':
                is_vertical = False
            cols.append(data[key])

        # Load every selected file once (the original duplicated this loop
        # in both branches).
        pd_list = [pd.read_csv(base_url + n, sep=',', encoding='utf-8') for n in names]

        # Fold the frames together pairwise, starting from the first.
        temp = pd_list[0]
        for i in range(len(pd_list) - 1):
            if is_vertical:
                # Stack rows.
                temp = pd.concat([temp, pd_list[i + 1]], axis=0)
            else:
                # Inner-join the next frame on its column against the
                # running result's column.
                temp = pd.merge(temp, pd_list[i + 1], how='inner',
                                left_on=cols[i], right_on=cols[i + 1], sort=False)

        # Replace NaN so the result serializes cleanly.
        temp = temp.fillna(value='None')

        # Save under a fresh random name and remember it in the session.
        t = str(int(time.time()))[:7]
        name = ''.join(chr(i) for i in np.random.randint(97, 123, 8))
        temp.to_csv('temp_files/{}.{}'.format(name + t, 'csv'), index=False)
        session['conn_file'] = name + t + '.csv'
    except Exception as e:
        print(e)
        logging.info(e)
        return jsonify({'status': 'fail'})
    else:
        return jsonify({'status': 'success', 'filename': 'temp_files/{}.{}'.format(name + t, 'csv')})


@table.route('/get_data', methods=['POST'])
def get_data():
    """
    Return one 100-row page of a csv: either an uploaded file (by index
    into the session file list) or a join-result file (by path).

    :return: JSON with status, page rows, headers, line count, page index.
    """
    try:
        parameter = json.loads(request.data.decode())
        is_conn = parameter['is_conn']
        file_index = parameter['index']
        page_index = parameter['page_index']
        if is_conn is False:
            # `index` is a position in the session's file list.
            file_name = session.get('file_name')
            if len(file_name) <= int(file_index):
                return jsonify({'status': 'fail'})
            path = 'temp_files/' + file_name[int(file_index)]
        else:
            # `index` is a file path produced by /test_conn. It comes from
            # the client, so confine it to temp_files/ — the original
            # opened it unchecked (path traversal).
            path = file_index
            if not os.path.realpath(path).startswith(os.path.realpath('temp_files') + os.sep):
                return jsonify({'status': 'fail'})
        data, heads, lines_num = _read_csv_page(path, page_index)
    except Exception as e:
        logging.info(e)
        print(e)
        return jsonify({'status': 'fail'})
    else:
        return jsonify({'status': 'success', 'data': data.values.tolist(), 'heads': heads, 'lines_num': (lines_num-1),
                        'page_index': page_index})


def _read_csv_page(path, page_index):
    """Read one 100-row page of *path*; return (frame, headers, line_count)."""
    # Count physical lines by scanning the file in 1 MiB chunks.
    buffer = 1024 * 1024
    with open(path, encoding='utf-8') as f:
        buf_gen = takewhile(lambda x: x, (f.read(buffer) for _ in repeat(None)))
        lines_num = sum(buf.count('\n') for buf in buf_gen)

    if page_index == 1:
        # First page: header row plus the first 100 data rows.
        temp_data = pd.read_csv(path, sep=',', encoding='utf-8', skiprows=0, nrows=101)
    elif page_index * 100 > lines_num:
        # Last (short) page: read to the end of the file.
        temp_data = pd.read_csv(path, sep=',', encoding='utf-8', skiprows=(page_index - 1) * 100 + 1)
    else:
        temp_data = pd.read_csv(path, sep=',', encoding='utf-8', skiprows=(page_index - 1) * 100 + 1, nrows=100)

    # Header read separately so non-first pages still report column names.
    heads = pd.read_csv(path, sep=',', skiprows=0, nrows=1).columns.tolist()
    return temp_data.fillna(value='None'), heads, lines_num


@table.route('/save_conn')
def save_conn():
    """
    Append the last join-result file to the session's file list.

    :return: JSON {'status': 'success'|'fail'}
    """
    try:
        conn_file = session.get('conn_file')
        if conn_file is None:
            # No pending join result to save.
            return jsonify({'status': 'fail'})
        session['file_name'] = session.get('file_name') + [conn_file]
        session['conn_file'] = None
    except Exception as e:
        logging.info(e)
        # The original fell through and returned None here, which makes
        # Flask raise "view did not return a response".
        return jsonify({'status': 'fail'})
    else:
        return jsonify({'status': 'success'})


@table.route('/save_result', methods=['POST'])
def save_result():
    """
    Remember the export file name and which uploaded files (by index)
    should be exported.

    :return: JSON {'status': 'success'|'fail'}
    """
    try:
        payload = json.loads(request.data.decode())
        chosen_indices = payload['file_index']
        session['save_file_name'] = payload['file_name']
        all_names = session.get('file_name')
        # Resolve each requested index to its file name.
        session['save_file_list'] = []
        for idx in chosen_indices:
            session['save_file_list'] = session.get('save_file_list') + [all_names[int(idx)]]
    except Exception as e:
        logging.info(e)
        return jsonify({'status': 'fail'})
    else:
        return jsonify({'status': 'success'})


@table.route('/create_chart')
def create_chart():
    '''
    Render the echarts chart-building page with the column names of every
    file staged for export.
    :return: create_chart page, or a redirect to / when not ready / not
             logged in.
    '''
    save_file_list = session.get('save_file_list')
    if save_file_list is not None and session.get('isLogin') == 1:
        try:
            # Reset the shared chart-option store.
            global dataStore
            dataStore.option = []

            save_file_list = session.get('save_file_list')
            save_file_name = session.get('save_file_name')
            table_dict = {}
            cols = []
            for name in save_file_list:
                # Header row (non-empty column names) of each staged file.
                with open(file='temp_files/' + name, mode='r', encoding='utf-8') as file:
                    reader = csv.reader(file)
                    temp = [i for i in list(next(reader)) if i != '']
                    cols.append(temp)
                # NOTE(review): the key is the literal string 'i', so every
                # iteration overwrites the same entry — was table_dict[name]
                # intended? Confirm before changing.
                table_dict['i'] = cols
            session['table_dict'] = table_dict
        except Exception as e:
            logging.info(e)
            return redirect('/')
        else:
            return render_template('create_chart.html', header=save_file_name, cols=cols, file_len=len(save_file_name))
    else:
        return redirect('/')


@table.route('/get_col_data/<int:data_index>', methods=['POST'])
def get_col_data(data_index):
    '''
    Return a slice of one column of a staged file, starting at row
    *data_index*.
    :return: JSON with status, the values and a combined column name.
    '''
    try:
        payload = json.loads(request.data.decode())
        file_index = payload['file_index']
        col = payload['col']
        saved = session.get('save_file_list')
        path = 'temp_files/' + saved[int(file_index)]
        if data_index == 0:
            # From the top: header present, column addressed by name.
            frame = pd.read_csv(path, sep=',', encoding='utf-8', skiprows=data_index, nrows=100)
            data = frame[col]
        else:
            # Mid-file: no header row, column addressed by position.
            frame = pd.read_csv(path, sep=',', encoding='utf-8', header=None, skiprows=data_index, nrows=20)
            data = frame[int(col)]
    except Exception as e:
        logging.info(e)
        print(e)
        return jsonify({'status': 'fail'})
    else:
        return jsonify({'status': 'success', 'data': data.values.tolist(), 'col_name': file_index + col})


@table.route('/get_x_y', methods=['POST'])
def get_x_y():
    '''
    Build the data payload for one echarts chart (bar/line, radar,
    pie_line, map, scatter, stack).

    Column ids arrive as "<file_index><column_name>" strings: the first
    character indexes session['save_file_list'], the rest is the column
    name ('ys' ids are additionally joined by the "yy_rm" separator, and
    some value ids carry a trailing 7-char suffix that gets stripped).

    NOTE(review): an unrecognized chart_type falls through every branch and
    the view returns None, which Flask rejects — confirm the front end
    only ever sends the handled types.
    :return: JSON with status and chart-type-specific fields.
    '''
    try:
        data = json.loads(request.data.decode())
        file_name = session.get('save_file_list')
        chart_type = data['chart_type']
        data_index_list = data['data_index_list']
        if chart_type == "bar" or chart_type == "line":
            x = data['x']
            ys = data['ys']
            title = data['title']
            subtitle = data['subtitle']
            max_ = data['max']
            min_ = data['min']
            avg_ = data['avg']
            is_stack = data['is_stack']
            x_label = data['x_label']
            y_label = data['y_label']
            # 'ys' is a "yy_rm"-separated list of column ids.
            y_names = str(ys).replace(' ', '').split("yy_rm")[:-1]
            y_data = []
            for name in y_names:
                # First char = file index, remainder = column name.
                y_index = int(str(name)[:1])
                y_file = 'temp_files/' + file_name[y_index]
                y_content = pd.read_csv(y_file, sep=',', encoding='utf-8')
                if len(data_index_list) > 0:
                    # Restrict to the rows the user picked.
                    y_data.append(y_content[name[1:]].iloc[data_index_list].values.tolist())
                else:
                    y_data.append(y_content[name[1:]].values.tolist())
            x_name = str(x)[1:]
            x_index = int(str(x)[:1])
            x_file = 'temp_files/' + file_name[x_index]
            x_content = pd.read_csv(x_file, sep=',', encoding='utf-8')
            if len(data_index_list) > 0:
                x_data = x_content[x_name].iloc[data_index_list].values.tolist()
            else:
                x_data = x_content[x_name].values.tolist()
            # Assemble the echarts `series` option as a string.
            # NOTE(review): str(dict).replace("'", '"') produces broken JSON
            # when any value contains an apostrophe; json.dumps(...,
            # ensure_ascii=False) would be safer — confirm the front-end
            # parser before changing.
            series = "["
            for index in range(len(y_names)):
                temp = {"name": y_names[index][1:], "type": chart_type, "data": y_data[index]}
                # Parses as `max_ and (min_ is False)`: max marker only.
                if max_ and min_ is False:
                    temp["markPoint"] = {"data": [{"type": "max", "name": "最大值"}]}
                elif min_ and max_ is False:
                    temp["markPoint"] = {"data": [{"type": "min", "name": "最小值"}]}
                elif max_ and min_:
                    temp["markPoint"] = {"data": [{"type": "max", "name": "最大值"}, {"type": "min", "name": "最小值"}]}
                if avg_:
                    temp["markLine"] = {"data": [{"type": "average", "name": "平均值"}]}
                if is_stack == "堆叠":
                    temp["stack"] = "total"
                if len(y_names) - 1 == index:
                    series += str(temp).replace("'", '"')
                else:
                    series += str(temp).replace("'", '"') + ','
            series += "]"
            return jsonify({'status': 'success', 'x_data': x_data, 'y_data': y_data, 'title': title,
                            'x_label': x_label, 'y_label': y_label, 'subtitle': subtitle, 'series': series})
        elif chart_type == "radar":
            x = data['x']
            ys = data['ys']
            title = data['title']
            subtitle = data['subtitle']
            y_names = str(ys).replace(' ', '').split("yy_rm")[:-1]
            y_data = []
            for name in y_names:
                y_index = int(str(name)[:1])
                y_file = 'temp_files/' + file_name[y_index]
                y_content = pd.read_csv(y_file, sep=',', encoding='utf-8')
                if len(data_index_list) > 0:
                    y_data.append(y_content[name[1:]].iloc[data_index_list].values.tolist())
                else:
                    y_data.append(y_content[name[1:]].values.tolist())
            x_name = str(x)[1:]
            x_index = int(str(x)[:1])
            x_file = 'temp_files/' + file_name[x_index]
            x_content = pd.read_csv(x_file, sep=',', encoding='utf-8')
            if len(data_index_list) > 0:
                x_data = x_content[x_name].iloc[data_index_list].values.tolist()
            else:
                x_data = x_content[x_name].values.tolist()
            radar_data = []

            indicator = []
            # One radar entry per row: name from x, values from every y.
            for i in range(len(x_data)):
                temp = []
                for item in y_data:
                    temp.append(item[i])
                radar_data.append({"name": str(x_data[i]), "value": temp})
            radar_data = str(radar_data).replace("'", '"')
            for item in y_names:
                indicator.append({"name": item[1:]})
            indicator = str(indicator).replace("'", '"')
            return jsonify({'status': 'success', 'indicator': indicator, 'title': title,
                            'subtitle': subtitle, 'radar_data': radar_data})
        elif chart_type == "pie_line":
            x = data['x']
            ys = data['ys']
            title = data['title']
            subtitle = data['subtitle']
            y_names = str(ys).replace(' ', '').split("yy_rm")[:-1]
            y_data = []
            for name in y_names:
                y_index = int(str(name)[:1])
                y_file = 'temp_files/' + file_name[y_index]
                y_content = pd.read_csv(y_file, sep=',', encoding='utf-8')
                if len(data_index_list) > 0:
                    # Each series is [column_name, value, value, ...].
                    y_data.append([name[1:]]+y_content[name[1:]].iloc[data_index_list].values.tolist())
                else:
                    y_data.append([name[1:]]+y_content[name[1:]].values.tolist())
            x_name = str(x)[1:]
            x_index = int(str(x)[:1])
            x_file = 'temp_files/' + file_name[x_index]
            x_content = pd.read_csv(x_file, sep=',', encoding='utf-8')
            if len(data_index_list) > 0:
                x_data = x_content[x_name].iloc[data_index_list].values.tolist()
            else:
                x_data = x_content[x_name].values.tolist()
            # Dataset-style source: first row is the x column, then each y.
            source = [[x_name]+x_data] + y_data
            source = str(source).replace("'", '"')
            print(source)
            # NOTE(review): len(source) is the STRING length here (source
            # was just stringified), so 'line_count' is characters-1, not
            # rows-1 — confirm what the front end expects.
            return jsonify({'status': 'success', 'title': title, 'line_count': (len(source) - 1),
                            'subtitle': subtitle, 'source': source})
        elif chart_type == "map":
            x = data['x']
            ys = data['ys']
            title = data['title']
            subtitle = data['subtitle']
            # Maps use only the first y series.
            y_name = str(ys).replace(' ', '').split("yy_rm")[:-1][0]
            y_index = int(str(y_name)[:1])
            y_file = 'temp_files/' + file_name[y_index]
            y_content = pd.read_csv(y_file, sep=',', encoding='utf-8')
            if len(data_index_list) > 0:
                y_data = y_content[y_name[1:]].iloc[data_index_list].values.tolist()
            else:
                y_data = y_content[y_name[1:]].values.tolist()
            x_name = str(x)[1:]
            x_index = int(str(x)[:1])
            x_file = 'temp_files/' + file_name[x_index]
            x_content = pd.read_csv(x_file, sep=',', encoding='utf-8')
            if len(data_index_list) > 0:
                x_data = x_content[x_name].iloc[data_index_list].values.tolist()
            else:
                x_data = x_content[x_name].values.tolist()
            # Value range for the map's visual scale.
            min_ = min(y_data)
            max_ = max(y_data)
            map_data = []
            # One region entry per row.
            for i in range(len(x_data)):
                map_data.append({'name': x_data[i], 'value': y_data[i]})
            map_data = str(map_data).replace("'", '"')
            return jsonify({'status': 'success', 'title': title, 'min_': min_, 'max_': max_,
                            'subtitle': subtitle, 'map_data': map_data})
        elif chart_type == "scatter":
            x = data['x']
            y = data['y']
            title = data['title']
            subtitle = data['subtitle']
            x_label = data['x_label']
            y_label = data['y_label']
            curr_option = data['curr_option']
            y_index = int(str(y)[:1])
            y_file = 'temp_files/' + file_name[y_index]
            y_content = pd.read_csv(y_file, sep=',', encoding='utf-8')
            # The scatter y id carries a trailing 7-char suffix: strip it.
            if len(data_index_list) > 0:
                y_data = y_content[y[1:-7]].iloc[data_index_list].values.tolist()
            else:
                y_data = y_content[y[1:-7]].values.tolist()
            x_name = str(x)[1:]
            x_index = int(str(x)[:1])
            x_file = 'temp_files/' + file_name[x_index]
            x_content = pd.read_csv(x_file, sep=',', encoding='utf-8')
            if len(data_index_list) > 0:
                x_data = x_content[x_name].iloc[data_index_list].values.tolist()
            else:
                x_data = x_content[x_name].values.tolist()

            # Pair up as [y, x] points.
            x_y = list(map(lambda xx: list(xx), zip(y_data, x_data)))

            return jsonify({'status': 'success', 'scatter_data': x_y, 'title': title, 'curr_option': curr_option,
                            'x_label': x_label, 'y_label': y_label, 'subtitle': subtitle})
        elif chart_type == "stack":
            x = data['x']
            ys = data['ys']
            title = data['title']
            subtitle = data['subtitle']
            max_ = data['max']
            min_ = data['min']
            avg_ = data['avg']
            x_label = data['x_label']
            y_label = data['y_label']
            y_names = str(ys).replace(' ', '').split("yy_rm")[:-1]
            y_data = []
            for name in y_names:
                y_index = int(str(name)[:1])
                y_file = 'temp_files/' + file_name[y_index]
                y_content = pd.read_csv(y_file, sep=',', encoding='utf-8')

                if len(data_index_list) > 0:
                    y_data.append(y_content[name[1:]].iloc[data_index_list].values.tolist())
                else:
                    y_data.append(y_content[name[1:]].values.tolist())
            x_name = str(x)[1:]
            x_index = int(str(x)[:1])
            x_file = 'temp_files/' + file_name[x_index]
            x_content = pd.read_csv(x_file, sep=',', encoding='utf-8')
            if len(data_index_list) > 0:
                x_data = x_content[x_name].iloc[data_index_list].values.tolist()
            else:
                x_data = x_content[x_name].values.tolist()
            series = "["
            for index in range(len(y_names)):
                temp = {"name": y_names[index][1:], "type": "line", "data": y_data[index]}
                # NOTE(review): dead branch — chart_type is "stack" in this
                # block, so this never fires; areaStyle is instead injected
                # by the string surgery below.
                if chart_type == 'scatter':
                    temp['areaStyle'] = {}
                if max_ and min_ is False:
                    temp["markPoint"] = {"data": [{"type": "max", "name": "最大值"}]}
                elif min_ and max_ is False:
                    temp["markPoint"] = {"data": [{"type": "min", "name": "最小值"}]}
                elif max_ and min_:
                    temp["markPoint"] = {"data": [{"type": "max", "name": "最大值"}, {"type": "min", "name": "最小值"}]}
                if avg_:
                    temp["markLine"] = {"data": [{"type": "average", "name": "平均值"}]}
                temp["stack"] = "total"
                # Drop the closing brace and append an areaStyle entry.
                if len(y_names) - 1 == index:
                    series += str(temp).replace("'", '"')[:-1] + ',"areaStyle": {}}'
                else:
                    series += str(temp).replace("'", '"')[:-1] + ',"areaStyle": {}}' + ','
            series += "]"
            return jsonify({'status': 'success', 'x_data': x_data, 'y_data': y_data, 'title': title,
                            'x_label': x_label, 'y_label': y_label, 'subtitle': subtitle, 'series': series})
    except Exception as e:
        logging.info(e)
        print(e)
        return jsonify({'status': 'fail'})


@table.route('/get_k_v', methods=['POST'])
def get_k_v():
    '''
    Build name/value pairs for a pie chart, sorted by value ascending.
    :return: JSON with status, titles and the pair list.
    '''
    try:
        r_data = json.loads(request.data.decode())
        k = r_data['k']
        v = r_data['v']
        data_index_list = r_data['data_index_list']
        title = r_data['title']
        subtitle = r_data['subtitle']
        saved = session.get('save_file_list')

        def load_column(identifier, tail_cut):
            # id layout: first char = file index into the saved list, the
            # rest = column name; the value id carries a 7-char suffix
            # that must be stripped (tail_cut=7).
            idx = int(str(identifier)[:1])
            col = str(identifier)[1:] if tail_cut == 0 else str(identifier)[1:-tail_cut]
            frame = pd.read_csv('temp_files/' + saved[idx], sep=',', encoding='utf-8')
            series = frame[col]
            if len(data_index_list) > 0:
                # Restrict to the rows the user picked.
                series = series.iloc[data_index_list]
            return series.values.tolist()

        x_data = load_column(k, 0)
        y_data = load_column(v, 7)
        # Pair names with values, then order by numeric value.
        data = [{'name': x_data[i], 'value': y_data[i]} for i in range(len(x_data))]
        data.sort(key=lambda x: float(x.get("value")))
    except Exception as e:
        logging.info(e)
        print(e)
        return jsonify({'status': 'fail'})
    else:
        return jsonify({'status': 'success', 'title': title, 'subtitle': subtitle, 'data': data})


@table.route('/get_w_c', methods=['POST'])
def get_w_c():
    """
    Build word-cloud data: occurrence counts of every value in the selected columns.

    Request JSON: ys (column tokens joined by the "yy_rm" sentinel), title,
    subtitle, shape, data_index_list (optional row filter). Each token's first
    character is the uploaded-file index; the rest is the column name.
    :return: JSON with status, title, subtitle, shape and the counts
             serialized as a JSON string; {'status': 'fail'} on any error.
    """
    from collections import Counter

    try:
        wc_data = json.loads(request.data.decode())
        ys = wc_data['ys']
        title = wc_data['title']
        subtitle = wc_data['subtitle']
        shape = wc_data['shape']
        data_index_list = wc_data['data_index_list']

        # Tokens are separated by the "yy_rm" sentinel; drop the empty tail.
        y_names = str(ys).replace(' ', '').split("yy_rm")[:-1]
        file_name = session.get('save_file_list')

        # Counter replaces the manual dict + `not in wc.keys()` counting loop.
        counts = Counter()
        for name in y_names:
            y_index = int(str(name)[:1])
            y_file = 'temp_files/' + file_name[y_index]
            y_content = pd.read_csv(y_file, sep=',', encoding='utf-8')
            column = y_content[name[1:]]
            if len(data_index_list) > 0:
                column = column.iloc[data_index_list]
            counts.update(column.values.tolist())

        data = [{'name': key, 'value': value} for key, value in counts.items()]
        # json.dumps stays valid even when a word contains quote characters,
        # unlike the former str(...).replace("'", '"') hack which produced
        # broken JSON for such values.
        data = json.dumps(data, ensure_ascii=False)
    except Exception as e:
        logging.info(e)
        print(e)
        return jsonify({'status': 'fail'})
    else:
        print(data)
        return jsonify({'status': 'success', 'title': title, 'subtitle': subtitle, 'shape': shape, 'data': data})


@table.route('/save_chart', methods=['POST'])
def save_chart():
    """
    Save the chart option posted by the client into the in-memory store.
    :return: JSON status ('success' or 'fail')
    """
    try:
        payload = json.loads(request.data.decode())
        dataStore.option = payload['save_option']
    except Exception as e:
        logging.info(e)
        return jsonify({'status': 'fail'})
    return jsonify({'status': 'success'})

@table.route('/del_chart/<int:index>')
def del_chart(index):
    """
    Remove the saved view at the given position from the in-memory store.
    :param index: position of the view to delete
    :return: JSON status ('success' or 'fail')
    """
    try:
        # pop raises IndexError for an out-of-range index, same as `del`.
        dataStore.option.pop(index)
    except Exception as e:
        logging.info(e)
        return jsonify({'status': 'fail'})
    return jsonify({'status': 'success'})

@table.route('/get_col_split/<string:parameter>')
def get_col_split(parameter):
    """
    Render the column-split page.
    :param parameter: "<file index>+<tag char><column name>"
    :return: col_split page, or the login page when not logged in
    """
    if session.get('isLogin') != 1:
        return render_template("login.html", status='false')
    parts = str(parameter).split("+")
    # First char after the "+" is a tag character; the column name follows it.
    return render_template("col_split.html", index=parts[0], column=parts[1][1:])


@table.route('/get_data_filter/<string:parameter>')
def get_data_filter(parameter):
    """
    Render the data-filter page.
    :param parameter: "<file index>+<tag char><column name>"
    :return: data_filter page, or the login page when not logged in
    """
    if session.get('isLogin') != 1:
        return render_template("login.html", status='false')
    parts = str(parameter).split("+")
    # First char after the "+" is a tag character; the column name follows it.
    return render_template("data_filter.html", index=parts[0], column=parts[1][1:])


@table.route('/rollback')
def rollback():
    """
    Undo the last operation by swapping the working temp file with its
    rollback copy (three renames through a scratch name).
    :return: JSON status ('success' or 'fail')
    """
    try:
        rollback_file = session.get('rollback_file')
        temp_file = session.get('temp_file')

        # Unique scratch path: 8 random lowercase letters + 7-digit timestamp.
        stamp = str(int(time.time()))[:7]
        letters = ''.join(chr(c) for c in np.random.randint(97, 123, 8))
        scratch = 'temp_files/{}.csv'.format(letters + stamp)

        os.rename(temp_file, scratch)        # temp     -> scratch
        os.rename(rollback_file, temp_file)  # rollback -> temp
        os.rename(scratch, rollback_file)    # scratch  -> rollback
    except Exception as e:
        logging.info(e)
        print(e)
        return jsonify({'status': 'fail'})
    return jsonify({'status': 'success'})


@table.route('/check_filter')
def check_filter():
    """
    Report whether the last filter operation succeeded.
    :return: JSON status, plus the filtered file name on success
    """
    # NOTE: the flag is stored in the session as the string 'true'.
    if session.get('is_filter') == 'true':
        return jsonify({'status': 'success', 'filter_name': session.get('filter_name')})
    return jsonify({'status': 'fail'})

@table.route('/dashboard')
def dashboard():
    """
    Dashboard page: render every chart option saved in the in-memory store.
    :return: dashboard page with the saved views embedded as JSON, or a
             redirect to the chart builder when nothing is saved or the
             user is not logged in
    """
    # All saved view options (populated by /save_chart).
    options = dataStore.option
    print(options)

    if len(options) > 0 and session.get('isLogin') == 1:
        # json.dumps emits true/false/null and escapes quotes correctly,
        # unlike the former str() + replace("True"/"False") hack, which
        # corrupted string values that merely contained those words and
        # produced invalid output for None values.
        data = json.dumps(options, ensure_ascii=False)
        return render_template("dashboard.html", options=data)

    # Nothing to show (or not logged in): back to the chart builder.
    return redirect("/create_chart")


def _top20_pair_sum(data, left_col, right_col, value_col='pax_qty'):
    """
    Sum *value_col* per category for two complementary columns (e.g. departure
    vs. arrival city), inner-join the two aggregates on the category value and
    return the 20 rows with the largest combined total as
    [category, left_sum, right_sum] lists.
    """
    left = data[[left_col, value_col]].groupby(by=left_col).sum().reset_index()
    right = data[[right_col, value_col]].groupby(by=right_col).sum().reset_index()
    merged = pd.merge(left, right, how='inner', left_on=left_col, right_on=right_col)
    merged = merged.drop(right_col, axis=1)
    # After the merge, pandas suffixes the duplicated value column with _x/_y.
    merged[value_col + '_sum'] = merged[value_col + '_x'] + merged[value_col + '_y']
    merged = merged.sort_values(by=[value_col + '_sum'], ascending=False)
    merged = merged.drop(value_col + '_sum', axis=1)
    return merged.head(20).values.tolist()


@table.route('/show_chart/<int:file_index>')
def show_chart(file_index):
    """
    Render the fixed flight-data dashboard for one uploaded CSV.

    Assumes the CSV has the flight schema: day_id, dpt_cty_cd, arrv_cty_cd,
    dpt_airpt_cd, arrv_airpt_cd, flt_nbr, flt_rte_cd, pax_qty, fc_pax_qty,
    grp_pax_qty, ffp_pax_qty, net_amt.
    :param file_index: position of the file in session['file_name']
    :return: index page with six aggregated data sources, error page on
             failure, or the login page when not logged in
    """
    if session.get('isLogin') != 1:
        return render_template("login.html", status='false')
    try:
        file_name = 'temp_files/' + session.get('file_name')[file_index]
        data = pd.read_csv(file_name, encoding='utf-8')
        data['day_id'] = pd.to_datetime(data['day_id'])

        # Top-20 cities / airports by combined departure + arrival passengers
        # (the same aggregation, parameterized over the column pair).
        source1 = _top20_pair_sum(data, 'dpt_cty_cd', 'arrv_cty_cd')
        source2 = _top20_pair_sum(data, 'dpt_airpt_cd', 'arrv_airpt_cd')

        # Daily passenger totals, chronological.
        day_group1 = data[["day_id", "pax_qty", "fc_pax_qty", "grp_pax_qty", "ffp_pax_qty"]].groupby(by='day_id')
        source3 = day_group1.sum().sort_values(by='day_id', ascending=True).reset_index()
        source3['day_id'] = source3['day_id'].astype(str)
        source3 = source3.values.tolist()

        # Passenger totals per flight number.
        day_group2 = data[["flt_nbr", "pax_qty", "fc_pax_qty", "grp_pax_qty", "ffp_pax_qty"]].groupby(by='flt_nbr')
        source4 = day_group2.sum().reset_index().values.tolist()

        # Net amount summed per passenger count, rounded to 2 decimals.
        day_group3 = data[["pax_qty", "net_amt"]].groupby(by='pax_qty')
        source5 = day_group3.sum().reset_index().values.tolist()
        source5 = [[row[0], round(float(row[1]), 2)] for row in source5]

        # Passenger totals per route code.
        day_group4 = data[["flt_rte_cd", "pax_qty"]].groupby(by='flt_rte_cd')
        source6 = day_group4.sum().reset_index().values.tolist()
    except Exception as e:
        logging.info(e)
        return render_template('error.html')
    else:
        return render_template('index.html', source1=source1, source2=source2, source3=source3, source4=source4,
                               source5=source5, source6=source6)
