import csv
import datetime
from io import StringIO

from flask import Response
from flask import request, jsonify
from sqlalchemy import text

from tzq.extensions import db
from tzq.sql_file_analysis_tool import sfat_bp
from tzq.sql_file_analysis_tool.sfat_model import SqlFileAnalysis


# Index page: paginated listing of previously uploaded files
@sfat_bp.route('/api/uploadedFiles')
def get_uploaded_files():
    """Return one page of uploaded files as layui-style JSON.

    Query args:
        page:  1-based page number (default 1).
        limit: rows per page (default 10).

    Returns:
        JSON ``{'code': 0, 'msg': '', 'count': <total rows>, 'data': [rows]}``
        where each row has id, filename, content, upload_time (formatted
        'yyyy-mm-dd hh24:mi:ss') and file_size.
    """
    page = int(request.args.get('page', 1))
    limit = int(request.args.get('limit', 10))
    offset = (page - 1) * limit

    # Page of rows, newest upload first; id breaks ties for a stable order.
    v_record_sql = text(
        """
        select id
             , filename
             , content
             , to_char(upload_time, 'yyyy-mm-dd hh24:mi:ss') as upload_time
             , file_size
        from cs_uploaded_file_t
        where 1 = 1
        order by upload_time desc, id desc limit :limit
        OFFSET :offset
        """)
    # Total row count for the pager.
    v_count_sql = text(
        """
        select count(*) as sl
        from cs_uploaded_file_t
        where 1 = 1
        """)
    # Fetch the page and convert each Row to a plain dict for jsonify.
    rows = db.session.execute(v_record_sql, {
        'limit': limit,
        'offset': offset
    }).fetchall()
    records = [row._asdict() for row in rows]
    # scalar() reads the single count column directly.
    v_count_num = int(db.session.execute(v_count_sql).scalar())
    return jsonify({
        'code': 0,
        'msg': '',
        'count': v_count_num,
        'data': records
    })


# SQL-file analysis results: CSV export or paginated JSON search
# Searchable columns, in the order the UI sends them.  One shared list drives
# both the ORM filters (CSV export branch) and the raw-SQL where clauses
# (paginated JSON branch), replacing 13 copy-pasted if/else pairs per branch
# and keeping the two branches consistent.
_ANALYSIS_SEARCH_FIELDS = (
    'id',
    'file_name',
    'file_content',
    'sql_num',
    'sql_type',
    'sql_detail',
    'table_name',
    'index_name',
    'has_last_update_date',
    'update_has_where_condition',
    'delete_has_where_condition',
    'check_chinese_semicolon',
    'first_5_lines_has_search_path',
)


@sfat_bp.route('/api/getSqlFileAnalysisResults', methods=['GET', 'POST'])
def get_sql_file_analysis_results():
    """Serve SQL-file analysis results.

    With ``export=1`` in the query string, return every matching row as a
    UTF-8 CSV attachment; otherwise return one page of matching rows as
    layui-style JSON (``code``/``msg``/``count``/``data``).  All search
    fields in ``_ANALYSIS_SEARCH_FIELDS`` are optional substring filters.
    """
    # Collect the (optional) search values once; both branches use them.
    filters = {name: request.values.get(name) for name in _ANALYSIS_SEARCH_FIELDS}
    if request.args.get('export') == '1':
        return _export_analysis_csv(filters)
    return _analysis_page_json(filters)


def _export_analysis_csv(filters):
    """Build a CSV attachment of every analysis row matching *filters*."""
    query = SqlFileAnalysis.query
    for name, value in filters.items():
        if value:
            # Case-insensitive substring match, mirroring the case-insensitive
            # regex (~*) used by the JSON branch.
            query = query.filter(getattr(SqlFileAnalysis, name).ilike(f'%{value}%'))

    output = StringIO()
    output.write('\ufeff')  # UTF-8 BOM so Excel detects the encoding
    writer = csv.writer(output, quoting=csv.QUOTE_ALL)

    # Chinese column headers expected by the downstream consumers.
    writer.writerow([
        '文件ID', '文件名', 'SQL序号', 'SQL类型', '行号', 'SQL行数', 'SQL明细', '表名',
        '索引名', '是否更新last_update_date', 'update有where条件', 'delete有where条件', '无中文分号', '有search_path'
    ])

    # .all() replaces the previous paginate(page=1, per_page=1000000) hack:
    # the export always covered every matching row anyway.
    for item in query.all():
        writer.writerow([
            item.id,
            item.file_name,
            item.sql_num,
            item.sql_type,
            item.line_numbers,
            item.line_count,
            item.sql_detail,
            item.table_name,
            item.index_name,
            item.has_last_update_date,
            item.update_has_where_condition,
            item.delete_has_where_condition,
            item.check_chinese_semicolon,
            item.first_5_lines_has_search_path,
        ])

    date_string = datetime.datetime.now().strftime("%Y%m%d-%H%M%S")
    return Response(
        output.getvalue(),
        mimetype="text/csv; charset=utf-8",
        headers={
            "Content-disposition": "attachment; filename=" + date_string + "-sql_file_analysis_data.csv",
            "Content-type": "text/csv; charset=utf-8"
        }
    )


def _analysis_page_json(filters):
    """Return one page of analysis rows matching *filters* as layui JSON."""
    page = int(request.args.get('page', 1))
    limit = int(request.args.get('limit', 10))
    offset = (page - 1) * limit

    # One "and <col>::text ~* :<col>" clause per non-empty filter.  Only the
    # column NAME (from our own whitelist above) is interpolated; user-supplied
    # values travel as bound parameters, so no SQL injection is possible.
    where_extra = ''.join(
        f"\n              and {name}::text ~* :{name}"
        for name, value in filters.items() if value
    )
    # Bind only the parameters actually referenced by the statement.
    params = {name: value for name, value in filters.items() if value}

    v_record_sql = text(f"""
        select id,
               file_name,
               file_content,
               sql_num,
               sql_type,
               line_numbers,
               line_count,
               each_sql_content,
               sql_detail,
               table_name,
               index_name,
               has_last_update_date,
               update_has_where_condition,
               delete_has_where_condition,
               check_chinese_semicolon,
               first_5_lines_has_search_path
        from sql_file_analysis_tool_t
        where 1 = 1{where_extra}
        limit :limit
        OFFSET :offset
        """)
    # Matching-row total for the pager, with the identical where clause.
    v_count_sql = text(f"""
        select count(*) as sl
        from sql_file_analysis_tool_t
        where 1 = 1{where_extra}
        """)

    rows = db.session.execute(
        v_record_sql, dict(params, limit=limit, offset=offset)
    ).fetchall()
    records = [row._asdict() for row in rows]
    # scalar() reads the single count column directly.
    v_count_num = int(db.session.execute(v_count_sql, params).scalar())
    return jsonify({
        'code': 0,
        'msg': '',
        'count': v_count_num,
        'data': records
    })
