import csv
import io
import chardet
from flask import Blueprint, jsonify, request
from flask_jwt_extended import jwt_required
from libs.conn import *
from libs.utils import *

# Blueprint grouping the log query / export / archive API endpoints.
logs_bp = Blueprint('logs', __name__)

@logs_bp.route('/api/get_logs', methods=['GET'])
@jwt_required()
def get_logs():
    """Return one page of log rows, optionally filtered.

    Query params:
        log_db_index: '0' or absent = live DB; N >= 1 = Nth archived log DB.
        client_id: exact-match filter on client_id.
        log_time / search: substring (LIKE) filters on log_time / log_content.
        page_size / page_current: pagination; default 20 rows, page 1.

    Returns a dict with 'data' (rows) and 'page_total' (unpaged match count).
    """
    log_db_index = request.args.get('log_db_index')
    client_id = request.args.get('client_id')
    log_time = request.args.get('log_time')
    search = request.args.get('search')
    # type=int makes Flask fall back to the default on missing or
    # non-numeric values instead of raising TypeError/ValueError (500).
    page_size = request.args.get('page_size', default=20, type=int)
    page_current = request.args.get('page_current', default=1, type=int)
    if log_db_index == '0' or log_db_index is None:
        conn, cursor = create_conn()
    else:
        archive_dbs = get_archive_log_dbs()
        if len(archive_dbs) < int(log_db_index):
            return { 'status': 'error', 'data': '无效的日志文件索引' }
        archive_name = archive_dbs[int(log_db_index) - 1]
        conn, cursor = create_conn(f'{log_dir_name}/{archive_name}')
    try:
        # Build the WHERE clause once; it is shared by the page query and
        # the COUNT query. Values go through placeholders (no SQL injection).
        sql = "FROM logs WHERE 1=1"
        params = []
        if client_id is not None:
            sql += " AND client_id = ?"
            params.append(client_id)
        if log_time is not None:
            sql += " AND log_time like ?"
            params.append('%' + log_time + '%')
        if search is not None:
            sql += " AND log_content like ?"
            params.append('%' + search + '%')
        offset = (page_current - 1) * page_size
        cursor.execute('SELECT * ' + sql + " ORDER BY log_id DESC LIMIT ? OFFSET ?", tuple(params + [page_size, offset]))
        rows = rows_to_dict(cursor, cursor.fetchall())
        cursor.execute('SELECT COUNT(*) ' + sql, tuple(params))
        total_count = cursor.fetchone()[0]
    finally:
        # Release the DB handle even if a query raises (no connection leak).
        cursor.close()
        conn.close()
    return { 'status': 'ok', 'data': rows, 'page_total': total_count}

@logs_bp.route('/api/export_logs')
@jwt_required()
def export_logs():
    """Export up to 50000 filtered log rows to a file and return its name.

    Accepts the same log_db_index / client_id / log_time / search query
    params as get_logs. Returns {'status': 'error', ...} when no rows match.
    """
    log_db_index = request.args.get('log_db_index')
    client_id = request.args.get('client_id')
    log_time = request.args.get('log_time')
    search = request.args.get('search')
    if log_db_index == '0' or log_db_index is None:
        conn, cursor = create_conn()
    else:
        # BUG FIX: this previously called get_archive_dbs(), which at call
        # time resolves to the route handler below and returns a response
        # dict — indexing it by position then raised KeyError. Use the same
        # helper that get_logs uses.
        archive_dbs = get_archive_log_dbs()
        if len(archive_dbs) < int(log_db_index):
            return { 'status': 'error', 'data': '无效的日志文件索引' }
        archive_name = archive_dbs[int(log_db_index) - 1]
        conn, cursor = create_conn(f'{log_dir_name}/{archive_name}')
    try:
        # Same parameterized filter-building as get_logs.
        sql = "FROM logs WHERE 1=1"
        params = []
        if client_id is not None:
            sql += " AND client_id = ?"
            params.append(client_id)
        if log_time is not None:
            sql += " AND log_time like ?"
            params.append('%' + log_time + '%')
        if search is not None:
            sql += " AND log_content like ?"
            params.append('%' + search + '%')
        cursor.execute('SELECT * ' + sql + " LIMIT 50000", tuple(params))
        rows = cursor.fetchall()
        # export_data reads cursor metadata, so it must run before close().
        file_name = export_data(cursor, rows) if rows else None
    finally:
        # Release the DB handle on every path (no connection leak).
        cursor.close()
        conn.close()
    if file_name is None:
        return { 'status': 'error', 'data': '没有数据' }
    system_log('导出日志文件：' + file_name)
    return { 'status': 'ok', 'data': file_name }

# Log archiving
@logs_bp.route('/api/archive_logs', methods=['POST'])
@jwt_required()
def handle_archive_logs():
    """Archive every log dated on or before the supplied day, then delete
    the originals — but only if the archive row count matches exactly.

    Expects JSON body {'limit_time': 'YYYY-MM-DD'}; the day must be
    strictly before today.
    """
    limit_time_str = request.json.get('limit_time')
    limit_time = parse_time(limit_time_str, '%Y-%m-%d')
    # Lexicographic comparison is safe for zero-padded %Y-%m-%d strings.
    if limit_time.strftime('%Y-%m-%d') >= datetime.now().strftime('%Y-%m-%d'):
        return { 'status': 'error', 'data': '不能归档时间大于等于今天的日志' }
    # Include the whole limit day: everything strictly before the next day.
    cutoff = (limit_time + timedelta(days=1)).strftime('%Y-%m-%d')
    logs_count = fetchvalue('SELECT COUNT(*) FROM logs WHERE log_time < ?', (cutoff,))
    if not logs_count:
        return { 'status': 'error', 'data': f'{limit_time_str}之前没有日志，无需归档' }
    archived_logs_count, archive_path = archive_logs(limit_time)
    if archived_logs_count != logs_count:
        # Archive incomplete: discard the partial archive file, keep originals.
        remove_db_file(archive_path)
        return { 'status': 'error', 'data': '归档失败，请检查日志文件' }
    # Archive verified complete: safe to delete the archived originals.
    execute('DELETE FROM logs WHERE log_time < ?', (cutoff,))
    system_log('归档日志，归档数量：' + str(archived_logs_count))
    return { 'status': 'ok', 'data': logs_count }

# List of archived log databases
@logs_bp.route('/api/get_archive_dbs', methods=['GET'])
@jwt_required()
def get_archive_dbs():
    """Return the archived log database files available for query/export."""
    return { 'status': 'ok', 'data': get_archive_log_dbs() }
