import mysql.connector
from datetime import datetime
from flask import Flask, request, jsonify
from flask_cors import CORS
import time
import random
from datetime import datetime
import json
import requests

import os  # local import keeps this env-based config block self-contained

app = Flask(__name__)
CORS(app)
# Database connection settings.
# SECURITY: credentials were hard-coded in source. They can now be supplied
# via environment variables; the original literals remain as fallbacks so
# existing deployments keep working unchanged.
hostname = os.environ.get('DB_HOST', '192.168.43.112')    # database host address
port = int(os.environ.get('DB_PORT', '3306'))             # MySQL default port 3306
database = os.environ.get('DB_NAME', 'aibi')              # MySQL database name
username = os.environ.get('DB_USER', 'database')          # MySQL user (a dedicated user is preferred over root)
password = os.environ.get('DB_PASSWORD', 'Pisces.633')    # database password — move to a secret store

# Factory for database connections.
def get_db_connection():
    """Open and return a new MySQL connection using module-level settings."""
    settings = {
        'host': hostname,
        'port': port,
        'user': username,
        'password': password,
        'database': database,
    }
    return mysql.connector.connect(**settings)


# ####################################
#                                    #
#         日志表中涉及到的查询        #
#                                    #   
######################################   

# Dynamic log query with optional filters.
@app.route('/query_simulation_logs', methods=['GET'])
def query_simulation_logs():
    """Return SIMULATION_LOG rows with EXPOSURE_TIME after the given bound.

    Query params:
        exposure_time (required): lower bound, format YYYY-MM-DD HH:MM:SS.
        user_id, clicknews_id, news_category, news_topic (optional):
            additional equality filters.

    Non-empty results are recorded in QUERY_LOG (best effort).
    """
    try:
        # Read query parameters.
        user_id = request.args.get('user_id')
        clicknews_id = request.args.get('clicknews_id')
        news_category = request.args.get('news_category')
        news_topic = request.args.get('news_topic')
        exposure_time = request.args.get('exposure_time')  # expected YYYY-MM-DD HH:MM:SS

        # exposure_time is mandatory.
        if not exposure_time:
            return jsonify({"status": "error", "message": "exposure_time参数是必需的"}), 400

        conn = get_db_connection()
        cursor = conn.cursor(dictionary=True)
        try:
            # Base query; optional filters are appended below.
            query = """
                SELECT 
                    SIMULATION_LOG_ID, USER_ID, CLICKNEWS_ID, 
                    NEWS_CATEGORY, NEWS_TOPIC, DWELLTIME, EXPOSURE_TIME
                FROM SIMULATION_LOG
                WHERE EXPOSURE_TIME > %s
            """
            params = [exposure_time]

            # Optional equality filters (values are bound, never interpolated).
            if user_id:
                query += " AND USER_ID = %s"
                params.append(user_id)
            if clicknews_id:
                query += " AND CLICKNEWS_ID = %s"
                params.append(clicknews_id)
            if news_category:
                query += " AND NEWS_CATEGORY = %s"
                params.append(news_category)
            if news_topic:
                query += " AND NEWS_TOPIC = %s"
                params.append(news_topic)

            # Execute and time the query.
            start_time = time.perf_counter()
            cursor.execute(query, params)
            results = cursor.fetchall()
            query_spend_time = round(time.perf_counter() - start_time, 6)
            if results:  # only record a log entry when rows were found
                try:
                    log_query = """
                        INSERT INTO QUERY_LOG 
                        (QUERY_TIME, QUERY_SPEND_TIME, QUERY_TYPE, QUERY_SQL)
                        VALUES (%s, %s, %s, %s)
                    """
                    current_time = datetime.now().strftime('%Y-%m-%d %H:%M:%S')
                    cursor.execute(log_query, (
                        current_time,
                        query_spend_time,
                        '动态查询日志',
                        query
                    ))
                    conn.commit()
                except Exception as log_error:
                    # A logging failure must not break the main query path.
                    conn.rollback()
                    app.logger.error(f"记录查询日志失败: {str(log_error)}")
        finally:
            # Fix: release DB resources even when the query raises — the
            # original leaked the connection on any exception before close().
            cursor.close()
            conn.close()

        return jsonify({"status": "success", "data": results})

    except Exception as e:
        return jsonify({"status": "error", "message": str(e)}), 500
    
# Query a single log row by any combination of its columns.
@app.route('/query_simulation_log', methods=['GET'])
def get_simulation_log():
    """Return the first SIMULATION_LOG row matching all supplied filters.

    Accepts any subset of: simulation_log_id, user_id, clicknews_id,
    news_category, news_topic, dwelltime, exposure_time. At least one is
    required. A successful lookup is recorded in QUERY_LOG (best effort).
    """
    try:
        # Collect all recognized query parameters (fixed whitelist of columns).
        params = {
            'simulation_log_id': request.args.get('simulation_log_id'),
            'user_id': request.args.get('user_id'),
            'clicknews_id': request.args.get('clicknews_id'),
            'news_category': request.args.get('news_category'),
            'news_topic': request.args.get('news_topic'),
            'dwelltime': request.args.get('dwelltime'),
            'exposure_time': request.args.get('exposure_time')
        }

        # Drop parameters that were not supplied.
        query_params = {k: v for k, v in params.items() if v is not None}
        if not query_params:
            return jsonify({"error": "至少需要一个查询参数"}), 400

        # mysql.connector.connect() raises on failure; it never returns None,
        # so the original "if not conn" check was dead code.
        conn = get_db_connection()
        cursor = conn.cursor(dictionary=True)
        try:
            # Build the WHERE clause dynamically. Column names come from the
            # fixed whitelist above and values are bound parameters, so this
            # is not injectable.
            sql = "SELECT * FROM SIMULATION_LOG WHERE "
            conditions = []
            values = []
            for key, value in query_params.items():
                conditions.append(f"{key} = %s")
                values.append(value)
            sql += " AND ".join(conditions)
            start_time = time.perf_counter()
            cursor.execute(sql, values)
            result = cursor.fetchone()
            query_spend_time = round(time.perf_counter() - start_time, 6)
            if result:  # only record a log entry when a row was found
                try:
                    log_query = """
                        INSERT INTO QUERY_LOG 
                        (QUERY_TIME, QUERY_SPEND_TIME, QUERY_TYPE, QUERY_SQL)
                        VALUES (%s, %s, %s, %s)
                    """
                    current_time = datetime.now().strftime('%Y-%m-%d %H:%M:%S')
                    cursor.execute(log_query, (
                        current_time,
                        query_spend_time,
                        '查询单个日志',
                        sql
                    ))
                    conn.commit()
                except Exception as log_error:
                    # A logging failure must not break the main query path.
                    conn.rollback()
                    app.logger.error(f"记录查询日志失败: {str(log_error)}")
        finally:
            # Fix: release DB resources even when the query raises.
            cursor.close()
            conn.close()

        if result:
            return jsonify({"status": "success", "data": result})
        else:
            return jsonify({"status": "not_found", "message": "未找到匹配记录"}), 404

    except Exception as e:
        return jsonify({"error": str(e)}), 500

# Per-day dwell time totals for a single news item.
@app.route('/get_dwelltime_sum', methods=['GET'])
def get_dwelltime_sum():
    """Return daily DWELLTIME sums for one news item within a time range.

    Query params (all required):
        clicknews_id: the news item.
        start_time, end_time: range bounds, format YYYY-MM-DD HH:MM:SS.

    Non-empty results are recorded in QUERY_LOG (best effort).
    """
    try:
        # Read query parameters.
        clicknews_id = request.args.get('clicknews_id')
        start_time = request.args.get('start_time')
        end_time = request.args.get('end_time')

        # All three parameters are mandatory.
        if not all([clicknews_id, start_time, end_time]):
            return jsonify({
                "status": "error",
                "message": "clicknews_id, start_time和end_time参数都是必需的"
            }), 400

        # Validate timestamp format.
        try:
            datetime.strptime(start_time, '%Y-%m-%d %H:%M:%S')
            datetime.strptime(end_time, '%Y-%m-%d %H:%M:%S')
        except ValueError:
            return jsonify({
                "status": "error",
                "message": "时间格式应为YYYY-MM-DD HH:MM:SS"
            }), 400

        conn = get_db_connection()
        cursor = conn.cursor()
        try:
            # Sum dwell time per calendar day within the range.
            query = """
                SELECT 
                    DATE(EXPOSURE_TIME) as date,
                    SUM(DWELLTIME) as total_dwelltime
                FROM SIMULATION_LOG
                WHERE CLICKNEWS_ID = %s
                  AND EXPOSURE_TIME BETWEEN %s AND %s
                GROUP BY DATE(EXPOSURE_TIME)
                ORDER BY DATE(EXPOSURE_TIME)
            """
            query_start_time = time.perf_counter()
            cursor.execute(query, (clicknews_id, start_time, end_time))
            results = cursor.fetchall()
            query_spend_time = round(time.perf_counter() - query_start_time, 6)
            if results:  # only record a log entry when rows were found
                try:
                    log_query = """
                        INSERT INTO QUERY_LOG 
                        (QUERY_TIME, QUERY_SPEND_TIME, QUERY_TYPE, QUERY_SQL)
                        VALUES (%s, %s, %s, %s)
                    """
                    current_time = datetime.now().strftime('%Y-%m-%d %H:%M:%S')
                    cursor.execute(log_query, (
                        current_time,
                        query_spend_time,
                        '查询单个新闻的每一天曝光时间',
                        query
                    ))
                    conn.commit()
                except Exception as log_error:
                    # A logging failure must not break the main query path.
                    conn.rollback()
                    app.logger.error(f"记录查询日志失败: {str(log_error)}")
        finally:
            # Fix: release DB resources even when the query raises.
            cursor.close()
            conn.close()

        # Format per-day rows for the response.
        formatted_results = [
            {
                "date": row[0].strftime('%Y-%m-%d'),
                "total_dwelltime": int(row[1]) if row[1] else 0
            }
            for row in results
        ]

        return jsonify({
            "status": "success",
            "data": {
                "clicknews_id": clicknews_id,
                "start_time": start_time,
                "end_time": end_time,
                "daily_dwelltimes": formatted_results,
                # Fix: SUM() may yield Decimal; int() keeps the grand total
                # JSON-safe and consistent with the per-day integer values.
                "total_sum": int(sum(row[1] for row in results if row[1]))
            }
        })

    except Exception as e:
        return jsonify({"status": "error", "message": str(e)}), 500

# Per-day click/exposure counts for a single news item.
@app.route('/get_news_exposure_count', methods=['GET'])
def get_news_exposure_count():
    """Return daily exposure counts for one news item within a time range.

    Query params (all required):
        clicknews_id: the news item.
        start_time, end_time: range bounds, format YYYY-MM-DD HH:MM:SS.

    Non-empty results are recorded in QUERY_LOG (best effort).
    """
    try:
        # Read query parameters.
        clicknews_id = request.args.get('clicknews_id')
        start_time = request.args.get('start_time')
        end_time = request.args.get('end_time')
        # All three parameters are mandatory.
        if not all([clicknews_id, start_time, end_time]):
            return jsonify({
                "status": "error",
                "message": "clicknews_id, start_time和end_time参数都是必需的"
            }), 400

        # Validate timestamp format.
        try:
            datetime.strptime(start_time, '%Y-%m-%d %H:%M:%S')
            datetime.strptime(end_time, '%Y-%m-%d %H:%M:%S')
        except ValueError:
            return jsonify({
                "status": "error",
                "message": "时间格式应为YYYY-MM-DD HH:MM:SS"
            }), 400

        conn = get_db_connection()
        cursor = conn.cursor()
        try:
            # Count exposures per calendar day within the range.
            query = """
                SELECT 
                    DATE(EXPOSURE_TIME) as date,
                    COUNT(*) as exposure_count
                FROM SIMULATION_LOG
                WHERE CLICKNEWS_ID = %s
                  AND EXPOSURE_TIME BETWEEN %s AND %s
                GROUP BY DATE(EXPOSURE_TIME)
                ORDER BY DATE(EXPOSURE_TIME)
            """
            query_start_time = time.perf_counter()
            cursor.execute(query, (clicknews_id, start_time, end_time))
            results = cursor.fetchall()
            query_spend_time = round(time.perf_counter() - query_start_time, 6)
            if results:  # only record a log entry when rows were found
                try:
                    log_query = """
                        INSERT INTO QUERY_LOG 
                        (QUERY_TIME, QUERY_SPEND_TIME, QUERY_TYPE, QUERY_SQL)
                        VALUES (%s, %s, %s, %s)
                    """
                    current_time = datetime.now().strftime('%Y-%m-%d %H:%M:%S')
                    cursor.execute(log_query, (
                        current_time,
                        query_spend_time,
                        '查询单个新闻的每一天点击次数',
                        query
                    ))
                    conn.commit()
                except Exception as log_error:
                    # A logging failure must not break the main query path.
                    conn.rollback()
                    app.logger.error(f"记录查询日志失败: {str(log_error)}")
        finally:
            # Fix: release DB resources even when the query raises.
            cursor.close()
            conn.close()

        # Format per-day rows for the response.
        formatted_results = [
            {
                "date": row[0].strftime('%Y-%m-%d'),
                "exposure_count": int(row[1]) if row[1] else 0
            }
            for row in results
        ]

        return jsonify({
            "status": "success",
            "data": {
                "clicknews_id": clicknews_id,
                "start_time": start_time,
                "end_time": end_time,
                "daily_exposure_counts": formatted_results,
                "total_count": sum(row[1] for row in results if row[1])
            }
        })

    except Exception as e:
        return jsonify({"status": "error", "message": str(e)}), 500
    
# Per-day click/exposure counts for one news category.
@app.route('/get_category_exposure_count', methods=['GET'])
def get_category_exposure_count():
    """Return daily exposure counts for a news category within a time range.

    Query params (all required):
        news_category: the category to count.
        start_time, end_time: range bounds, format YYYY-MM-DD HH:MM:SS.

    Non-empty results are recorded in QUERY_LOG (best effort).
    """
    try:
        # Read query parameters.
        news_category = request.args.get('news_category')
        start_time = request.args.get('start_time')
        end_time = request.args.get('end_time')

        # All three parameters are mandatory.
        if not all([news_category, start_time, end_time]):
            return jsonify({
                "status": "error",
                "message": "news_category, start_time和end_time参数都是必需的"
            }), 400

        # Validate timestamp format.
        try:
            datetime.strptime(start_time, '%Y-%m-%d %H:%M:%S')
            datetime.strptime(end_time, '%Y-%m-%d %H:%M:%S')
        except ValueError:
            return jsonify({
                "status": "error",
                "message": "时间格式应为YYYY-MM-DD HH:MM:SS"
            }), 400

        conn = get_db_connection()
        cursor = conn.cursor()
        try:
            # Count exposures for the category per calendar day.
            query = """
                SELECT 
                    DATE(EXPOSURE_TIME) as date,
                    COUNT(*) as exposure_count
                FROM SIMULATION_LOG
                WHERE NEWS_CATEGORY = %s
                  AND EXPOSURE_TIME BETWEEN %s AND %s
                GROUP BY DATE(EXPOSURE_TIME)
                ORDER BY DATE(EXPOSURE_TIME)
            """
            query_start_time = time.perf_counter()
            cursor.execute(query, (news_category, start_time, end_time))
            results = cursor.fetchall()
            query_spend_time = round(time.perf_counter() - query_start_time, 6)
            if results:  # only record a log entry when rows were found
                try:
                    log_query = """
                        INSERT INTO QUERY_LOG 
                        (QUERY_TIME, QUERY_SPEND_TIME, QUERY_TYPE, QUERY_SQL)
                        VALUES (%s, %s, %s, %s)
                    """
                    current_time = datetime.now().strftime('%Y-%m-%d %H:%M:%S')
                    cursor.execute(log_query, (
                        current_time,
                        query_spend_time,
                        '查询某一类型新闻的每一天点击次数',
                        query
                    ))
                    conn.commit()
                except Exception as log_error:
                    # A logging failure must not break the main query path.
                    conn.rollback()
                    app.logger.error(f"记录查询日志失败: {str(log_error)}")
        finally:
            # Fix: release DB resources even when the query raises.
            cursor.close()
            conn.close()

        # Format per-day rows for the response.
        formatted_results = [
            {
                "date": row[0].strftime('%Y-%m-%d'),
                "exposure_count": int(row[1]) if row[1] else 0
            }
            for row in results
        ]

        return jsonify({
            "status": "success",
            "data": {
                "news_category": news_category,
                "start_time": start_time,
                "end_time": end_time,
                "daily_exposure_counts": formatted_results,
                "total_count": sum(row[1] for row in results if row[1])
            }
        })

    except Exception as e:
        return jsonify({"status": "error", "message": str(e)}), 500

# Per-day click/exposure counts for one user.
@app.route('/get_user_exposure_count', methods=['GET'])
def get_user_exposure_count():
    """Return daily exposure counts for a user within a time range.

    Query params (all required):
        user_id: the user to count.
        start_time, end_time: range bounds, format YYYY-MM-DD HH:MM:SS.

    Non-empty results are recorded in QUERY_LOG (best effort).
    """
    try:
        # Read query parameters.
        user_id = request.args.get('user_id')
        start_time = request.args.get('start_time')
        end_time = request.args.get('end_time')

        # All three parameters are mandatory.
        if not all([user_id, start_time, end_time]):
            return jsonify({
                "status": "error",
                "message": "user_id, start_time和end_time参数都是必需的"
            }), 400

        # Validate timestamp format.
        try:
            datetime.strptime(start_time, '%Y-%m-%d %H:%M:%S')
            datetime.strptime(end_time, '%Y-%m-%d %H:%M:%S')
        except ValueError:
            return jsonify({
                "status": "error",
                "message": "时间格式应为YYYY-MM-DD HH:MM:SS"
            }), 400

        conn = get_db_connection()
        cursor = conn.cursor()
        try:
            # Count the user's exposures per calendar day.
            query = """
                SELECT 
                    DATE(EXPOSURE_TIME) as date,
                    COUNT(*) as exposure_count
                FROM SIMULATION_LOG
                WHERE USER_ID = %s
                  AND EXPOSURE_TIME BETWEEN %s AND %s
                GROUP BY DATE(EXPOSURE_TIME)
                ORDER BY DATE(EXPOSURE_TIME)
            """
            query_start_time = time.perf_counter()
            cursor.execute(query, (user_id, start_time, end_time))
            results = cursor.fetchall()
            query_spend_time = round(time.perf_counter() - query_start_time, 6)
            if results:  # only record a log entry when rows were found
                try:
                    log_query = """
                        INSERT INTO QUERY_LOG 
                        (QUERY_TIME, QUERY_SPEND_TIME, QUERY_TYPE, QUERY_SQL)
                        VALUES (%s, %s, %s, %s)
                    """
                    current_time = datetime.now().strftime('%Y-%m-%d %H:%M:%S')
                    cursor.execute(log_query, (
                        current_time,
                        query_spend_time,
                        '查询某一用户观看新闻的每一天点击次数',
                        query
                    ))
                    conn.commit()
                except Exception as log_error:
                    # A logging failure must not break the main query path.
                    conn.rollback()
                    app.logger.error(f"记录查询日志失败: {str(log_error)}")
        finally:
            # Fix: release DB resources even when the query raises.
            cursor.close()
            conn.close()

        # Format per-day rows for the response.
        formatted_results = [
            {
                "date": row[0].strftime('%Y-%m-%d'),
                "exposure_count": int(row[1]) if row[1] else 0
            }
            for row in results
        ]

        return jsonify({
            "status": "success",
            "data": {
                "user_id": user_id,
                "start_time": start_time,
                "end_time": end_time,
                "daily_exposure_counts": formatted_results,
                "total_count": sum(row[1] for row in results if row[1])
            }
        })

    except Exception as e:
        return jsonify({"status": "error", "message": str(e)}), 500
    

# Aggregate statistics over NEWS joined with SIMULATION_LOG.
@app.route('/get_news_stats', methods=['GET'])
def get_news_stats():
    """Return summary and per-category statistics for news exposures.

    Required: start_time, end_time (YYYY-MM-DD HH:MM:SS).
    Optional: user_id, up to three news_category values (OR-ed together),
    and min/max headline/body character-length bounds.

    Non-empty results are recorded in QUERY_LOG (best effort).
    """
    try:
        # Collect query parameters.
        params = {
            'user_id': request.args.get('user_id'),
            'news_category1': request.args.get('news_category1'),
            'news_category2': request.args.get('news_category2'),
            'news_category3': request.args.get('news_category3'),
            'start_time': request.args.get('start_time'),
            'end_time': request.args.get('end_time'),
            'min_headline_len': request.args.get('min_headline_len', type=int),
            'max_headline_len': request.args.get('max_headline_len', type=int),
            'min_body_len': request.args.get('min_body_len', type=int),
            'max_body_len': request.args.get('max_body_len', type=int)
        }

        # The time range is mandatory.
        if not all([params['start_time'], params['end_time']]):
            return jsonify({
                "status": "error",
                "message": "start_time和end_time是必需参数"
            }), 400

        # Validate timestamp format.
        try:
            datetime.strptime(params['start_time'], '%Y-%m-%d %H:%M:%S')
            datetime.strptime(params['end_time'], '%Y-%m-%d %H:%M:%S')
        except ValueError:
            return jsonify({
                "status": "error",
                "message": "时间格式应为YYYY-MM-DD HH:MM:SS"
            }), 400

        conn = get_db_connection()
        cursor = conn.cursor(dictionary=True)
        try:
            # Base join used as a derived table by both statistics queries.
            # (The inline '#' markers inside the SQL are MySQL comments.)
            base_query = """
                SELECT 
                    n.NEWS_ID,
                    n.HEADLINE,
                    n.NEWS_BODY,
                    n.CATEGORY as NEWS_CATEGORY,  # 添加别名保持一致性
                    n.TOPIC as NEWS_TOPIC,        # 添加别名保持一致性
                    sl.DWELLTIME,
                    sl.EXPOSURE_TIME,
                    sl.USER_ID
                FROM NEWS n
                JOIN SIMULATION_LOG sl ON n.NEWS_ID = sl.CLICKNEWS_ID
                WHERE sl.EXPOSURE_TIME BETWEEN %s AND %s
            """
            query_params = [params['start_time'], params['end_time']]

            # Optional filters; all values are bound as parameters.
            conditions = []
            if params['user_id']:
                conditions.append("sl.USER_ID = %s")
                query_params.append(params['user_id'])
            category_conditions = []
            if params['news_category1']:
                category_conditions.append("n.CATEGORY = %s")
                query_params.append(params['news_category1'])
            if params['news_category2']:
                category_conditions.append("n.CATEGORY = %s")
                query_params.append(params['news_category2'])
            if params['news_category3']:
                category_conditions.append("n.CATEGORY = %s")
                query_params.append(params['news_category3'])

            # Categories are alternatives: OR them inside one parenthesized group.
            if category_conditions:
                conditions.append("(" + " OR ".join(category_conditions) + ")")

            if params['min_headline_len'] is not None:
                conditions.append("CHAR_LENGTH(n.HEADLINE) >= %s")
                query_params.append(params['min_headline_len'])
            if params['max_headline_len'] is not None:
                conditions.append("CHAR_LENGTH(n.HEADLINE) <= %s")
                query_params.append(params['max_headline_len'])
            if params['min_body_len'] is not None:
                conditions.append("CHAR_LENGTH(n.NEWS_BODY) >= %s")
                query_params.append(params['min_body_len'])
            if params['max_body_len'] is not None:
                conditions.append("CHAR_LENGTH(n.NEWS_BODY) <= %s")
                query_params.append(params['max_body_len'])

            if conditions:
                base_query += " AND " + " AND ".join(conditions)

            # Query 1: overall summary statistics.
            stats_query = f"""
                SELECT
                    COUNT(DISTINCT data.NEWS_ID) as news_count,
                    AVG(data.DWELLTIME) as avg_dwelltime,
                    COUNT(DISTINCT data.USER_ID) as user_count,
                    COUNT(*) as log_count,
                    COUNT(DISTINCT data.NEWS_CATEGORY) as category_count  # 新增分类数量统计
                FROM ({base_query}) as data
            """
            query_start_time = time.perf_counter()
            cursor.execute(stats_query, query_params)
            stats = cursor.fetchone() or {
                'news_count': 0,
                'user_count': 0,
                'avg_dwelltime': None,
                'log_count': 0,
                'category_count': 0
            }
            # Query 2: per-category breakdown (same derived table and params).
            category_query = f"""
                SELECT
                    data.NEWS_CATEGORY,
                    COUNT(DISTINCT data.NEWS_ID) as news_count,
                    COUNT(*) as exposure_count,
                    AVG(data.DWELLTIME) as avg_dwelltime,
                    AVG(CHAR_LENGTH(data.HEADLINE)) as avg_headline_len,  # 平均标题长度
                    AVG(CHAR_LENGTH(data.NEWS_BODY)) as avg_body_len      # 平均内容长度
                FROM ({base_query}) as data
                GROUP BY data.NEWS_CATEGORY
            """
            cursor.execute(category_query, query_params)
            category_stats = cursor.fetchall() or []
            query_spend_time = round(time.perf_counter() - query_start_time, 6)
            if category_stats and stats:  # only log when both queries found data
                try:
                    log_query = """
                        INSERT INTO QUERY_LOG 
                        (QUERY_TIME, QUERY_SPEND_TIME, QUERY_TYPE, QUERY_SQL)
                        VALUES (%s, %s, %s, %s)
                    """
                    current_time = datetime.now().strftime('%Y-%m-%d %H:%M:%S')
                    cursor.execute(log_query, (
                        current_time,
                        query_spend_time,
                        '统计查询',
                        stats_query+"\n"+category_query
                    ))
                    conn.commit()
                except Exception as log_error:
                    # A logging failure must not break the main query path.
                    conn.rollback()
                    app.logger.error(f"记录查询日志失败: {str(log_error)}")
        finally:
            # Fix: release DB resources even when either query raises — the
            # original leaked the connection on any exception before close().
            cursor.close()
            conn.close()

        # Shape the response; AVG() results may be Decimal, so cast to float.
        response = {
            "status": "success",
            "data": {
                "summary": {
                    "total_news": stats['news_count'],
                    "user_count": stats['user_count'],
                    "category_count": stats['category_count'],
                    "avg_dwelltime": float(stats['avg_dwelltime']) if stats['avg_dwelltime'] else 0,
                    "total_exposures": stats['log_count']
                },
                "by_category": [
                    {
                        "category": item['NEWS_CATEGORY'],
                        "news_count": item['news_count'],
                        "exposure_count": item['exposure_count'],
                        "avg_dwelltime": float(item['avg_dwelltime']) if item['avg_dwelltime'] else 0,
                        "avg_headline_length": float(item['avg_headline_len']) if item['avg_headline_len'] else 0,
                        "avg_body_length": float(item['avg_body_len']) if item['avg_body_len'] else 0
                    }
                    for item in category_stats
                ]
            }
        }

        return jsonify(response)

    except Exception as e:
        return jsonify({"status": "error", "message": str(e)}), 500
# ####################################
#                                    #
#         新闻表中涉及到的查询        #
#                                    #   
######################################   

# Fetch a single news row by primary key.
@app.route('/get_news_by_id', methods=['GET'])
def get_news_by_id():
    """Return the NEWS row with the given news_id, or 404 if absent.

    A successful lookup is recorded in QUERY_LOG (best effort).
    """
    try:
        # Read and validate the required parameter.
        news_id = request.args.get('news_id')
        if not news_id:
            return jsonify({"status": "error", "message": "news_id参数是必需的"}), 400

        conn = get_db_connection()
        cursor = conn.cursor(dictionary=True)
        try:
            query = "SELECT * FROM NEWS WHERE NEWS_ID = %s"
            # perf_counter gives a high-resolution monotonic timer.
            start_time = time.perf_counter()
            cursor.execute(query, (news_id,))
            result = cursor.fetchone()
            query_spend_time = round(time.perf_counter() - start_time, 6)  # seconds, 6 dp

            if result:  # only record a log entry when a row was found
                try:
                    log_query = """
                        INSERT INTO QUERY_LOG 
                        (QUERY_TIME, QUERY_SPEND_TIME, QUERY_TYPE, QUERY_SQL)
                        VALUES (%s, %s, %s, %s)
                    """
                    current_time = datetime.now().strftime('%Y-%m-%d %H:%M:%S')
                    cursor.execute(log_query, (
                        current_time,
                        query_spend_time,
                        '单个新闻查询',
                        query
                    ))
                    conn.commit()
                except Exception as log_error:
                    # A logging failure must not break the main query path.
                    conn.rollback()
                    app.logger.error(f"记录查询日志失败: {str(log_error)}")
        finally:
            # Fix: release DB resources even when the query raises.
            cursor.close()
            conn.close()

        if result:
            return jsonify({"status": "success", "data": result})
        else:
            return jsonify({"status": "not_found", "message": "未找到匹配的新闻"}), 404

    except Exception as e:
        return jsonify({"status": "error", "message": str(e)}), 500
    
# ####################################
#                                    #
#         查询日志表中涉及到的查询     #
#                                    #   
######################################  
@app.route('/query_logs', methods=['GET'])
def query_logs():
    """Return QUERY_LOG rows within a time range, newest first.

    Required: start_time, end_time (YYYY-MM-DD HH:MM:SS).
    Optional: query_spend_time (exact match, seconds) and query_type.
    """
    try:
        # Collect query parameters.
        params = {
            'start_time': request.args.get('start_time'),  # YYYY-MM-DD HH:MM:SS
            'end_time': request.args.get('end_time'),      # YYYY-MM-DD HH:MM:SS
            'query_spend_time': request.args.get('query_spend_time', type=float),  # seconds
            'query_type': request.args.get('query_type')   # query category label
        }

        # Both range bounds are mandatory.
        if not all([params['start_time'], params['end_time']]):
            return jsonify({
                "status": "error",
                "message": "start_time和end_time必须同时提供"
            }), 400

        # Validate format and ordering of the range.
        try:
            start_dt = datetime.strptime(params['start_time'], '%Y-%m-%d %H:%M:%S')
            end_dt = datetime.strptime(params['end_time'], '%Y-%m-%d %H:%M:%S')
            if start_dt > end_dt:
                return jsonify({
                    "status": "error",
                    "message": "start_time不能晚于end_time"
                }), 400
        except ValueError:
            return jsonify({
                "status": "error",
                "message": "时间格式应为YYYY-MM-DD HH:MM:SS"
            }), 400

        conn = get_db_connection()
        cursor = conn.cursor(dictionary=True)
        try:
            # Base query; optional filters are appended below.
            base_query = """
                SELECT * FROM QUERY_LOG 
                WHERE QUERY_TIME BETWEEN %s AND %s
            """
            query_params = [params['start_time'], params['end_time']]

            if params['query_spend_time'] is not None:
                base_query += " AND QUERY_SPEND_TIME = %s"
                query_params.append(params['query_spend_time'])

            if params['query_type']:
                base_query += " AND QUERY_TYPE = %s"
                query_params.append(params['query_type'])

            # Newest entries first.
            base_query += " ORDER BY QUERY_TIME DESC"

            cursor.execute(base_query, query_params)
            results = cursor.fetchall()
        finally:
            # Fix: release DB resources even when the query raises.
            cursor.close()
            conn.close()

        # Serialize datetime values for JSON output.
        for result in results:
            if 'QUERY_TIME' in result and isinstance(result['QUERY_TIME'], datetime):
                result['QUERY_TIME'] = result['QUERY_TIME'].strftime('%Y-%m-%d %H:%M:%S')

        return jsonify({
            "status": "success",
            "data": {
                "logs": results,
                "time_range": {
                    "start_time": params['start_time'],
                    "end_time": params['end_time']
                },
                "count": len(results)  # total number of rows returned
            }
        })

    except Exception as e:
        return jsonify({
            "status": "error",
            "message": str(e)
        }), 500

# ####################################
#                                    #
#     查询爆款新闻表中涉及到的查询     #
#                                    #   
######################################  
@app.route('/query_hot_news', methods=['GET'])
def query_hot_news():
    """Return every row of HOT_NEWS.

    Non-empty results are recorded in QUERY_LOG (best effort).
    """
    try:
        conn = get_db_connection()
        cursor = conn.cursor(dictionary=True)
        try:
            # Full table scan of HOT_NEWS; no ordering is applied here.
            query = """
                SELECT * FROM HOT_NEWS 
            """
            query_start_time = time.perf_counter()
            cursor.execute(query)
            results = cursor.fetchall()
            query_spend_time = round(time.perf_counter() - query_start_time, 6)
            if results:  # only record a log entry when rows were found
                try:
                    log_query = """
                        INSERT INTO QUERY_LOG 
                        (QUERY_TIME, QUERY_SPEND_TIME, QUERY_TYPE, QUERY_SQL)
                        VALUES (%s, %s, %s, %s)
                    """
                    current_time = datetime.now().strftime('%Y-%m-%d %H:%M:%S')
                    cursor.execute(log_query, (
                        current_time,
                        query_spend_time,
                        '爆款新闻查询',
                        query
                    ))
                    conn.commit()
                except Exception as log_error:
                    # A logging failure must not break the main query path.
                    conn.rollback()
                    app.logger.error(f"记录查询日志失败: {str(log_error)}")
        finally:
            # Fix: release DB resources even when the query raises.
            cursor.close()
            conn.close()

        return jsonify({
            "status": "success",
            "data": results,
            "count": len(results)
        })

    except Exception as e:
        # Log and report the failure.
        app.logger.error(f"查询热门新闻失败: {str(e)}")
        return jsonify({
            "status": "error",
            "message": "获取热门新闻失败",
            "detail": str(e)
        }), 500
    









# Path to the simulation log TSV (forward slashes avoid Windows escape issues).
LOG_FILE = 'D:/桌面/simulation_log.tsv'  # 使用正斜杠避免转义问题
CHUNK_SIZE = 1000  # number of log lines handled per batch — NOTE(review): appears unused in the visible code, confirm
URL = "http://192.168.185.243:44444"  # downstream service endpoint — presumably for `requests` calls; verify against later code
# Reading progress is tracked by byte offset into the file rather than line number.
file_position = 0
last_time = None  # NOTE(review): not referenced in the visible code — confirm intended use
total = 0         # NOTE(review): not referenced in the visible code — confirm intended use
def parse_custom_datetime(time_str):
    """Parse a 'month/day/year hour:minute:second AM/PM' timestamp string.

    NOTE(review): the original comment claimed day/month order, but the
    format string is month-first ('%m/%d/%Y').
    """
    fmt = '%m/%d/%Y %I:%M:%S %p'
    return datetime.strptime(time_str, fmt)


def find_logs_until_time(target_time):
    """Yield log rows from LOG_FILE whose exposure time is <= target_time.

    Resumes reading from the module-level `file_position` byte offset and
    advances it, so successive calls stream the file incrementally. Stops at
    the first row newer than `target_time` and rewinds the offset to that
    row's start so it is re-read on the next call.

    Args:
        target_time: timestamp string in '%Y/%m/%d %H:%M:%S' format.

    Yields:
        dicts with user_id, clicknews_id, news_category, news_topic,
        dwelltime and a normalized exposure_time string.
    """
    global file_position, total

    print(f"\nTarget time: {target_time}")
    print("开始查找日志")
    try:
        target_timestamp = datetime.strptime(target_time, '%Y/%m/%d %H:%M:%S').timestamp()
    except ValueError as e:
        print(f"Invalid target time format: {e}")
        return

    try:
        # Bug fix: the original opened LOG_FILE twice in nested `with` blocks,
        # duplicating the seek/skip-header logic and holding a second, wasted
        # file handle. A single open is sufficient.
        with open(LOG_FILE, 'r', encoding='utf-8') as f:
            f.seek(file_position)

            # On the very first read, skip the TSV header line.
            if file_position == 0:
                f.readline()

            logs_count = 0

            while True:
                current_position = f.tell()
                line = f.readline()

                if not line:
                    break

                # Manually parse the TSV row; expected columns are
                # user_id, clicknews_id, news_category, news_topic,
                # dwelltime, exposure_time.
                row = line.strip().split('\t')
                if len(row) < 6:
                    continue

                try:
                    exposure_time = parse_custom_datetime(row[5]).timestamp()
                    if exposure_time <= target_timestamp:
                        logs_count += 1
                        yield {
                            'user_id': row[0],
                            'clicknews_id': row[1],
                            'news_category': row[2],
                            'news_topic': row[3],
                            'dwelltime': row[4],
                            'exposure_time': parse_custom_datetime(row[5]).strftime('%Y/%m/%d %H:%M:%S')
                        }
                    else:
                        # This row is newer than the target: rewind to its
                        # start so the next call picks it up again.
                        f.seek(current_position)
                        break
                except (ValueError, IndexError) as e:
                    print(f"Error parsing row: {e}")
                    continue

            # Persist how far we got for the next call.
            file_position = f.tell()
            print(f'本次输出日志{logs_count}条')
            total += logs_count
            print(f'共输出日志{total}条')
    except FileNotFoundError:
        print(f"Error: File not found at {LOG_FILE}")
    except Exception as e:
        print(f"Unexpected error: {e}")


def _post_log_batch(batch, n):
    """POST one batch of serialized log entries to the downstream collector.

    NOTE(review): no timeout is set, so a hung collector blocks this endpoint
    indefinitely — consider requests.post(..., timeout=...).
    """
    headers = {'Content-Type': 'application/json'}
    requests.post(URL, headers=headers, data=json.dumps(batch))
    print(f"发送第{n}次请求\n")


@app.route('/api/logs', methods=['GET'])
def get_logs():
    """Forward all log rows up to ?time=... to URL in batches of 50.

    Returns 400 if the `time` query parameter is missing, otherwise a
    success message after all batches have been posted.
    """
    global file_position, last_time

    target_time = request.args.get('time')
    if not target_time:
        return jsonify({'error': 'Time parameter is required'}), 400

    batch = []
    n = 0  # number of batches sent so far
    for idx, log in enumerate(find_logs_until_time(target_time)):
        batch.append({"body": json.dumps(log)})
        if (idx + 1) % 50 == 0:
            n += 1
            # Refactor: the send logic was duplicated inline twice; it now
            # lives in _post_log_batch.
            _post_log_batch(batch, n)
            batch = []

    # Flush the final partial batch, if any.
    if batch:
        n += 1
        _post_log_batch(batch, n)

    return jsonify({
        'message': 'success'
    })

@app.route('/api/reset', methods=['POST'])
def reset_position():
    """Reset the log-reader state so the next /api/logs call starts from the top."""
    # Bug fix: `total` was missing from the global declaration, so the
    # original `total = 0` created a dead local and the module-level counter
    # was never actually reset.
    global file_position, last_time, total
    file_position = 0
    last_time = None
    total = 0
    return jsonify({'message': 'Read position reset to beginning'})


if __name__ == '__main__':
    # Dev entry point: Flask's built-in server with the interactive debugger
    # and reloader. NOTE(review): debug=True must not be used in production.
    app.run(debug=True)


