import json
import os
from datetime import datetime
from flask import Flask, request, jsonify
from flask_cors import CORS
from transformers import AutoTokenizer
import threading
import logging
import time
import hashlib
from collections import deque

app = Flask(__name__)
# TODO(review): route/CORS configuration still to be tightened
CORS(app)

# Logging setup - verbose configuration for development
logging.basicConfig(
    level=logging.DEBUG,  # DEBUG level for more detailed output
    format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
    datefmt='%Y-%m-%d %H:%M:%S'
)
logger = logging.getLogger(__name__)

# Global tokenizer state: loaded lazily on first use, guarded by tokenizer_lock
tokenizer = None
tokenizer_lock = threading.Lock()
tokenizer_name = "Qwen/Qwen2.5-7B-Instruct"


# ===== Configuration =====
class Config:
    # The threshold must leave headroom for the summary itself.
    # When a new message arrives:
    # 1. Compute the current total token count
    # 2. If < 16,000 -> append normally
    # 3. If 16,000 ~ 24,000 -> optional: generate a lightweight summary
    # 4. If > 24,000 -> mandatory: compress early conversation into a summary

    # Qwen-Max   32,768 tokens   primary model, good long-context support
    # Qwen-Plus  32,768 tokens   balanced model, supports long context


    # To consider later (UI indicator levels):
    # < 16,000          normal state, green indicator
    # 16,000 ~ 24,000   yellow warning: "consider starting a summary"
    # 24,000 ~ 28,000   red alert: "must prepare a summary or early memory may be lost"
    # > 28,000          danger: "summarize immediately or new messages will overwrite old content"

    # Use 25,000, reserving ~7,000 tokens for the summarization logic

    TOKEN_THRESHOLD = 25000  # token count that triggers summarization
    SESSION_TIMEOUT = 1800  # session expiry time (seconds)
    # TOKEN_COST_PER_CHAR = 0.25  # rough estimate: tokens per character (mixed CJK/Latin)


# ===== Shared token-counting utility =====
def calculate_messages_tokens(message_contents):
    """
    Compute the total token count for a list of chat messages.

    Uses the AutoTokenizer matching the Qwen model for an exact count.
    The tokenizer is loaded lazily on first call, guarded by
    ``tokenizer_lock`` so concurrent requests do not load it twice.

    Args:
        message_contents: list of message dicts; each dict must contain
            ``role`` and ``content`` keys and may contain a ``date`` key.
            (Fixed docstring: the original claimed a list of plain strings,
            but the code indexes ``msg['role']`` / ``msg['content']``.)

    Returns:
        int: total token count across all messages. If tokenization fails
        partway through, the tokens accumulated so far are returned
        (0 when it fails immediately).
    """
    if not message_contents:
        return 0

    total_tokens = 0

    global tokenizer, tokenizer_lock

    try:
        with tokenizer_lock:
            # Lazy one-time initialization of the shared tokenizer.
            if tokenizer is None:
                tokenizer = AutoTokenizer.from_pretrained(tokenizer_name, trust_remote_code=True)

            for msg in message_contents:
                # Mimic the model input format for a closer token estimate.
                text = f"{msg['role']}\n{msg.get('date', '')}\n{msg['content']}"
                tokens = tokenizer.encode(text, add_special_tokens=False)
                total_tokens += len(tokens)
                # Lazy %-style args: formatting is skipped unless DEBUG is enabled.
                logger.debug("Token: %d | %s: %s...", len(tokens), msg['role'], msg['content'][:30])

        logger.info("🧮 Token计算完成: %d条消息, 共%d tokens", len(message_contents), total_tokens)

    except Exception as e:
        # Fallback path: log the failure and return whatever was counted so far.
        logger.error(f"❌ AutoTokenizer计算失败: {e}")
        # A character-based estimate could be enabled here if needed:
        # total_chars = sum(len(str(content)) for content in message_contents)
        # total_tokens = int(total_chars * Config.TOKEN_COST_PER_CHAR)

    return total_tokens


# ===== Helper for locating the most recent summary message =====
def searchFromEnd(last_summary_content, new_messages):
    """Return the index of the LAST message whose 'content' equals
    last_summary_content, scanning backwards; -1 if not found."""
    for idx in range(len(new_messages) - 1, -1, -1):
        if new_messages[idx].get('content') == last_summary_content:
            return idx
    return -1


# ===== JSON-file persistence =====
class SessionFileManager:
    """Persists session state as a JSON object keyed by session_id."""

    def __init__(self, file_path='sessions.json'):
        self.file_path = file_path
        self._ensure_file_exists()

    def _ensure_file_exists(self):
        """Create the storage file with an empty object if it is missing."""
        if not os.path.exists(self.file_path):
            self._write_all({})
            logger.info(f"创建新的会话存储文件: {self.file_path}")

    def _write_all(self, all_sessions):
        """Overwrite the storage file with the given session mapping."""
        with open(self.file_path, 'w', encoding='utf-8') as f:
            json.dump(all_sessions, f, ensure_ascii=False, indent=2)

    def load_all_sessions(self):
        """Load every stored session; on error, reset the file and return {}."""
        try:
            with open(self.file_path, 'r', encoding='utf-8') as f:
                data = json.load(f)
                logger.info(f"从文件加载会话状态，共 {len(data)} 个会话")
                return data
        except (json.JSONDecodeError, FileNotFoundError) as e:
            logger.warning(f"加载会话文件失败，创建空文件: {e}")
            self._ensure_file_exists()
            return {}

    def save_session(self, session_id, session_data):
        """Upsert a single session's data; returns True on success."""
        try:
            # Read-modify-write of the whole file.
            all_sessions = self.load_all_sessions()
            all_sessions[session_id] = session_data
            self._write_all(all_sessions)
            logger.debug(f"会话状态已保存: session_id={session_id}")
            return True
        except Exception as e:
            logger.error(f"保存会话状态失败: {e}")
            return False

    def delete_session(self, session_id):
        """Remove a session from the file; returns True only if it existed."""
        try:
            all_sessions = self.load_all_sessions()
            if session_id not in all_sessions:
                return False
            del all_sessions[session_id]
            self._write_all(all_sessions)
            logger.info(f"会话已删除: session_id={session_id}")
            return True
        except Exception as e:
            logger.error(f"删除会话失败: {e}")
            return False

# ===== SessionState: serializable per-session state =====
class SessionState:
    """Per-session counters and queues, JSON-serializable via to_dict()."""

    def __init__(self, session_data=None):
        """Restore state from a dict produced by to_dict(), or start fresh."""
        if session_data is None:
            session_data = {}

        # Core persisted counters
        self.expected_count = session_data.get('expected_count', 0)  # next expected message sequence number
        self.token_count = session_data.get('token_count', 0)        # total token count for the session
        self.message_count = session_data.get('message_count', 0)    # total message count
        self.temp_token_count = session_data.get('temp_token_count', 0)  # tokens accumulated since the last summary
        self.last_summary_content = session_data.get('last_summary_content', '')  # content of the most recent summary

        # Runtime-only state (deliberately NOT restored from disk: the
        # pending queue is cleared on restart).
        self.pending_queue = deque()   # queued 'add' messages awaiting in-order processing
        self.marked_sequences = set()  # sequence numbers flagged as possibly lost
        # Fix: previously this attribute was only created externally (in
        # _create_new_session / _sync_existing_session), so sessions restored
        # from file could hit AttributeError when it was read.
        self.message_content = session_data.get('message_content', [])
        self.last_updated = session_data.get('last_updated', time.time())

    def to_dict(self):
        """Serialize the persisted fields (runtime queues are omitted)."""
        return {
            'expected_count': self.expected_count,
            'token_count': self.token_count,
            'message_count': self.message_count,
            'temp_token_count': self.temp_token_count,
            'last_summary_content': self.last_summary_content,
            # pending_queue is runtime-only and intentionally not persisted
            'last_updated': self.last_updated,
        }

    def __str__(self):
        return (f"SessionState(expected_count={self.expected_count}, "
                f"message_count={self.message_count}, "
                f"token_count={self.token_count}, "
                f"temp_token_count={self.temp_token_count})")

# ===== Global state store (file-backed) =====
session_file_manager = SessionFileManager()
sessions = {}  # in-memory cache of SessionState objects, keyed by session_id

# TODO(review): loading every session at startup wastes memory -
# better to lazily load each session the first time it is accessed.
def load_sessions_from_file():
    """Populate the in-memory `sessions` cache from the JSON file at startup."""
    global sessions
    file_data = session_file_manager.load_all_sessions()
    
    for session_id, session_data in file_data.items():
        sessions[session_id] = SessionState(session_data)
        logger.debug(f"加载会话: {session_id} -> {sessions[session_id]}")
    
    logger.info(f"会话加载完成，共 {len(sessions)} 个会话")

def save_session_to_file(session_id, session_state):
    """Serialize a SessionState and persist it through the file manager."""
    return session_file_manager.save_session(session_id, session_state.to_dict())

# ===== Core synchronization logic =====
def sync_session_with_frontend_history(session_id, frontend_message_count, new_messages):
    """
    Reconcile backend session state with the frontend's full history.

    Returns:
        tuple: (session_state, needs_recalculation)
    """
    global sessions

    # Tokenize once up front so both branches reuse the same count.
    token_count = calculate_messages_tokens(new_messages)

    existing = sessions.get(session_id)

    # Case 1: unknown session -> create it from the frontend history.
    if existing is None:
        logger.info(f"🆕 创建新会话: session_id={session_id}")
        return _create_new_session(session_id, frontend_message_count, new_messages, token_count)

    # Case 2: counts already match -> nothing to sync.
    if existing.message_count == frontend_message_count:
        logger.debug(f"✅ 会话状态同步: 前后端消息数一致")
        return existing, False

    # Case 3: counts differ -> the frontend history wins.
    return _sync_existing_session(existing, frontend_message_count, new_messages, token_count)


    
def sync_session_with_frontend_add(session_id):
    """
    Drain the session's pending 'add' queue using exact sequence-number matching.

    Walks a sorted snapshot of pending_queue; for each item its sequence number
    is compared against the session's expected counter:
      - equal: count its tokens, remove it from the live queue, advance counter
      - greater: mark the gap's sequence numbers as possibly lost, jump ahead
      - smaller: rewind the expected counter and clean up a stale mark

    Returns:
        tuple: (session_state, needs_recalculation) - True when
        temp_token_count crossed Config.TOKEN_THRESHOLD during processing.

    NOTE(review): items hit by the greater/smaller branches are left inside
    session_state.pending_queue (only the counter moves) - confirm that
    leaving them queued is intentional.
    """
    global sessions
    session_state = sessions[session_id]
    current_expected = session_state.expected_count
    processed_count = 0
    needs_recalculation = False

    # Work on a sorted snapshot so removals from the live queue are safe.
    sorted_queue = sorted(list(session_state.pending_queue), key=lambda x: x['sequence_number'])
    logger.info(f'📊 消息队列详情：{sorted_queue}')

    # Index-based walk: the index only advances on non-matching branches.
    index = 0
    while index < len(sorted_queue):
        item = sorted_queue[index]
        item_sequence = item['sequence_number']

        if item_sequence == current_expected:
            # Case 1: sequence matches the expected value - process and remove.
            token_count = calculate_messages_tokens(item['messages'])
            session_state.token_count += token_count
            session_state.temp_token_count += token_count
            session_state.message_count += 1
            current_expected += 1
            processed_count += 1
            
            # Remove the exact item from the live queue by sequence number;
            # iterate a copy to avoid mutating the deque while iterating it.
            removed = False
            for queue_item in list(session_state.pending_queue):
                if queue_item['sequence_number'] == item_sequence:
                    session_state.pending_queue.remove(queue_item)
                    logger.debug(f"✅ 已从原始队列移除消息: 序列号 {item_sequence}")
                    removed = True
                    break
            
            if not removed:
                logger.warning(f"⚠️ 消息已不在原始队列中: 序列号 {item_sequence}")
            
            # Drop from the snapshot as well; the index stays put because the
            # next element shifts into the current position.
            sorted_queue.pop(index)
            
            # Threshold check: request a summary and stop processing.
            if session_state.temp_token_count >= Config.TOKEN_THRESHOLD:
                session_state.temp_token_count = 0
                needs_recalculation = True
                logger.info("🔔 Token超阈值，要求生成摘要")
                break

        elif item_sequence > current_expected:
            # Case 2: gap detected - record the missing sequence numbers and
            # fast-forward the expected counter to the item's sequence.
            logger.warning(f"🔄 序列号不连续: 期望 {current_expected}, 收到 {item_sequence}。同步期望值。")
            
            for lost_seq in range(current_expected, item_sequence):
                session_state.marked_sequences.add(lost_seq)
                logger.debug(f"📌 标记可能丢失的序列号: {lost_seq}")
            
            current_expected = item_sequence
            index += 1  # move on; the item itself stays queued

        else:  # item_sequence < current_expected
            # Case 3: stale/duplicate sequence - rewind the expected counter
            # down to it and clear the now-obsolete "lost" mark.
            logger.info(f"🗑️ 序列号 {item_sequence} 小于当前期望值 {current_expected}，同步期望值并清理标记。")
            current_expected = item_sequence
            
            # NOTE(review): old_expected is computed AFTER current_expected was
            # lowered, so this clears mark item_sequence-1 - confirm that is
            # the intended mark to remove.
            old_expected = current_expected - 1
            if old_expected in session_state.marked_sequences:
                session_state.marked_sequences.remove(old_expected)
                logger.debug(f"🧹 已从标记序列中剔除旧期望值: {old_expected}")
            
            index += 1  # move on to the next item

    # Persist the advanced counter back onto the session.
    session_state.expected_count = current_expected

    logger.info(f"✅ 处理完成。处理了 {processed_count} 条消息。最新期望值: {current_expected}")
    return session_state, needs_recalculation


    



def _create_new_session(session_id, frontend_message_count, new_messages, token_count):
    """Register a brand-new session seeded from the frontend history."""
    global sessions

    state = SessionState()
    state.message_count = frontend_message_count
    state.expected_count = frontend_message_count + 1
    state.message_content = new_messages[:]  # defensive copy of the history
    state.token_count = token_count

    sessions[session_id] = state

    # Over the threshold -> caller must trigger summarization immediately.
    if token_count > Config.TOKEN_THRESHOLD:
        logger.info(f'后端没有 {session_id} 记录，内容大于阈值')
        state.temp_token_count = 0
        return state, True

    logger.info(f'后端没有{session_id}记录，内容小于阈值')
    state.temp_token_count = token_count
    return state, False

def _sync_existing_session(session_state, frontend_message_count, new_messages, token_count):
    """Overwrite backend counters with the frontend history (frontend wins)."""
    logger.info(f"🔄 会话消息数不同步，以前端为准: "
               f"后端={session_state.message_count}, 前端={frontend_message_count}")

    # The frontend is authoritative: mirror its counts and history.
    session_state.message_count = frontend_message_count
    session_state.expected_count = frontend_message_count + 1
    session_state.token_count = token_count
    session_state.message_content = new_messages[:]  # defensive copy

    # Under the threshold: everything still fits, no summary needed.
    if token_count <= Config.TOKEN_THRESHOLD:
        session_state.temp_token_count = token_count
        return session_state, False

    # Over the threshold: locate the most recent summary in the history.
    summary_index = searchFromEnd(session_state.last_summary_content, new_messages)

    if summary_index == -1:
        # No summary present -> a full recalculation is required.
        session_state.temp_token_count = 0
        return session_state, True

    # Only the tokens after the last summary count against the threshold.
    tail_tokens = calculate_messages_tokens(new_messages[summary_index:])
    session_state.temp_token_count = tail_tokens

    if tail_tokens > Config.TOKEN_THRESHOLD:
        session_state.temp_token_count = 0
        return session_state, True

    return session_state, False


# ===== Main API endpoint =====
@app.route('/api/report-message', methods=['POST'])
def handle_message_report():
    """Core API: receive frontend-reported messages (with persistence).

    Expects JSON: session_id, current_total, message_type ('history'|'add'),
    new_messages. Returns token accounting plus a summary_required flag.
    """
    try:
        # Fix: silent=True returns None instead of raising on a bad/missing
        # JSON body, so malformed requests get a clean 400 instead of a 500.
        data = request.get_json(silent=True)
        if data is None:
            return jsonify({"status": "error", "message": "缺少必要参数"}), 400

        session_id = data.get('session_id')
        front_current_total = data.get('current_total')
        message_type = data.get('message_type')
        new_messages = data.get('new_messages')

        # Fix: the original log line printed message_type twice.
        logger.info(f"📨 收到前端消息: session_id={session_id}, front_current_total={front_current_total}, "
                    + f"message_type={message_type},new_messages={new_messages}")

        # Parameter validation (current_total may legitimately be 0, hence `is None`).
        if not session_id or front_current_total is None or not new_messages:
            return jsonify({"status": "error", "message": "缺少必要参数"}), 400

        session_state = None
        needs_recalculation = False

        # 'history': synchronize the whole session against the frontend's
        # full history (no new-message processing happens in this step).
        if message_type == 'history':
            session_state, needs_recalculation = sync_session_with_frontend_history(
                session_id, front_current_total, new_messages
            )

        # Safety net for non-history messages: look the session up. With the
        # current frontend flow a page reset always sends 'history' first.
        if session_state is None:
            if session_id in sessions:
                session_state = sessions[session_id]
            else:
                logger.error(f"会话不存在: {session_id}")
                return jsonify({"status": "error", "message": "会话不存在"}), 404

        # Refresh the activity timestamp used by the expiry cleanup.
        session_state.last_updated = time.time()

        # 'add': enqueue unconditionally, then drain the queue in order.
        if message_type == 'add':
            queue_item = {
                'messages': new_messages,
                'sequence_number': front_current_total,
                'timestamp': time.time()  # used for timeout checks
            }
            session_state.pending_queue.append(queue_item)

            session_state, needs_recalculation = sync_session_with_frontend_add(
                session_id
            )

        # Persist after every significant update.
        if not save_session_to_file(session_id, session_state):
            logger.error(f"❌ 会话状态保存失败: session_id={session_id}")

        return jsonify({
            "status": "received",  # must match what the frontend expects
            "sessionId": session_id,
            "token_count": session_state.token_count if session_state else 0,
            "temp_token_count": session_state.temp_token_count if session_state else 0,
            "total_uploaded": session_state.message_count if session_state else 0,
            "summary_required": needs_recalculation,
            "threshold": Config.TOKEN_THRESHOLD
        }), 200

    except Exception as e:
        logger.error(f"❌ 服务器内部错误: {str(e)}", exc_info=True)
        return jsonify({"status": "error", "message": f"服务器内部错误: {str(e)}"}), 500

# ===== Session-management APIs =====
@app.route('/api/session/<session_id>', methods=['DELETE'])
def delete_session(session_id):
    """Delete a session from both the in-memory cache and the file store."""
    try:
        # Drop the cached copy first; the file store is authoritative.
        sessions.pop(session_id, None)

        if not session_file_manager.delete_session(session_id):
            return jsonify({"status": "error", "message": "会话删除失败"}), 500
        return jsonify({"status": "success", "message": "会话已删除"}), 200
    except Exception as e:
        return jsonify({"status": "error", "message": str(e)}), 500

@app.route('/api/sessions', methods=['GET'])
def list_sessions():
    """List every cached session (debugging aid)."""
    try:
        snapshot = {sid: state.to_dict() for sid, state in sessions.items()}
        return jsonify({"sessions": snapshot, "count": len(snapshot)}), 200
    except Exception as e:
        return jsonify({"status": "error", "message": str(e)}), 500

# ===== Periodic cleanup task =====
def cleanup_expired_sessions():
    """Background loop: purge expired sessions from memory and the file store.

    Intended to run in a daemon thread; wakes every 5 minutes.
    """
    while True:
        time.sleep(300)  # run every 5 minutes
        now = time.time()

        # Fix: snapshot the items before iterating. Flask request-handler
        # threads mutate `sessions` concurrently, and iterating the live dict
        # could raise "dictionary changed size during iteration".
        expired_sessions = [
            session_id
            for session_id, state in list(sessions.items())
            if (now - state.last_updated) > Config.SESSION_TIMEOUT
        ]

        for session_id in expired_sessions:
            logger.info(f"🧹 清理过期会话: {session_id}")
            # pop() tolerates the session having been deleted concurrently.
            sessions.pop(session_id, None)
            session_file_manager.delete_session(session_id)

        if expired_sessions:
            logger.info(f"已清理 {len(expired_sessions)} 个过期会话")

# ===== Application entry point =====
if __name__ == '__main__':
    # Restore persisted sessions before serving requests.
    load_sessions_from_file()

    # Fix: cleanup_expired_sessions was defined but never started anywhere -
    # run it as a daemon thread so expired sessions actually get purged.
    cleanup_thread = threading.Thread(target=cleanup_expired_sessions, daemon=True)
    cleanup_thread.start()

    logger.info("🚀 启动Flask服务器（带持久化支持）...")
    app.run(debug=True, port=5000)