import json
import os
import threading
import time
from flask import Flask, request, jsonify
from flask_cors import CORS
import redis
import requests

# Base URL of the upstream PT server used for passkey/tracker verification.
# NOTE(review): empty by default — verification requests will fail until configured.
PT_SERVER_URL = ""

app = Flask(__name__)
CORS(app)

# Redis connection used to publish stat deltas to a Stream.
# NOTE(review): no host/port given, so redis-py defaults apply
# (localhost:6379, db 0) — confirm for the deployment environment.
redis_client = redis.Redis(

    max_connections=100
)

# Two-level nested dict: {passkey: {"<peerid>:<infohash>": session_record}}.
stats_cache = {}
# Global re-entrant lock guarding all access to stats_cache.
cache_lock = threading.RLock()
# Path of the JSON file used to persist stats_cache across restarts.
CACHE_FILE = 'stats_cache.json'


# Load persisted session statistics from disk into the in-memory cache.
def load_cache():
    """Populate the global stats_cache from CACHE_FILE, if present.

    On any failure (unreadable file, bad JSON, unexpected structure) the
    cache is reset to an empty dict so the service can still start.
    """
    global stats_cache
    try:
        if not os.path.exists(CACHE_FILE):
            return
        with open(CACHE_FILE, 'r') as f:
            stats_cache = json.load(f)
        print(
            f"已加载缓存：{len(stats_cache)}个用户，{sum(len(sessions) for sessions in stats_cache.values())}个会话")
    except Exception as e:
        print(f"加载缓存失败：{e}")
        stats_cache = {}


# Persist stats_cache to CACHE_FILE, merging with the data already on disk.
def save_cache():
    """Merge the in-memory stats_cache into CACHE_FILE and write it back.

    A disk record is only overwritten when the in-memory session has a
    newer "last_updated" timestamp, so older/foreign records on disk are
    preserved. The file is written atomically (temp file + os.replace)
    so a crash mid-write can no longer corrupt the cache file.
    """
    try:
        # Read the current on-disk cache first.
        existing_data = {}
        if os.path.exists(CACHE_FILE):
            try:
                with open(CACHE_FILE, 'r') as f:
                    existing_data = json.load(f)
            except json.JSONDecodeError:
                # File may be corrupt; start from an empty dict.
                existing_data = {}

        # Snapshot the in-memory cache under the lock (shallow copy per
        # passkey) so we don't hold the lock during the file I/O below.
        with cache_lock:
            current_cache = {}
            for passkey, sessions in stats_cache.items():
                current_cache[passkey] = sessions.copy()

        # Merge: keep whichever record carries the newer last_updated stamp.
        for passkey, current_sessions in current_cache.items():
            # Unknown passkey: take the whole entry as-is.
            if passkey not in existing_data:
                existing_data[passkey] = current_sessions
                continue
            # Known passkey: compare session by session.
            for session_key, current_session_data in current_sessions.items():
                if session_key not in existing_data[passkey]:
                    existing_data[passkey][session_key] = current_session_data
                else:
                    existing_timestamp = existing_data[passkey][session_key].get("last_updated", 0)
                    current_timestamp = current_session_data.get("last_updated", 0)
                    # Only replace the disk record when memory is newer.
                    if current_timestamp > existing_timestamp:
                        existing_data[passkey][session_key] = current_session_data

        # Atomic write: dump to a temp file, then rename over the target,
        # so readers never observe a partially written cache file.
        tmp_path = CACHE_FILE + '.tmp'
        with open(tmp_path, 'w') as f:
            json.dump(existing_data, f)
        os.replace(tmp_path, CACHE_FILE)

        print(
            f"已保存缓存：合并后共{len(existing_data)}个用户，{sum(len(sessions) for sessions in existing_data.values())}个会话")
    except Exception as e:
        print(f"保存缓存失败：{e}")


# Persist the cache, prune expired sessions/passkeys, and reschedule itself.
def cleanup_sessions():
    """Save the cache to disk, evict stale entries, and re-arm the timer.

    Sessions idle for over 1 hour are dropped; passkeys with no session
    activity for 24 hours are dropped entirely. The next run is scheduled
    in a ``finally`` block so one failed cycle cannot silently kill the
    periodic save/cleanup loop, and the timer thread is a daemon so it
    cannot block interpreter shutdown.
    """
    try:
        # Flush current in-memory stats to disk before pruning.
        save_cache()

        current_time = time.time()
        session_expiry = 3600   # individual sessions expire after 1 hour
        passkey_expiry = 86400  # a whole passkey expires after 24 idle hours

        with cache_lock:
            # Stats before cleanup, for the summary log line.
            users_before = len(stats_cache)
            sessions_before = sum(len(sessions) for sessions in stats_cache.values())

            expired_passkeys = []

            for passkey, sessions in stats_cache.items():
                # A passkey is expired when its most recent session update is old.
                passkey_last_active = max([data.get('last_updated', 0) for data in sessions.values()], default=0)
                if current_time - passkey_last_active > passkey_expiry:
                    expired_passkeys.append(passkey)
                    continue

                # Collect expired session keys first so we never mutate a
                # dict while iterating it.
                expired_sessions = [
                    session_key
                    for session_key, session_data in sessions.items()
                    if current_time - session_data.get('last_updated', 0) > session_expiry
                ]
                for session_key in expired_sessions:
                    del sessions[session_key]

            # Drop fully expired passkeys (deferred for the same reason).
            for passkey in expired_passkeys:
                del stats_cache[passkey]

            # Stats after cleanup.
            users_after = len(stats_cache)
            sessions_after = sum(len(sessions) for sessions in stats_cache.values())

        print(f"清理完成：删除了{users_before - users_after}个用户，{sessions_before - sessions_after}个会话")
    finally:
        # Always reschedule, even if this cycle raised, so the loop never dies.
        timer = threading.Timer(60, cleanup_sessions)  # save + clean every minute
        timer.daemon = True
        timer.start()


@app.route('/api/passkey/validate/<passkey>', methods=['GET'])
def validate_passkey(passkey):
    """Validate a passkey and the requesting tracker against the PT server.

    Query params:
        tracker_name: name of the requesting tracker (required).

    Returns:
        JSON ``{'valid': bool, 'message': str}``. A valid passkey whose
        tracker *check* failed is still accepted; a tracker explicitly
        reported inactive is rejected.
    """
    try:
        # 1. tracker_name is mandatory.
        tracker_name = request.args.get('tracker_name')
        if not tracker_name:
            print("请求中缺少tracker_name参数")
            return jsonify({'valid': False, 'message': 'Missing tracker_name parameter'})

        # 2. Ask the PT server to verify the passkey itself.
        passkey_response = requests.post(
            f"{PT_SERVER_URL}/api/user/verify-passkey",
            json={"passkey": passkey},
            timeout=5  # bound the upstream wait
        )

        # 3. Any non-200 upstream status counts as verification failure.
        if passkey_response.status_code != 200:
            print(f"验证Passkey失败: HTTP状态码 {passkey_response.status_code}, 响应: {passkey_response.text}")
            return jsonify({'valid': False, 'message': 'Passkey verification failed'})

        # 4. Parse the upstream verdict.
        passkey_data = passkey_response.json()
        is_passkey_valid = passkey_data.get('data', {}).get('valid', False)

        # 5. Invalid passkey: reject immediately.
        if not is_passkey_valid:
            return jsonify({'valid': False, 'message': 'Invalid passkey'})

        # 6. Verify the tracker's registration status.
        tracker_response = requests.post(
            f"{PT_SERVER_URL}/api/network/tracker/verify-status",
            json={"trackerName": tracker_name},
            timeout=5  # bound the upstream wait
        )

        # 7. Tracker-check errors are tolerated: the passkey is valid, so
        #    access is still granted (tighten here if policy changes).
        if tracker_response.status_code != 200:
            print(f"验证Tracker状态失败: HTTP状态码 {tracker_response.status_code}, 响应: {tracker_response.text}")
            return jsonify({'valid': True, 'message': 'Valid passkey but tracker verification failed'})

        # 8. Parse the tracker verdict.
        tracker_data = tracker_response.json()
        is_tracker_active = tracker_data.get('data', {}).get('isActive', False)

        # 9. Combine both results.
        if is_tracker_active:
            return jsonify({'valid': True, 'message': 'Valid passkey and active tracker'})
        # An inactive or unregistered tracker is rejected with details.
        return jsonify({
            'valid': False,
            'message': 'Tracker is inactive or not registered in the system'
        })

    except Exception as e:
        # Network error, timeout, malformed JSON, etc. Log the detail
        # server-side only: echoing str(e) to the client could leak
        # internals (e.g. the upstream URL), so return a generic message.
        print(f"验证过程中发生错误: {str(e)}")
        return jsonify({'valid': False, 'message': 'Verification error'})


def _read_file_cache(error_label):
    """Read CACHE_FILE and return the parsed dict, or None on any failure.

    error_label is the log prefix used by the original inline loaders,
    so existing log output is preserved unchanged.
    """
    if not os.path.exists(CACHE_FILE):
        return None
    try:
        with open(CACHE_FILE, 'r') as f:
            return json.load(f)
    except Exception as e:
        print(f"{error_label}: {e}")
        return None


@app.route('/api/stats/update', methods=['POST'])
def update_stats():
    """Record an announce's upload/download counters for one peer session.

    Expects a JSON body with passkey, peerid, infohash, uploaded,
    downloaded and optionally tracker/left. Positive deltas against the
    cached session are pushed to the Redis stream 'user_stats_stream';
    the session cache is then updated with the absolute counters.
    """
    data = request.json
    tracker = data.get('tracker', 'unknown-tracker')  # tracker name, defaulted
    passkey = data.get('passkey')
    peerid = data.get('peerid')
    infohash = data.get('infohash')
    uploaded = int(data.get('uploaded', 0))
    downloaded = int(data.get('downloaded', 0))
    left = int(data.get('left', 0))  # optional; parsed but not used below

    # One session per (peer, torrent) pair.
    session_key = f"{peerid}:{infohash}"

    with cache_lock:
        if passkey not in stats_cache:
            # Case 1: passkey missing entirely — try to backfill from disk.
            file_cache = _read_file_cache("加载passkey数据出错")
            if file_cache is not None and passkey in file_cache:
                stats_cache[passkey] = file_cache[passkey]
                print(f"从JSON文件加载整个passkey的数据: {passkey}")

            # Still missing after backfill: start with an empty session map.
            if passkey not in stats_cache:
                stats_cache[passkey] = {}

        elif session_key not in stats_cache[passkey]:
            # Case 2: passkey cached but this session missing — backfill just it.
            file_cache = _read_file_cache("加载会话数据出错")
            if file_cache is not None and passkey in file_cache and session_key in file_cache[passkey]:
                stats_cache[passkey][session_key] = file_cache[passkey][session_key]
                print(f"从JSON文件加载特定会话: {passkey}:{session_key}")

        # Previous counters for this session (zeros if brand new).
        last_record = stats_cache[passkey].get(session_key, {
            "uploaded": 0,
            "downloaded": 0,
            "tracker": tracker,
            "infohash": infohash,
            "last_updated": 0
        })

        # Deltas are clamped at 0: a counter reset (e.g. client restart)
        # must not produce negative traffic.
        upload_delta = max(0, uploaded - last_record.get("uploaded", 0))
        download_delta = max(0, downloaded - last_record.get("downloaded", 0))

        # Publish only when there is actual new traffic.
        if upload_delta > 0 or download_delta > 0:
            fields = {
                'passkey': passkey,
                'tracker': tracker,
                'infohash': infohash,
                'timestamp': int(time.time())
            }

            if upload_delta > 0:
                fields['uploaded'] = upload_delta

            if download_delta > 0:
                fields['downloaded'] = download_delta

            # Queue the delta for the downstream consumer.
            redis_client.xadd('user_stats_stream', fields)

            print(
                f"增量: 用户={passkey}, Tracker={tracker}, 资源={infohash}, 上传={upload_delta}, 下载={download_delta}")

        # Store the new absolute counters for the next delta computation.
        stats_cache[passkey][session_key] = {
            "uploaded": uploaded,
            "downloaded": downloaded,
            "tracker": tracker,
            "infohash": infohash,
            "last_updated": time.time()
        }

    return jsonify({'success': True})


@app.route('/status', methods=['GET'])
def status():
    """Report service health and cache statistics as JSON.

    All stats_cache figures are captured in a single locked snapshot
    (the original read ``len(stats_cache)`` outside the lock, giving an
    inconsistent, unsynchronized value).
    """
    with cache_lock:
        # Capture every cache-derived figure under the lock.
        users_cached = len(stats_cache)
        total_sessions = sum(len(user_sessions) for user_sessions in stats_cache.values())

        # Session counts per tracker and per resource (infohash).
        tracker_stats = {}
        infohash_stats = {}

        for passkey, sessions in stats_cache.items():
            for session_data in sessions.values():
                tracker = session_data.get('tracker', 'unknown-tracker')
                tracker_stats[tracker] = tracker_stats.get(tracker, 0) + 1

                infohash = session_data.get('infohash', 'unknown-infohash')
                infohash_stats[infohash] = infohash_stats.get(infohash, 0) + 1

    # Pending (unconsumed) entries in the Redis stream.
    stream_length = redis_client.xlen('user_stats_stream')

    return jsonify({
        'status': 'running',
        'users_cached': users_cached,
        'sessions_cached': total_sessions,
        'trackers': tracker_stats,
        'infohashes': infohash_stats,
        'pending_stats_entries': stream_length,
        'timestamp': time.time()
    })


# Replacement for the removed @app.before_first_request decorator;
# called once from __main__ before the server starts.
def startup():
    """Load the persisted cache and arm the first cleanup timer.

    The timer is marked as a daemon so this background thread cannot
    block interpreter shutdown (the original non-daemon Timer could).
    """
    load_cache()
    timer = threading.Timer(1, cleanup_sessions)
    timer.daemon = True
    timer.start()


# Flask 2.x removed @app.before_first_request, so initialization is
# performed explicitly before the server starts serving.
if __name__ == '__main__':
    startup()  # load persisted cache and schedule periodic cleanup
    app.run(debug=False, host='0.0.0.0', port=5000)