# -*- coding: utf-8 -*- 
"""
Project: AppleSnApi
Creator: k
Create time: 2025-06-16 11:33
IDE: PyCharm
Introduction: redis、MongoDB、proxy
"""
import redis
import threading
import time
from typing import Dict
from functools import wraps
from loguru import logger
import os
import sys
import json
import random
from pymongo import MongoClient
from pymongo.errors import ConnectionFailure
from sqlalchemy import create_engine, Column, String, Integer, Text
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import sessionmaker
from sqlalchemy.exc import OperationalError
from datetime import datetime
from sqlalchemy import event

# Make the project root importable so `settings` can be resolved when this
# module is loaded from within the package directory.
current_path = os.path.abspath(os.path.dirname(__file__))
rootPath = os.path.split(current_path)[0]
sys.path.append(rootPath)

from settings import LOG_DIR, REDIS_CONFIG, MONGO_CONF, MYSQL_CONF

# Configure logging: JSON-serialized records rotated at 500 MB, filtered so
# that only records bound with extra["source"] == "db" land in db.log.
logger.add(
    os.path.join(LOG_DIR, 'db.log'),
    rotation="500 MB",
    serialize=True,
    level='INFO',
    format="{time:YYYY-MM-DD HH:mm:ss} {level} From {module}.{function} : {message}",
    filter=lambda record: record["extra"].get("source") == "db",
    encoding='utf-8'
)

# Logger pre-bound with source="db" so its records pass the filter above.
db_logger = logger.bind(source="db")


class RedisConfig:
    """Redis connection settings assembled from the project-level REDIS_CONFIG."""

    def __init__(self):
        cfg = REDIS_CONFIG
        self.host = cfg.get("host", "127.0.0.1")
        self.port = int(cfg.get("port", 6379))
        self.password = cfg.get("password", None)
        self.max_connections = cfg.get("max_connections", 500)
        self.socket_timeout = cfg.get("socket_timeout", None)
        self.socket_connect_timeout = cfg.get("socket_connect_timeout", None)
        # Fixed operational defaults (not taken from settings).
        self.health_check_interval = 30
        self.retry_on_timeout = True
        self.connection_retries = 3  # maximum reconnect attempts
        self.retry_delay = 0.1  # base delay (seconds) for exponential backoff


class RedisClientManager:
    """Thread-safe singleton owning one ConnectionPool per Redis DB index."""
    _instance = None
    _lock = threading.Lock()

    def __new__(cls):
        # Creation always happens under the class lock so that concurrent
        # first calls still yield a single shared instance.
        with cls._lock:
            if cls._instance is None:
                cls._instance = super().__new__(cls)
                cls._instance.config = RedisConfig()
                cls._instance._pools: Dict[int, redis.ConnectionPool] = {}
        return cls._instance

    def get_client(self, db: int = 0) -> 'RedisClient':
        """Return an auto-reconnecting RedisClient for the given DB index."""
        self._validate_db(db)
        with self._lock:
            pool = self._get_pool(db)
            db_logger.info(f'Connected to Redis at {self.config.host} db-{db} Success')
            return RedisClient(pool, self.config)

    def _get_pool(self, db: int) -> redis.ConnectionPool:
        """Create the pool for ``db`` on first use, then reuse it."""
        if db not in self._pools:
            self._pools[db] = redis.ConnectionPool(
                host=self.config.host,
                port=self.config.port,
                password=self.config.password,
                db=db,
                max_connections=self.config.max_connections,
                socket_timeout=self.config.socket_timeout,
                # Fix: socket_connect_timeout was configured in RedisConfig
                # but never passed through to the pool.
                socket_connect_timeout=self.config.socket_connect_timeout,
                health_check_interval=self.config.health_check_interval,
                retry_on_timeout=self.config.retry_on_timeout,
                decode_responses=True
            )
        return self._pools[db]

    @staticmethod
    def _validate_db(db: int):
        """Reject DB indexes outside Redis's supported 0-255 range."""
        if not 0 <= db <= 255:
            raise ValueError(f"Invalid Redis DB number: {db} (0-255 allowed)")


class RedisClient:
    """Redis client wrapper with lazy connection, health checks and retries."""

    def __init__(self, pool: redis.ConnectionPool, config: RedisConfig):
        self._pool = pool
        self.config = config
        self._client = None  # created lazily on first access
        self._lock = threading.RLock()
        self._last_health_check = 0

    @property
    def client(self) -> redis.Redis:
        """Return the underlying client, pinging it at most once per 30s."""
        with self._lock:
            now = time.time()
            if now - self._last_health_check > 30:
                self._check_connection()
                self._last_health_check = now
            return self._client or self._reconnect()

    def _check_connection(self):
        """Reconnect if the cached client is missing or fails a ping."""
        try:
            if not self._client or not self._client.ping():
                self._reconnect()
        except redis.RedisError:
            self._reconnect()

    def _reconnect(self) -> redis.Redis:
        """Re-create the client with exponential backoff; raise after the last retry."""
        for attempt in range(self.config.connection_retries):
            try:
                self._client = redis.Redis(connection_pool=self._pool)
                if self._client.ping():
                    return self._client
            except redis.RedisError as e:
                if attempt == self.config.connection_retries - 1:
                    db_logger.critical("Redis connection failed after retries")
                    raise
                delay = self.config.retry_delay * (2 ** attempt)
                db_logger.warning(f"Reconnect attempt {attempt + 1}, retrying in {delay}s")
                time.sleep(delay)
        raise redis.ConnectionError("Failed to establish Redis connection")

    def __getattr__(self, name: str):
        """Proxy unknown attributes to the Redis client, adding retry-on-failure."""
        redis_method = getattr(self.client, name)

        if callable(redis_method):
            @wraps(redis_method)
            def wrapped(*args, **kwargs):
                method = redis_method
                for attempt in range(self.config.connection_retries + 1):
                    try:
                        return method(*args, **kwargs)
                    except (redis.ConnectionError, redis.TimeoutError) as e:
                        if attempt == self.config.connection_retries:
                            db_logger.error(f"Operation {name} failed after retries")
                            raise
                        db_logger.warning(f"Retrying {name} (attempt {attempt + 1})")
                        time.sleep(self.config.retry_delay * (2 ** attempt))
                        # Fix: rebind to the freshly reconnected client so the
                        # retry does not reuse a method captured from the stale
                        # pre-reconnect client object.
                        method = getattr(self._reconnect(), name)
                raise redis.RedisError("Unexpected error in wrapped method")

            return wrapped
        return redis_method

    def close(self):
        """Release the cached client; the next access reconnects lazily."""
        with self._lock:
            if self._client:
                self._client.close()
                self._client = None
                # Force a full health check on the next `client` access.
                self._last_health_check = 0


# Module-level Redis manager singleton and the default client (Redis DB 20).
REDIS_MANAGER = RedisClientManager()
redis_client = REDIS_MANAGER.get_client(20)


class ProxyManager:
    """Serves proxies cached from a Redis DB, selected by weight or at random."""

    def __init__(self, proxy_db=10):
        self._client = REDIS_MANAGER.get_client(proxy_db)
        # Fix: seed 'total_weight' up front so a cache that has never been
        # successfully refreshed can never raise KeyError in
        # _get_weighted_proxy.
        self._cache = {
            'proxies': [],
            'last_updated': 0,
            'weight_info': [],
            'total_weight': 0
        }
        self.CACHE_TTL = 30  # cache lifetime (seconds)

    def _scan_keys(self, pattern: str = "*", batch_size: int = 500) -> list:
        """Collect all keys matching ``pattern`` via SCAN (non-blocking KEYS replacement)."""
        # Fix: keep the cursor an integer throughout instead of mixing the
        # string '0' sentinel with the integer cursor redis-py returns.
        cursor = 0
        keys = []
        while True:
            cursor, partial_keys = self._client.scan(
                cursor=cursor,
                match=pattern,
                count=batch_size
            )
            keys.extend(partial_keys)
            if cursor == 0:  # SCAN signals completion with cursor 0
                break
        return keys

    def _refresh_cache(self):
        """Bulk-load proxy payloads and TTLs, then atomically replace the cache."""
        try:
            # Fetch all keys first
            keys = self._scan_keys()
            if not keys:
                return

            # Pipeline GET + TTL per key to avoid one round-trip per command.
            pipe = self._client.pipeline()
            for key in keys:
                pipe.get(key)
                pipe.ttl(key)
            results = pipe.execute()

            # results alternate [value, ttl, value, ttl, ...] in key order.
            valid_proxies = []
            for i in range(0, len(results), 2):
                try:
                    proxy_data = json.loads(results[i])
                    ttl = results[i + 1]
                    if ttl > 10:  # drop proxies that are about to expire
                        valid_proxies.append({
                            'key': keys[i // 2],
                            'data': proxy_data,
                            'ttl': ttl
                        })
                except (TypeError, json.JSONDecodeError) as e:
                    db_logger.warning(f"Invalid proxy data: {e}")

            # Pre-compute jittered weights (longer TTL => higher weight).
            weight_info = []
            total_weight = 0
            for proxy in valid_proxies:
                weight = proxy['ttl'] * random.uniform(0.8, 1.2)
                total_weight += weight
                weight_info.append((proxy, weight))

            # Replace the dict in a single assignment so concurrent readers
            # never observe a half-updated cache.
            self._cache = {
                'proxies': valid_proxies,
                'weight_info': weight_info,
                'total_weight': total_weight,
                'last_updated': time.time()
            }

        except redis.RedisError as e:
            db_logger.error(f"Refresh proxy cache failed: {e}")

    def _get_cached_proxies(self):
        """Return the cache, refreshing it first when older than CACHE_TTL."""
        if time.time() - self._cache['last_updated'] > self.CACHE_TTL:
            self._refresh_cache()
        return self._cache

    def get_proxy(self, strategy: str = 'weight') -> Dict[str, str]:
        """
        Main entry point for fetching one proxy.
        :param strategy: selection strategy ('weight' or 'random')
        :return: proxy dict; {'http': None, 'https': None} when none available
        """
        cache = self._get_cached_proxies()

        if not cache['proxies']:
            db_logger.warning("No available proxies in pool")
            return {"http": None, "https": None}
        try:
            if strategy == 'random':
                return self._get_random_proxy(cache)
            return self._get_weighted_proxy(cache)
        except Exception as e:
            db_logger.error(f"Select proxy failed: {e}")
            return {"http": None, "https": None}

    def _get_random_proxy(self, cache: dict) -> Dict[str, str]:
        """Uniform random selection."""
        proxy = random.choice(cache['proxies'])
        return proxy['data']

    def _get_weighted_proxy(self, cache: dict) -> Dict[str, str]:
        """Roulette-wheel selection proportional to each proxy's weight."""
        r = random.uniform(0, cache['total_weight'])  # threshold in [0, total_weight]
        upto = 0
        for proxy, weight in cache['weight_info']:
            # Walk the cumulative weights until we pass the random threshold.
            if upto + weight >= r:
                return proxy['data']
            upto += weight
        # Fallback (e.g. floating-point rounding): return the first proxy.
        return cache['proxies'][0]['data']


# Module-level proxy manager singleton (proxy pool lives in Redis DB 10).
PROXY_MANAGER = ProxyManager()


class MongoDBConnector:
    """Thin wrapper around MongoClient with a connectivity check and helpers."""

    def __init__(self, uri="mongodb://localhost:27017/", db_name="test"):
        """
        Initialize the MongoDB connection.
        :param uri: MongoDB connection URI (defaults to a local instance)
        :param db_name: database name to use (defaults to 'test')
        """
        self.uri = uri
        self.db_name = db_name
        self.client = None
        self.db = None
        self._connect()

    def _connect(self):
        """Create the client and verify the server is reachable via 'ping'."""
        try:
            self.client = MongoClient(self.uri)
            # 'ping' forces an actual round-trip; MongoClient() alone is lazy.
            self.client.admin.command('ping')
            db_logger.info(f"Connected to MongoDB at {self.uri} successfully!")
            self.db = self.client[self.db_name]
        except ConnectionFailure as e:
            # On failure self.db stays None; get_collection reports the error.
            db_logger.error(f"Could not connect to MongoDB: {e}")

    def get_collection(self, collection_name):
        """
        Return the named collection, or None when there is no live connection.
        :param collection_name: collection name
        :return: collection object, or None if not connected
        """
        if self.db is not None:
            return self.db[collection_name]
        else:
            db_logger.error("No database connection. Please check the connection.")
            return None

    def close_connection(self):
        """Close the MongoDB connection and clear the cached handles."""
        # Fix: explicit None comparison (consistent with get_collection) and
        # handle reset, so a closed connector cannot hand out collections
        # from a dead client.
        if self.client is not None:
            self.client.close()
            db_logger.info("MongoDB connection closed.")
            self.client = None
            self.db = None


# MongoDB client
# NOTE(review): MONGO_CONF is passed as the URI positional argument — confirm
# it is a connection string. If the connection fails, get_collection returns
# None and the create_index calls below raise AttributeError at import time.
MONGO_CONNECTOR = MongoDBConnector(MONGO_CONF, 'test')
sn_collection = MONGO_CONNECTOR.get_collection("apple_sn_records")
# Unique lookup index on 'sn' plus a secondary index on 'task_id';
# background=True avoids blocking the server during the build.
sn_collection.create_index([('sn', 1)], unique=True, background=True)
sn_collection.create_index("task_id")

################################## MySQL section #####################################
# NOTE: the SQLAlchemy/MySQL implementation below is intentionally disabled (commented out).
# # 创建基础模型类
# Base = declarative_base()
#
#
# class SNQuery(Base):
#     __tablename__ = "apple_sn_queries"
#     __table_args__ = {'extend_existing': True}
#
#     seq_id = Column(Integer, primary_key=True, comment='主键,自增序列', autoincrement=True)
#     sn = Column(String(20), nullable=False, index=True, unique=True, comment='Apple设备序列号')  # 查询sn - 唯一标识
#     user = Column(String(20), nullable=True, index=True, comment='查询用户')  # 用户（可为空）
#     bs_id = Column(String(255), nullable=True, index=True, comment='业务id')  # 业务ID
#
#     status = Column(String(20), nullable=True, comment='查询状态')  # 查询状态
#     msg = Column(String(255), nullable=True, comment='查询消息结果')  # 查询结果消息
#     data = Column(Text, nullable=True, comment='查询原始结果,压缩json字符串')  # 实际查询结果
#
#     callback_status = Column(String(20), nullable=True, comment='回调状态')  # 回调状态
#     callback_msg = Column(Text, nullable=True, comment='回调响应结果')  # 回调结果
#
#     extra = Column(Text, nullable=True, comment='附加参数,预设字段')  # 附加字段
#     created_at = Column(String(32), nullable=True, comment='查询创建时间')  # 查询创建时间
#     updated_at = Column(String(32), nullable=True, comment='更新时间')  # 更新时间
#     ss = Column(String(32), nullable=True, comment='实际抓取时间')  ##抓取时间
#
#     def to_dict(self):
#         """将模型转换为字典格式"""
#         return {
#             'seq_id': self.seq_id,
#             'sn': self.sn,
#             'user': self.user,
#             'bs_id': self.bs_id,
#             'status': self.status,
#             'msg': self.msg,
#             'data': json.loads(self.data) if self.data else None,
#             'callback_status': self.callback_status,
#             'callback_msg': json.loads(self.callback_msg) if self.callback_msg else None,
#             'extra': json.loads(self.extra) if self.extra else None,
#             'created_at': self.created_at,
#             'updated_at': self.updated_at,
#             'ss': self.ss if self.ss else None
#         }
#
#
# # 定义时间字符串格式化函数
# def get_current_time_str():
#     return datetime.now().strftime('%Y-%m-%d %H:%M:%S')
#
#
# # 监听插入事件，设置created_at和updated_at
# @event.listens_for(SNQuery, 'before_insert')
# def before_insert(mapper, connection, target):
#     time_str = get_current_time_str()
#     target.created_at = time_str
#     target.updated_at = time_str
#
#
# # 监听更新事件，只更新updated_at
# @event.listens_for(SNQuery, 'before_update')
# def before_update(mapper, connection, target):
#     target.updated_at = get_current_time_str()
#
#
# # 数据库连接类
# class Database:
#     def __init__(self, db_url, pool_size=100, max_overflow=10, pool_timeout=30, pool_recycle=3600):
#         self.db_url = db_url
#         self.engine = create_engine(
#             db_url,
#             pool_size=pool_size,
#             max_overflow=max_overflow,
#             pool_timeout=pool_timeout,
#             pool_recycle=pool_recycle,
#             echo=False  # 设置为True可查看SQL语句
#         )
#         self.Session = sessionmaker(autocommit=False, autoflush=False, bind=self.engine)
#
#     def create_tables(self):
#         """创建所有表结构"""
#         Base.metadata.create_all(self.engine, checkfirst=True)
#
#     def get_session(self):
#         """获取数据库会话，带有连接检查"""
#         session = self.Session()
#
#         # 检查连接是否有效，如果无效则重新连接
#         try:
#             session.execute("SELECT 1")
#         except OperationalError:
#             # 连接失效，重新创建引擎和会话
#             self.engine = create_engine(
#                 self.db_url,
#                 pool_size=self.engine.pool.size(),
#                 max_overflow=self.engine.pool.max_overflow,
#                 pool_timeout=self.engine.pool.timeout,
#                 pool_recycle=self.engine.pool.recycle,
#                 echo=False
#             )
#             self.Session = sessionmaker(autocommit=False, autoflush=False, bind=self.engine)
#             session = self.Session()
#
#         return session
#
#
# # 数据库操作类 - 基于SN进行操作
# class DBService:
#     def __init__(self, database):
#         self.db = database
#
#     def add_or_update_query(self, sn, bs_id=None, user=None, status="pending", msg="查询处理中", data=None,
#                             callback_status="", callback_msg="", extra=None):
#         """添加或更新查询记录"""
#         session = self.db.get_session()
#
#         try:
#             # 检查是否已存在该SN的记录
#             query = session.query(SNQuery).filter_by(sn=sn).first()
#
#             if not query:
#                 # 不存在记录，创建新记录
#                 query = SNQuery(
#                     sn=sn,
#                     user=user,
#                     bs_id=bs_id,
#                     status=status,
#                     msg=msg,
#                     data=json.dumps(data, ensure_ascii=False, separators=(',', ':')) if data else None,
#                     callback_status=callback_status,
#                     callback_msg=json.dumps(callback_msg, ensure_ascii=False,
#                                             separators=(',', ':')) if callback_msg else None,
#                     extra=json.dumps(extra, ensure_ascii=False, separators=(',', ':')) if extra else None
#                 )
#                 session.add(query)
#             else:
#                 # 存在记录，更新记录
#                 query.user = user or query.user  # 保持原有user，除非提供新的
#                 query.bs_id = bs_id or query.bs_id
#                 query.status = status
#                 query.msg = msg
#                 query.data = json.dumps(data, ensure_ascii=False, separators=(',', ':')) if data else None
#                 query.callback_status = callback_status
#                 query.callback_msg = json.dumps(callback_msg, ensure_ascii=False,
#                                                 separators=(',', ':')) if callback_msg else None
#                 query.extra = json.dumps(extra, ensure_ascii=False, separators=(',', ':')) if extra else None
#
#             session.commit()
#             return query.seq_id
#         except Exception as e:
#             session.rollback()
#             db_logger.error(f'add_or_update error:{e}')
#         finally:
#             session.close()
#
#     def get_query_by_sn(self, sn):
#         """获取特定SN的查询记录"""
#         session = self.db.get_session()
#
#         try:
#             query = session.query(SNQuery).filter_by(sn=sn).first()
#
#             if not query:
#                 return None
#
#             return query.to_dict()
#         except Exception as e:
#             db_logger.error(f'add_or_update error:{e}')
#         finally:
#             session.close()
#
#     def update_query_status(self, sn, status, msg=None, data=None,
#                             callback_status=None, callback_msg=None, extra=None, ss=''):
#         """更新查询状态"""
#         session = self.db.get_session()
#
#         try:
#             query = session.query(SNQuery).filter_by(sn=sn).first()
#
#             if not query:
#                 raise ValueError(f"未找到查询记录: sn={sn}")
#
#             # 更新现有记录
#             query.status = status
#             if msg is not None:
#                 query.msg = msg
#             if data is not None:
#                 query.data = json.dumps(data, ensure_ascii=False, separators=(',', ':'))
#             if callback_status is not None:
#                 query.callback_status = callback_status
#             if callback_msg is not None:
#                 query.callback_msg = json.dumps(callback_msg, ensure_ascii=False, separators=(',', ':'))
#             if extra is not None:
#                 query.extra = json.dumps(extra, ensure_ascii=False, separators=(',', ':'))
#             if ss is not None:
#                 query.ss = ss
#
#             session.commit()
#             return True
#         except Exception as e:
#             session.rollback()
#             db_logger.error(f'add_or_update error:{e}')
#         finally:
#             session.close()
#
#     def get_latest_queries(self, limit=20, offset=0):
#         """获取最新的查询记录"""
#         session = self.db.get_session()
#
#         try:
#             queries = session.query(SNQuery).order_by(SNQuery.created_at.desc()).limit(limit).offset(offset).all()
#             return [query.to_dict() for query in queries]
#         except Exception as e:
#             db_logger.error(f'add_or_update error:{e}')
#         finally:
#             session.close()
#
#
# # 初始化数据库服务
# def init_db_service():
#     """初始化并返回数据库服务实例"""
#     # 从环境变量或配置文件获取数据库连接信息
#     db_url = f'mysql+pymysql://{MYSQL_CONF["user"]}:{MYSQL_CONF["password"]}@{MYSQL_CONF["host"]}:{MYSQL_CONF["port"]}/{MYSQL_CONF["database"]}'
#     db_logger.info(f'数据库连接:{db_url}')
#     db = Database(db_url)
#     db.create_tables()  # 创建表结构（如果不存在）
#     return DBService(db)
#
#
# # 单例模式获取数据库服务
# db_service = init_db_service()
