import logging
import threading
import time
import redis

logger = logging.getLogger("redis_client")


class RedisClient:
    """Thin Redis connection wrapper with a heartbeat health check.

    A daemon ``threading.Timer`` re-arms itself after every check; when a
    PING fails the client attempts up to ``retry_time`` reconnects.
    """

    def __init__(self, host="127.0.0.1", port=6379, db=0, heartbeat_interval=60, retry_time=5, key="goods"):
        """Connect to Redis and start the heartbeat.

        Args:
            host / port / db: Redis server coordinates.
            heartbeat_interval: seconds between PING health checks.
            retry_time: max reconnect attempts per failed heartbeat.
            key: key prefix used by subclasses (e.g. ``GoodsCache``).

        Raises:
            Exception: whatever the initial connection attempt raised
            (re-raised by ``_init_connection``).
        """
        self.host = host
        self.port = port
        self.db = db
        self.key = key
        self.retry_time = retry_time

        # Establish the initial connection; failure propagates to the caller.
        self._init_connection()

        # Heartbeat setup: the timer must exist (None) before _start_heartbeat
        # checks whether it needs cancelling.
        self.heartbeat_interval = heartbeat_interval
        self._heartbeat_timer = None
        self._start_heartbeat()

        logger.info(f"Redis缓存初始化成功 {host}:{port}/{db}")

    def _create_connection(self):
        """Build a Redis connection from the configured parameters.

        Shared by initial connect and reconnect so the settings cannot drift.
        """
        return redis.Redis(
            host=self.host,
            port=self.port,
            db=self.db,
            socket_connect_timeout=3,
            decode_responses=True
        )

    def _init_connection(self):
        """Create the initial connection and verify it with PING.

        Raises:
            ConnectionError: if PING returns falsy.
            Exception: any connection error, logged then re-raised.
        """
        try:
            self.redis = self._create_connection()
            if not self.redis.ping():
                raise ConnectionError("Redis连接失败")
        except Exception as e:
            logger.error(f"Redis连接异常: {str(e)}")
            raise

    def _start_heartbeat(self):
        """(Re)arm the heartbeat timer for one more interval.

        The timer is a daemon thread so it never blocks interpreter exit.
        """
        if self._heartbeat_timer:
            self._heartbeat_timer.cancel()
        self._heartbeat_timer = threading.Timer(
            self.heartbeat_interval, self._run_heartbeat
        )
        self._heartbeat_timer.daemon = True
        self._heartbeat_timer.start()

    def _run_heartbeat(self):
        """Run one heartbeat check, reconnecting on failure, then reschedule.

        BUG FIX: the original looped ``for i in self.retry_time`` — iterating
        the *int* itself, which raised TypeError, fell into the generic
        ``except`` and silently performed only a single reconnect attempt.
        It also ``raise``d inside its own ``try`` after exhausting retries,
        which the same ``except`` swallowed while firing one accidental extra
        reconnect; final failure is now just logged.
        """
        try:
            if self.redis.ping():
                logger.debug("Redis心跳检测成功")
            else:
                logger.warning("Redis心跳异常，尝试重连...")
                # Retry up to retry_time times; stop on first success.
                for _ in range(self.retry_time):
                    if self._reconnect():
                        break
                else:
                    logger.error("Redis心跳异常，重连失败")
        except Exception as e:
            # PING itself blew up (dead socket etc.) — best-effort reconnect.
            logger.error(f"心跳检测失败: {str(e)}，尝试重连...")
            self._reconnect()
        finally:
            # Always schedule the next check, even after a failed cycle.
            self._start_heartbeat()

    def _reconnect(self):
        """Try to establish a fresh connection.

        Returns:
            bool: True when the new connection answered PING and replaced
            ``self.redis``; False on any failure (error is logged).
        """
        try:
            new_conn = self._create_connection()
            if new_conn.ping():
                self.redis = new_conn
                logger.info("Redis重连成功")
                return True
        except Exception as e:
            logger.error(f"重连失败: {str(e)}")
        return False


class GoodsCache(RedisClient):
    """Redis-backed cache for goods records.

    Each goods item is a flat dict stored as a Redis hash under the key
    ``"<key>:<id>"`` (e.g. ``"goods:42"``). Connection management and the
    heartbeat come from ``RedisClient``.
    """

    def __init__(self, host="127.0.0.1", port=6379, db=0, heartbeat_interval=60, retry_time=5, key="goods"):
        # Delegate connection / heartbeat setup entirely to the base class.
        super().__init__(host, port, db, heartbeat_interval, retry_time, key)

    def get_one(self, id):
        """Fetch a single goods hash.

        Returns:
            dict: the stored fields; {} when the key is absent or on error.
        """
        try:
            result = self.redis.hgetall(f"{self.key}:{id}")
            logger.info(f"获取到缓存数据: {result}")
            return result
        except Exception as e:
            logger.error(f"获取失败: {str(e)}")
            return {}

    def set_one(self, data, expire=24*60*60):
        """Store one goods dict (must contain an 'id' field) with a TTL.

        Args:
            data: flat dict of the goods fields, keyed by 'id'.
            expire: time-to-live in seconds (default: one day).
        """
        logger.debug(f"设置商品信息: {data}")
        try:
            cache_key = f"{self.key}:{data['id']}"
            self.redis.hset(cache_key, mapping=data)
            self.redis.expire(cache_key, time=expire)
            logger.info(f"成功写入缓存")
        except Exception as e:
            logger.error(f"设置失败: {str(e)}")
            return {}

    def get_many(self, ids=None):
        """Fetch several goods hashes in one round trip via a pipeline.

        BUG FIX: the original signature used a mutable default (``ids=[]``),
        a shared-across-calls hazard; ``None`` is now the sentinel.

        Returns:
            list[dict]: one (possibly empty) dict per id; [] on error.
        """
        if ids is None:
            ids = []
        logger.debug(f"批量获取商品信息 ID列表: {ids}")
        try:
            with self.redis.pipeline() as pipe:
                for id_ in ids:
                    pipe.hgetall(f"{self.key}:{id_}")
                result = pipe.execute()
                logger.info(f"获取{len(ids)}条缓存数据")
                return result
        except Exception as e:
            logger.error(f"批量获取失败: {str(e)}")
            return []

    def set_many(self, goods, expire=24*60*60):
        """Write several goods dicts in one pipeline, each with its own TTL.

        Args:
            goods: sequence of dicts, each containing an 'id' field.
                (The original annotation said ``dict``, but the body iterates
                elements and indexes them with ``['id']`` — it is a list.)
            expire: time-to-live in seconds for every entry.
        """
        logger.debug(f"批量设置商品信息 数量: {len(goods)}")
        try:
            with self.redis.pipeline() as pipe:
                for good in goods:
                    cache_key = f"{self.key}:{good['id']}"
                    pipe.hset(cache_key, mapping=good)
                    pipe.expire(cache_key, time=expire)
                pipe.execute()
                logger.info(f"成功写入{len(goods)}条缓存")
        except Exception as e:
            logger.error(f"批量设置失败: {str(e)}")
            return []

    def delete_cache(self, id):
        """Delete the cache entry for a single goods id (best effort)."""
        try:
            self.redis.delete(f"{self.key}:{id}")
            logger.info(f"成功删除ID为{id}的缓存")
        except Exception as e:
            logger.error(f"删除失败: {str(e)}")

    def double_delay_delete(self, id, instance, data, wait_time=0.02):
        """Delayed double delete for cache/DB consistency under concurrency.

        Deletes the cache, applies ``data`` onto ``instance``, waits
        ``wait_time`` seconds, then deletes again to evict any stale value
        written by a concurrent reader in between.

        NOTE(review): attributes are set on ``instance`` but nothing here
        persists it — presumably the caller saves the model; confirm.
        """
        self.delete_cache(id)
        for field, value in data.items():
            setattr(instance, field, value)
        time.sleep(wait_time)
        self.delete_cache(id)
        logger.info(f"成功删除ID为{id}的缓存")

    def clear_cache(self):
        """Delete every cached goods entry under ``"<key>:*"``.

        Uses SCAN (non-blocking, cursor-based) instead of the original
        blocking KEYS command, which can stall the server on large keyspaces.
        """
        try:
            keys = list(self.redis.scan_iter(match=f"{self.key}:*"))
            if len(keys) > 0:
                self.redis.delete(*keys)
                logger.info(f"成功清空{len(keys)}条缓存")
            else:
                logger.info("没有需要清空的缓存")
        except Exception as e:
            logger.error(f"清空失败: {str(e)}")

    def pre_reduce_stock(self, id, quantity=1):
        """Atomically pre-deduct stock with a Lua script (EVAL).

        The check-and-decrement runs server-side in one script, so no two
        clients can both pass the stock check for the same units.

        Args:
            id: goods id whose hash field 'stock' is decremented.
            quantity: units to deduct; must be positive.

        Returns:
            int: 1 on success, 0 when stock is insufficient or quantity
            is non-positive.
        """
        # Guard: the original script would let a negative quantity *increase*
        # stock via HINCRBY (stock >= negative is always true for stock > 0).
        if quantity <= 0:
            return 0
        script = """
           local stock_key = KEYS[1]
           local product_id = ARGV[1]
           local decrement = tonumber(ARGV[2])
           local stock = tonumber(redis.call('HGET', stock_key, product_id) or 0)
           if stock >= decrement and stock > 0 then
               redis.call('HINCRBY', stock_key, product_id, -decrement)
               return 1
           else
               return 0
           end
        """
        keys = [f"{self.key}:{id}"]
        # ARGV[1] is the hash *field* name ('stock'), ARGV[2] the amount.
        args = ["stock", quantity]
        return self.redis.eval(script, len(keys), *keys, *args)




