import time
from functools import wraps

import redis

from common.result.error import OtherErr
from common.result import result_code, err_msg
from conf.config import Config
from common import logger
from redis_lock import Lock
from multiprocessing import Process


# Lazily-created, process-wide connection pool. The original implementation
# built a fresh ConnectionPool on every call, which defeats pooling entirely
# (every lock acquisition paid a new pool + connection setup).
_redis_pool = None


def get_redis():
    """Return a Redis client on db 3 backed by a shared connection pool.

    The pool is created on first use and reused for all later calls.
    ``decode_responses=True`` makes replies come back as ``str`` rather
    than ``bytes``.

    Returns:
        redis.Redis: client bound to the shared pool.
    """
    global _redis_pool
    if _redis_pool is None:
        _redis_pool = redis.ConnectionPool(
            host=Config.redis.host,
            port=Config.redis.port,
            password=Config.redis.password,
            decode_responses=True,
            db=3,
        )
    return redis.Redis(connection_pool=_redis_pool)


def kwargs_to_limit_key(key_prefix, *args, **kwargs):
    """Build a lock key by substituting kwargs into a colon-separated template.

    Each ``:``-separated segment of *key_prefix* is replaced by the value of
    the keyword argument with that name, if present; otherwise the segment is
    kept literally. E.g. ``('Order:name', name='x')`` -> ``'Order:x'``.
    Positional *args* are ignored.
    """
    segments = key_prefix.split(':')
    return ':'.join(str(kwargs.get(segment, segment)) for segment in segments)


def args_to_limit_key(key_prefix, *args, **kwargs):
    """Build a lock key "prefix:v1:v2:..." from argument values.

    Values that look like bound instances of ``__main__`` classes (typically
    ``self``), plain functions, methods, or classes are skipped, so decorating
    a method does not leak ``self``/``cls`` into the key.

    Returns:
        None when no arguments were supplied at all, '' when every argument
        was filtered out, otherwise the composed key string.
    """
    values = list(args) + list(kwargs.values())
    if not values:
        return None
    cache_key = ''
    for value in values:
        # Skip `self`-like instances, functions, methods and classes — only
        # plain data values should participate in the key.
        if str(value).startswith("<__main__."):
            continue
        if str(type(value)) in ("<class 'function'>", "<class 'method'>"):
            continue
        if str(value).startswith("<class"):
            continue
        if not cache_key:
            # First surviving value: start from the prefix.
            cache_key = key_prefix
        if not cache_key.endswith(":"):
            # NOTE: the original also tested `(index+1) <= len(keys)` here,
            # which is always true for a 0-based loop index — dropped as dead.
            cache_key += ":"
        cache_key += str(value)

    return cache_key


def func_to_limit_key(key_prefix, func):
    """Fallback key builder: ``"<prefix>:<function name>"``.

    Avoids a doubled colon when *key_prefix* already ends with ``:``.
    """
    separator = "" if key_prefix.endswith(":") else ":"
    return f"{key_prefix}{separator}{func.__name__}"


class AsyncRedisLimit:
    """Decorator serializing calls to an async function behind a Redis lock.

    The lock key is derived from the decorated call's arguments via
    *key_dispose* (default: args_to_limit_key); when no usable key can be
    built, it falls back to ``"<key>:<function name>"``.

    Args:
        key: key prefix for the lock name.
        key_dispose: callable turning ``(key, *args, **kwargs)`` into the
            final lock key.
        acquite_timeout: seconds to wait for the lock. (Name kept as-is —
            callers pass it by keyword.) Forced to None when blocking=False,
            as redis_lock requires.
        time_out: lock expiry (seconds).
        blocking: whether to wait for the lock at all.
        err: message attached to the OtherErr raised on acquisition failure.
    """

    def __init__(self, key=None, key_dispose=None, acquite_timeout=30, time_out=120, blocking=True, err=None):
        self.key = key
        self.key_dispose = key_dispose or args_to_limit_key
        self.acquite_timeout = acquite_timeout
        self.time_out = time_out
        self.blocking = blocking
        self.err = err or "请求超时"
        if not self.blocking:
            # Non-blocking acquires must not carry a timeout.
            self.acquite_timeout = None

    def __call__(self, func):

        @wraps(func)
        async def wrapper(*args, **kwargs):
            cache_key = None
            # Argument-based builders need at least one argument value.
            if self.key_dispose in (kwargs_to_limit_key, args_to_limit_key) and list(args) + list(kwargs.values()):
                cache_key = self.key_dispose(self.key, *args, **kwargs)
            if not cache_key:
                # Fall back to a key derived from the function name.
                cache_key = func_to_limit_key(self.key, func)
            # Create the distributed lock.
            lock = Lock(get_redis(), cache_key, expire=self.time_out, strict=False)
            if lock.acquire(blocking=self.blocking, timeout=self.acquite_timeout):
                logger.info(f"[Redis分布式锁] {cache_key}上锁, 时间：{time.time()}")
                try:
                    # Always release, whether the call succeeds or raises.
                    return await func(*args, **kwargs)
                finally:
                    logger.info(f"[Redis分布式锁]  时间：{time.time()}, {cache_key}解锁")
                    lock.release()
            else:
                # Fixed: this branch previously logged "上锁" (locked) at info
                # level although acquisition FAILED; now matches RedisLimit.
                logger.error(f"[Redis分布式锁] {cache_key}锁超时占用！")
                raise OtherErr(result_code.RedisCont.ConnectionErr, err_msg.FailErr, message=self.err)

        return wrapper


class RedisLimit:
    """Decorator serializing calls to a sync function behind a Redis lock.

    Sync counterpart of AsyncRedisLimit: derives the lock key from the call's
    arguments via *key_dispose* (default: args_to_limit_key), falling back to
    ``"<key>:<function name>"`` when no usable key can be built.

    Args:
        key: key prefix for the lock name.
        key_dispose: callable turning ``(key, *args, **kwargs)`` into the
            final lock key.
        acquite_timeout: seconds to wait for the lock. (Name kept as-is —
            callers pass it by keyword.) Forced to None when blocking=False,
            as redis_lock requires.
        time_out: lock expiry (seconds).
        blocking: whether to wait for the lock at all.
        err: message attached to the OtherErr raised on acquisition failure.
    """

    def __init__(self,  key=None, key_dispose=None, acquite_timeout=30, time_out=120, blocking=True, err=None):
        self.key_dispose = key_dispose or args_to_limit_key
        self.key = key
        self.acquite_timeout = acquite_timeout
        self.time_out = time_out
        self.blocking = blocking
        self.err = err or "请求超时"
        if not self.blocking:
            # Non-blocking acquires must not carry a timeout.
            self.acquite_timeout = None

    def __call__(self, func):
        @wraps(func)
        def wrapper(*args, **kwargs):
            cache_key = None
            # Argument-based builders need at least one argument value.
            if self.key_dispose in (kwargs_to_limit_key, args_to_limit_key) and list(args) + list(kwargs.values()):
                cache_key = self.key_dispose(self.key, *args, **kwargs)
            if not cache_key:
                # Fall back to a key derived from the function name.
                cache_key = func_to_limit_key(self.key, func)
            # Create the distributed lock.
            lock = Lock(get_redis(), cache_key, expire=self.time_out, strict=False)
            if lock.acquire(blocking=self.blocking, timeout=self.acquite_timeout):
                logger.info(f"[Redis分布式锁] {cache_key}上锁, 时间：{time.time()}")
                try:
                    # `except Exception as e: raise e` removed — it only
                    # re-raised unchanged; try/finally is sufficient.
                    return func(*args, **kwargs)
                finally:
                    logger.info(f"[Redis分布式锁]  时间：{time.time()}, {cache_key}解锁")
                    lock.release()
            else:
                logger.error(f"[Redis分布式锁] {cache_key}锁超时占用！")
                raise OtherErr(result_code.RedisCont.ConnectionErr, err_msg.FailErr, message=self.err)
        return wrapper


# NOTE(review): leftover scratch/demo stub — nothing in the visible file
# references it; candidate for removal once confirmed unused elsewhere.
class a:

    # @staticmethod
    def b(self):
        pass

    pass


@RedisLimit(key='Order:name', key_dispose=kwargs_to_limit_key, err="触发锁")
def run(name, a=0):
    """Simulate a slow task that returns the same result on every run.

    Raises OtherErr when ``a == 1`` to exercise the lock's error path.
    """
    time.sleep(1)
    failure_requested = (a == 1)
    if failure_requested:
        raise OtherErr(result_code.RedisCont.ConnectionErr, err_msg.FailErr)
    print(f"{name}:start func")



# @AsyncRedisLimit(key='Order', key_dispose=func_to_limit_key)
# async def run():
#     """模拟耗时较长，每次执行返回结果都一样的情况"""
#     # time.sleep(31)
#     print(f"1:start func")

if __name__ == '__main__':
    # Spawn five workers contending for the per-name lock; two of them
    # request the failure path (a=1) to exercise error handling under lock.
    worker_kwargs = [
        {"name": 1, "a": 0},
        {"name": 1, "a": 1},
        {"name": 1, "a": 0},
        {"name": 2, "a": 1},
        {"name": 3, "a": 0},
    ]
    # Create all processes first, then start them in order — same sequence
    # as the original explicit r1..r5 statements.
    workers = [Process(target=run, kwargs=kw) for kw in worker_kwargs]
    for worker in workers:
        worker.start()




