# -*- coding:utf-8 -*-
import redis
import json
import time
from threading import Thread
from datetime import datetime, timedelta
from utils.redis_utils import redis_conn
class PythonRedisStreamQueue:
    """Task queue built on Redis Streams with consumer groups, automatic retries,
    and cleanup of pending (delivered-but-unacked) messages.

    Typical usage:
        q = PythonRedisStreamQueue()
        q.add_job('stream_queue:test', {'x': 1})
        q.worker('stream_queue:test', callback)   # blocks while consumer threads run
    """

    def __init__(self, host='127.0.0.1', port=6379, db=0, password=None, max_retries=3, queue_max_len=None, read_count=1, block_time=1000):
        """Create the queue manager and its Redis connection.

        host/port/db/password: Redis connection info (replace as needed)
        max_retries: -1 = retry forever, 0 = never retry, otherwise retry count
        queue_max_len: max stream length; None = unlimited, 0 = nothing can be
            written, otherwise an approximate cap (oldest entries are trimmed)
        read_count: messages fetched per XREADGROUP call
        block_time: XREADGROUP block timeout in milliseconds
        """
        self.redis = redis.Redis(host=host, port=port, db=db, password=password)
        self.queues = {}        # registered queues
        self.delay_queues = {}  # registered delay queues
        self.groups = {}        # consumer-group name -> list of worker threads
        self.max_retries = max_retries
        self.queue_max_len = queue_max_len
        self.read_count = read_count
        self.block_time = block_time

    def add_job(self, stream_name, data, delay=None, job=None):
        """Append a task to the stream.

        stream_name: stream key, e.g. 'stream_queue:test'
        data: JSON-serializable payload handed to the worker callback
        delay: delay in seconds -- NOT supported; any truthy value is rejected
        job: bookkeeping dict; defaults to {'retries': 0}
        Returns True on success, False when a delay was requested.
        """
        job_data = json.dumps(data, ensure_ascii=False)

        # Default bookkeeping: just a retry counter.
        if job is None:
            job = {'retries': 0}
        job_info = json.dumps(job, ensure_ascii=False)

        if delay:
            # Delayed jobs would need the side ZSET plus a mover thread
            # (_move_to_stream); that pipeline is not wired up yet.
            print('add_job delay job error: delay queue not support')
            return False
        self.redis.xadd(stream_name, {'job': job_info, 'data': job_data}, maxlen=self.queue_max_len)
        return True

    def worker(self, stream_name, callback, group_name='group_1', thread_count=1):
        """Start `thread_count` daemon consumer threads for `group_name`.

        callback(data) is invoked per message; raising inside it triggers the
        retry mechanism. NOTE: this method then join()s the threads via stop(),
        so it blocks the caller for as long as the consumers run (by design:
        the launching process simply hosts the daemon threads).
        """
        if group_name not in self.groups:
            self.create_consumer_group(stream_name, group_name)
            self.groups[group_name] = []
            for _ in range(thread_count):
                thread = Thread(target=self._consumer, args=(stream_name, callback, group_name))
                thread.daemon = True
                thread.start()
                self.groups[group_name].append(thread)
        self.stop(group_name=group_name)
        return True

    def pending_info(self, stream_name, group_name):
        """Return the XPENDING summary for the group with min/max ids decoded
        to str, or None when nothing is pending."""
        info = self.redis.xpending(stream_name, group_name)
        if info.get('pending', 0) == 0:
            return None
        # redis-py returns the min/max message ids as bytes; normalize for callers.
        for key, value in info.items():
            if key == 'min':
                info['min'] = value.decode('utf-8') if value else None
            if key == 'max':
                info['max'] = value.decode('utf-8') if value else None
        return info

    def pending_messages(self, stream_name, group_name, start='-', end='+', count=1, consumer=None):
        """Return the detailed pending-message list (XPENDING with a range)."""
        return self.redis.xpending_range(stream_name, group_name, start, end, count, consumer)

    def stream_info(self, stream_name):
        """Return XINFO STREAM statistics for the stream."""
        return self.redis.xinfo_stream(stream_name)

    def remove_pending_messages(self, stream_name, group_name):
        """Force-complete every pending (delivered but unacked) message of the group.

        Intended to run before (re)starting workers so stale deliveries do not
        accumulate. Best effort: all errors are logged, never raised.
        """
        try:
            info = self.pending_info(stream_name, group_name)
            if not info:
                return

            # Fetch the ids of all unacked messages. The consumer name is fixed
            # to 'consumer', matching the name _consumer reads under.
            pending_messages = self.pending_messages(stream_name, group_name, '-', '+', info.get('pending', 0), 'consumer')
            pending_ids = [message['message_id'].decode('utf-8') for message in pending_messages]
            if not pending_ids:
                return

            # Mark each message as done and drop it from the stream.
            for message_id in pending_ids:
                self.redis.xack(stream_name, group_name, message_id)
                self.redis.xdel(stream_name, message_id)
                print(f"Removed pending message ID: {message_id}")
            print(f"Removed pending message ID count: {len(pending_ids)}/{info['pending']} min: {info['min']} max: {info['max']}")
        except redis.exceptions.ResponseError as e:
            # A missing stream/group is expected on first run; warn on anything else.
            if "NOGROUP No such key" not in str(e):
                print(f"Warning: failed to reset pending messages, redis.exceptions.ResponseError: {e}")
        except Exception as e:
            print(f"Warning: failed to reset pending messages: {e}")

    def _pop_delayed_job(self, stream_name):
        """Pop one due job (score <= now) from the '<stream>:delayed' ZSET.

        Returns the job's JSON string, or None when nothing is due.
        Uses time.time() for the epoch: the previous
        datetime.utcnow().timestamp() treated the naive UTC datetime as LOCAL
        time, shifting due-times by the UTC offset on non-UTC hosts.
        NOTE(review): zrangebyscore + zrem is not atomic; two movers could race
        over the same job -- confirm a single mover thread is used.
        """
        now = int(time.time())
        job_data = self.redis.zrangebyscore(f"{stream_name}:delayed", 0, now, start=0, num=1)
        if job_data:
            job_data = job_data[0].decode('utf-8')
            self.redis.zrem(f"{stream_name}:delayed", job_data)
            return job_data
        return None

    def _move_to_stream(self, stream_name):
        """Move one due delayed job into the live stream."""
        job_data = self._pop_delayed_job(stream_name)
        if job_data:
            # Use the configured cap instead of the previous hard-coded 10000
            # so trimming is consistent with add_job.
            self.redis.xadd(stream_name, {'job': job_data}, maxlen=self.queue_max_len)

    def create_consumer_group(self, stream_name, group_name):
        """Create the consumer group (and the stream, via mkstream) if missing;
        an already-existing group is not an error."""
        try:
            self.redis.xgroup_create(stream_name, group_name, id='0', mkstream=True)
        except redis.exceptions.ResponseError as e:
            if "BUSYGROUP Consumer Group name already exists" not in str(e):
                raise e

    def _consumer(self, stream_name, callback, group_name):
        """Consumer loop: read new messages for the group, run callback(data),
        ack/delete on success, re-enqueue with an incremented retry counter on
        failure (honoring max_retries)."""
        while True:
            try:
                # Blocking read; '>' means only messages never delivered to this group.
                messages = self.redis.xreadgroup(group_name, 'consumer', {stream_name: '>'}, count=self.read_count, block=self.block_time)
                if messages:
                    for stream, entries in messages:
                        # Process EVERY returned entry; the previous code only
                        # handled entries[0], stranding the rest as pending
                        # whenever read_count > 1.
                        for message_id, fields in entries:
                            # Decode the expected fields; fall back to safe
                            # defaults so a malformed message cannot raise
                            # NameError and kill the thread.
                            job = {'retries': 0}
                            data = None
                            for key, value in fields.items():
                                field = key.decode('utf-8')
                                if field == 'job':
                                    job = json.loads(value.decode('utf-8'))
                                elif field == 'data':
                                    data = json.loads(value.decode('utf-8'))
                            try:
                                # User callback; any exception triggers retry handling.
                                callback(data)
                                # Success: ack, then delete the message outright.
                                self.redis.xack(stream_name, group_name, message_id)
                                self.redis.xdel(stream_name, message_id)
                            except Exception as e:
                                # max_retries: 0 = never retry, -1 = retry forever,
                                # otherwise a retry count.
                                if self.max_retries == -1 or job.get('retries', 0) < self.max_retries:
                                    job['retries'] = job.get('retries', 0) + 1
                                    # Re-enqueue immediately (no delay support),
                                    # then drop the failed copy.
                                    self.add_job(stream_name, data=data, delay=None, job=job)
                                    self.redis.xack(stream_name, group_name, message_id)
                                    self.redis.xdel(stream_name, message_id)
                                    print(f"Job retry: {job} {data}")
                                else:
                                    # Retries exhausted: drop the job for good.
                                    self.redis.xack(stream_name, group_name, message_id)
                                    self.redis.xdel(stream_name, message_id)
                                    print(f"Job reached max retries: {job}")
                                print(f"Error processing job: {e}")
            except redis.exceptions.TimeoutError:
                # Socket timeout: keep polling.
                print("Error:PythonRedisStreamQueue Consumer Handle the timeout (e.g., by checking if the consumer should stop)")
            except redis.exceptions.ResponseError as e:
                if "NOGROUP No such key" in str(e):
                    print(f"Error PythonRedisStreamQueue Consumer exception, please restart the job worker: {e}")
                    raise e
                else:
                    print(f"Error PythonRedisStreamQueue Consumer exception: {e}")
                    raise e
            except Exception as e:
                print(f"Error PythonRedisStreamQueue Consumer exception: {e}")
                raise e

    def success_job(self, stream_name, group_name, message_id):
        """Manually ack and delete a finished message. Best effort: errors are logged."""
        try:
            self.redis.xack(stream_name, group_name, message_id)
            self.redis.xdel(stream_name, message_id)
        except Exception as e:
            print(f"Error success_job: {e}")

    def stop(self, group_name):
        """Join the worker threads of `group_name`; blocks until they exit.
        Unknown group names are a no-op (previously raised KeyError)."""
        for thread in self.groups.get(group_name, []):
            thread.join()

# Create a module-level singleton (importable as: from python_redis_stream_queue import queue).
# Fix: the original referenced an undefined name `ThinkQueue`, which raised
# NameError at import time; the class defined in this module is PythonRedisStreamQueue.
# redis.Redis connects lazily, so instantiating here performs no network I/O.
queue = PythonRedisStreamQueue()