# plate_consumer/src/consumer.py
import os
import time
import json
import redis
import logging
from datetime import datetime
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
from .config import ConsumerConfig
from .models import DetectionResult, init_db


class DBConsumer:
    """Consume DB write tasks from a Redis stream and persist them via SQLAlchemy.

    Messages are read from the ``DB_OPS`` stream through a consumer group,
    deduplicated by business ID (a Redis SET with a 72 h TTL), retried up to
    ``max_retries`` times on failure, and routed to a dead-letter stream once
    retries are exhausted.
    """

    def __init__(self):
        # Logger setup. Guard against attaching a second StreamHandler when
        # several DBConsumer instances share the same module-level logger —
        # the previous version added one handler per instance, so every log
        # line was emitted once per instance created.
        self.logger = logging.getLogger(__name__)
        self.logger.setLevel(logging.INFO)
        if not self.logger.handlers:
            handler = logging.StreamHandler()
            handler.setFormatter(logging.Formatter(
                '%(asctime)s - %(name)s - %(levelname)s - %(message)s'))
            self.logger.addHandler(handler)

        # Database: pooled engine + session factory. init_db() creates the
        # table structure (idempotent on startup).
        self.engine = create_engine(
            ConsumerConfig.DB_URI,
            pool_size=ConsumerConfig.DB_POOL_SIZE,
            pool_recycle=300  # recycle connections before server-side idle timeouts
        )
        self.Session = sessionmaker(bind=self.engine)
        init_db(self.engine)

        # Redis connection plus stream/group bookkeeping.
        self.redis_conn = redis.Redis(
            host=ConsumerConfig.REDIS_HOST,
            port=ConsumerConfig.REDIS_PORT,
            db=ConsumerConfig.REDIS_DB,
            decode_responses=True  # all stream fields arrive as str, not bytes
        )
        self.stream_key = ConsumerConfig.REDIS_STREAMS['DB_OPS']
        self.dead_letter_key = ConsumerConfig.REDIS_STREAMS['DEAD_LETTER']
        self.group_name = "db_ops_group"
        self.max_retries = 3
        # Redis SET key holding business IDs that have already been applied.
        self.processed_ids_key = "processed_business_ids"

    def _is_duplicate(self, business_id):
        """Return True if this business ID has already been processed.

        A falsy ID (None / empty string) cannot be deduplicated, so it is
        treated as not-a-duplicate. The previous version passed ``None``
        straight to SISMEMBER, which raises ``redis.DataError`` and sent the
        message down the retry/dead-letter path for no reason.
        """
        if not business_id:
            return False
        return self.redis_conn.sismember(self.processed_ids_key, business_id)

    def _mark_processed(self, business_id):
        """Record a business ID as processed; the dedup set lives for 72 h.

        NOTE(review): EXPIRE is refreshed on every add, so the TTL of the
        whole set slides forward while traffic flows — individual entries
        only age out after 72 *idle* hours. Confirm this matches intent.
        """
        if not business_id:
            return  # nothing to record for messages without a business ID
        self.redis_conn.sadd(self.processed_ids_key, business_id)
        self.redis_conn.expire(self.processed_ids_key, 72 * 3600)

    def _create_group(self):
        """Create the consumer group (idempotent).

        ``id='$'`` means the group only sees messages added after creation;
        ``mkstream=True`` creates the stream if it does not exist yet. A
        BUSYGROUP error (group already exists) is expected and swallowed.
        """
        try:
            self.redis_conn.xgroup_create(
                name=self.stream_key,
                groupname=self.group_name,
                id='$',
                mkstream=True
            )
        except redis.exceptions.ResponseError as e:
            if "BUSYGROUP" not in str(e):
                raise

    def _process_message(self, message):
        """Apply a single stream message to the database.

        On success the business ID is marked processed and the message is
        ACKed; any failure rolls back the session and is delegated to
        ``_handle_failure`` (retry or dead-letter).
        """
        msg_id, data = message
        business_id = data.get('business_id')
        try:
            # ⭐️ Dedup check: skip messages whose business ID was already applied.
            if self._is_duplicate(business_id):
                self.logger.warning("跳过重复消息: %s", business_id)
                self.redis_conn.xack(self.stream_key, self.group_name, msg_id)
                return

            task_type = data['task_type']
            payload = json.loads(data['payload'])

            session = self.Session()
            try:
                if task_type == 'insert':
                    self._handle_insert(session, payload)
                elif task_type == 'update':
                    self._handle_update(session, payload)
                else:
                    raise ValueError(f"未知任务类型: {task_type}")

                session.commit()

                # Mark + ACK only after a successful commit, so a crash in
                # between re-delivers the message instead of losing it.
                self._mark_processed(business_id)
                self.redis_conn.xack(self.stream_key, self.group_name, msg_id)
            except Exception:
                session.rollback()
                raise  # bare raise preserves the original traceback (was `raise e`)
            finally:
                session.close()

        except Exception as e:
            # logger.exception records the traceback, not just str(e).
            self.logger.exception("消息处理失败: %s", e)
            self._handle_failure(msg_id, business_id, data, e)

    def _handle_insert(self, session, payload):
        """Stage a detection record for a vehicle entry.

        ``merge`` (rather than ``add``) keeps the insert idempotent if the
        same primary key is re-delivered.
        """
        record = DetectionResult(
            plate_number=payload['plate_number'],
            entry_time=datetime.fromtimestamp(payload['entry_time']),
            confidence=payload['confidence'],
            amount_due=0  # settled later by the 'update' task at exit time
        )
        session.merge(record)

    def _handle_update(self, session, payload):
        """Stage exit time and amount due on the open record for this plate.

        Only the first record with no ``exit_time`` is updated; if no open
        record exists the update is silently a no-op (the commit still
        succeeds and the message is ACKed).
        """
        record = session.query(DetectionResult).filter(
            DetectionResult.plate_number == payload['plate_number'],
            DetectionResult.exit_time.is_(None)
        ).first()

        if record:
            record.exit_time = datetime.fromtimestamp(payload['exit_time'])
            record.amount_due = payload['amount_due']

    def _handle_failure(self, msg_id, business_id, data, error):
        """Retry a failed message or move it to the dead-letter stream.

        The original message is always ACKed: a retry is re-published as a
        brand-new stream entry with an incremented ``retries`` counter, so
        the pending-entries list never grows with poison messages.
        """
        current_retries = int(data.get('retries', 0))

        if current_retries < self.max_retries:
            # Re-publish with an auto-generated ID and bumped retry count.
            new_data = {**data, 'retries': current_retries + 1}
            self.redis_conn.xadd(self.stream_key, new_data, id='*')
        else:
            # Retries exhausted: preserve the full context in the dead-letter
            # stream for manual inspection/replay.
            self.redis_conn.xadd(
                self.dead_letter_key,
                {
                    'original_msg_id': msg_id,
                    'business_id': business_id,
                    'original': json.dumps(data),
                    'failed_at': datetime.now().isoformat(),
                    'error': str(error)
                }
            )
        self.redis_conn.xack(self.stream_key, self.group_name, msg_id)

    def run(self, *, batch_size=1, block_ms=5000):
        """Main consume loop; blocks forever.

        Args:
            batch_size: Maximum messages fetched per XREADGROUP call.
                Defaults to 1 (the original behaviour); raise it for
                throughput at the cost of per-message latency.
            block_ms: Milliseconds to block waiting for new messages before
                the read returns empty and the loop re-polls.
        """
        self._create_group()
        consumer_name = f"consumer_{os.getpid()}"

        while True:
            try:
                messages = self.redis_conn.xreadgroup(
                    groupname=self.group_name,
                    consumername=consumer_name,
                    streams={self.stream_key: '>'},  # only never-delivered messages
                    count=batch_size,
                    block=block_ms
                )

                # xreadgroup returns an empty result on block timeout.
                if messages:
                    for _stream, msg_list in messages:
                        for msg in msg_list:
                            self._process_message(msg)

            except redis.exceptions.ConnectionError:
                self.logger.warning("Redis连接丢失，5秒后重连...")
                time.sleep(5)
            except Exception as e:
                self.logger.exception("运行时异常: %s", e)
                time.sleep(1)
