# Data pipeline: ships crawled items to Kafka (streaming) and Redis (short-lived cache).
import json
import time
from datetime import datetime
from kafka import KafkaProducer
from kafka.errors import KafkaError
import redis
import logging

class CnhnbRealtimePipeline:
    """Scrapy item pipeline for real-time cnhnb.com (惠农网) crawl data.

    Each processed item is:
      1. stamped with crawl metadata,
      2. published to a Kafka topic (key = item ``name``),
      3. cached in Redis under ``cnhnb_price:<name>:<unix_ts>`` with a 1-hour TTL.

    Failures in either sink are logged and swallowed so the crawl keeps running;
    the original item is always passed through unchanged.
    """

    def __init__(self, kafka_servers, topic, redis_config):
        """Store connection settings; actual connections open in ``open_spider``.

        :param kafka_servers: Kafka bootstrap servers (string or list).
        :param topic: Kafka topic to publish items to.
        :param redis_config: dict with ``host``, ``port``, ``db``, ``password``.
        """
        self.kafka_servers = kafka_servers
        self.topic = topic
        self.redis_config = redis_config
        self.producer = None        # KafkaProducer, set in open_spider (None if unavailable)
        self.redis_client = None    # redis.Redis, set in open_spider (None if unavailable)
        self.logger = logging.getLogger(__name__)

    @classmethod
    def from_crawler(cls, crawler):
        """Scrapy factory hook: build the pipeline from crawler settings."""
        kafka_config = crawler.settings.get('KAFKA_BOOTSTRAP_SERVERS')
        topic = crawler.settings.get('KAFKA_TOPIC')
        redis_config = {
            'host': crawler.settings.get('REDIS_HOST'),
            'port': crawler.settings.get('REDIS_PORT'),
            'db': crawler.settings.get('REDIS_DB'),
            'password': crawler.settings.get('REDIS_PASSWORD')
        }
        return cls(kafka_config, topic, redis_config)

    def open_spider(self, spider):
        """Open the Kafka producer and Redis connection when the spider starts.

        Each sink is initialized independently; on failure it is left as ``None``
        and ``process_item`` simply skips that sink.
        """
        # Kafka producer: JSON-encoded values, UTF-8 keys, durable acks.
        try:
            self.producer = KafkaProducer(
                bootstrap_servers=self.kafka_servers,
                value_serializer=lambda v: json.dumps(v, ensure_ascii=False).encode('utf-8'),
                key_serializer=lambda k: k.encode('utf-8') if k else None,
                retries=3,
                acks='all'
            )
            self.logger.info("Kafka producer initialized: %s", self.kafka_servers)
        except Exception:
            self.logger.exception("Kafka producer initialization failed")
            self.producer = None

        # Redis connection. redis.Redis() is lazy and never raises here by
        # itself, so ping() is required to actually verify connectivity —
        # otherwise we would log success even when Redis is unreachable.
        try:
            self.redis_client = redis.Redis(
                host=self.redis_config['host'],
                port=self.redis_config['port'],
                db=self.redis_config['db'],
                password=self.redis_config['password'],
                decode_responses=True
            )
            self.redis_client.ping()
            self.logger.info(
                "Redis connection initialized: %s:%s",
                self.redis_config['host'], self.redis_config['port']
            )
        except Exception:
            self.logger.exception("Redis connection initialization failed")
            self.redis_client = None

    def close_spider(self, spider):
        """Release both connections when the spider closes."""
        if self.producer:
            self.producer.close()  # flushes pending messages before closing
            self.logger.info("Kafka producer closed")
        if self.redis_client:
            # Previously leaked: the Redis connection was never released.
            self.redis_client.close()
            self.logger.info("Redis connection closed")

    def process_item(self, item, spider):
        """Enrich the item with crawl metadata, fan out to Kafka and Redis.

        Always returns the original item (unmodified — metadata is added to a
        copy) so downstream pipelines are unaffected by sink failures.
        """
        try:
            # Work on a copy so the original item is not mutated.
            item_data = dict(item)
            item_data['crawl_timestamp'] = datetime.now().isoformat()
            item_data['spider_name'] = spider.name

            # Publish to Kafka. str() guards against non-string names, which
            # would crash the key_serializer's .encode().
            if self.producer:
                key = str(item_data.get('name', 'unknown'))
                future = self.producer.send(
                    topic=self.topic,
                    key=key,
                    value=item_data
                )
                # NOTE(review): blocking per item caps throughput; consider
                # add_callback/add_errback if ordering of logs isn't required.
                record_metadata = future.get(timeout=10)
                self.logger.info(
                    "Item sent to Kafka: topic=%s, partition=%s, offset=%s",
                    record_metadata.topic, record_metadata.partition, record_metadata.offset
                )

            # Cache in Redis with a 1-hour TTL.
            # NOTE(review): second-resolution timestamp means two same-named
            # items in the same second overwrite each other — confirm acceptable.
            if self.redis_client:
                key = f"cnhnb_price:{item_data.get('name', 'unknown')}:{int(time.time())}"
                self.redis_client.setex(
                    key,
                    3600,  # expire after 1 hour
                    json.dumps(item_data, ensure_ascii=False)
                )
                self.logger.info("Item cached in Redis: %s", key)

        except Exception:
            # Best-effort delivery: log with traceback but never break the crawl.
            self.logger.exception("Failed to process item")

        return item