import scrapy
from scrapy_redis.spiders import RedisSpider
from sdWeatherSpider.items import SdweatherspiderItem
from urllib.parse import urljoin
import socket
import redis
from scrapy.exceptions import CloseSpider
from redis.exceptions import ConnectionError as RedisConnectionError
import time
from scrapy import signals
# Seeding the distributed queue (run once from redis-cli before crawling):
#   redis-cli
#   127.0.0.1:6379> LPUSH sdweather:start_urls '{"url": "http://www.weather.com.cn/guangdong/index.shtml"}'
#   (integer) 1
#   127.0.0.1:6379> exit
# Then start a worker:  scrapy crawl everyCityinSD

class EverycityinSDSpider(RedisSpider):
    """Distributed weather spider driven by a Redis start-URL queue.

    Workers pop start URLs from ``redis_key``, collect per-city links from a
    province index page, and scrape each city's 7-day forecast. A shared Redis
    set is used to skip cities already handled by any worker.
    """

    name = 'everyCityinSD'
    allowed_domains = ['www.weather.com.cn']
    redis_key = 'sdweather:start_urls'  # scrapy-redis queue of seed URLs

    # Shared set of city codes already scraped (cross-worker dedupe).
    PROCESSED_CITIES_KEY = 'weather:cities:processed'

    def __init__(self, *args, **kwargs):
        super(EverycityinSDSpider, self).__init__(*args, **kwargs)
        # Hostname identifies this worker in per-worker bookkeeping keys.
        self.server_id = socket.gethostname()
        # Bookkeeping connection; established in spider_opened, None until then.
        self.redis_conn = None

    @classmethod
    def from_crawler(cls, crawler, *args, **kwargs):
        """Hook spider_opened/spider_closed so the Redis connection tracks
        the spider's lifecycle."""
        spider = super().from_crawler(crawler, *args, **kwargs)
        # Use the module-level `signals` import (was `scrapy.signals`,
        # inconsistent with the file's own imports).
        crawler.signals.connect(spider.spider_opened, signal=signals.spider_opened)
        crawler.signals.connect(spider.spider_closed, signal=signals.spider_closed)
        return spider

    def spider_opened(self, spider):
        """Open the bookkeeping Redis connection, retrying up to 5 times.

        Raises:
            CloseSpider: if all attempts fail.
        """
        max_retries = 5
        for attempt in range(max_retries):
            try:
                self.redis_conn = redis.Redis(
                    host=self.settings.get('REDIS_HOST', 'localhost'),
                    port=self.settings.get('REDIS_PORT', 6379),
                    db=self.settings.get('REDIS_DB', 0),
                    decode_responses=True,
                )
                self.redis_conn.ping()  # fail fast if the server is unreachable
                self.logger.info("Redis连接已建立")
                return
            # Fix: the original caught (RedisConnectionError,
            # redis.exceptions.ConnectionError) — both names are the SAME
            # class (aliased at import time), so the tuple was redundant.
            except RedisConnectionError as e:
                self.logger.warning(f"Redis连接失败 (尝试 {attempt + 1}/{max_retries}): {str(e)}")
                if attempt < max_retries - 1:
                    time.sleep(2)  # back off before retrying
        self.logger.error("无法连接到Redis服务器")
        # NOTE(review): Scrapy catches and logs exceptions raised from signal
        # handlers, so this CloseSpider may not actually stop the crawl —
        # confirm; closing via self.crawler.engine may be needed.
        raise CloseSpider("Redis连接失败")

    def spider_closed(self, spider):
        """Release the bookkeeping Redis connection, if one was opened."""
        if self.redis_conn:
            self.redis_conn.close()
            self.logger.info("Redis连接已关闭")

    def parse(self, response):
        """Parse a province index page and schedule one request per city.

        Skips cities whose code is already in the shared processed-set.
        """
        if not self.redis_conn:
            self.logger.error("Redis连接未初始化")
            return

        # Province slug is the last path segment before the trailing file,
        # e.g. .../guangdong/index.shtml -> "guangdong".
        province = response.url.split('/')[-2]

        # Record which provinces this worker has handled.
        self.redis_conn.sadd(f'sdweather:province:processed:{self.server_id}', province)

        city_links = response.xpath(
            '//a[contains(@href, "/weather/") and contains(@title, "天气预报")]/@href'
        ).getall()
        city_names = response.xpath(
            '//a[contains(@href, "/weather/") and contains(@title, "天气预报")]/text()'
        ).getall()

        for link, name in zip(city_links, city_names):
            city_url = urljoin(response.url, link)
            if '/weather/' in city_url and city_url.endswith('.shtml'):
                city_code = self.extract_city_code(city_url)

                # Cross-worker dedupe: skip cities already scraped. (The set
                # is populated in parse_city_weather after a successful parse.)
                if self.redis_conn.sismember(self.PROCESSED_CITIES_KEY, city_code):
                    self.logger.info(f'城市 {name} 已处理过，跳过')
                    continue

                yield scrapy.Request(
                    url=city_url,
                    callback=self.parse_city_weather,
                    meta={'province': province, 'city': name, 'city_code': city_code},
                    errback=self.errback_city,
                )

    def extract_city_code(self, url):
        """Return the city code from a weather URL.

        E.g. ".../weather/101280101.shtml" -> "101280101"; empty string if the
        URL does not end in an .shtml file.
        """
        parts = url.split('/')
        if len(parts) >= 2 and parts[-1].endswith('.shtml'):
            return parts[-1].split('.')[0]
        return ''

    def parse_city_weather(self, response):
        """Parse a city's 7-day forecast page into an item.

        On parse failure the item is still yielded with a placeholder weather
        value, and the city is NOT marked as processed (so it may be retried).
        """
        item = SdweatherspiderItem()
        item['city'] = response.meta.get('city', '未知城市')
        item['city_code'] = response.meta.get('city_code', '')

        try:
            weather_list = []
            weather_container = response.xpath('//ul[@class="t clearfix"]')
            if not weather_container:
                raise ValueError("未找到天气列表容器")

            for day in weather_container.xpath('./li'):
                date = day.xpath('./h1/text()').get(default='未知日期').strip()
                weather_desc = day.xpath('./p[@title]/text()').get(default='未知天气').strip()
                temp_high = day.xpath('./p[@class="tem"]/span/text()').get(default='')
                temp_low = day.xpath('./p[@class="tem"]/i/text()').get(default='')
                # Night-time pages omit the high; show only the low then.
                temp = f'{temp_high}/{temp_low}' if temp_high else temp_low
                wind = (day.xpath('./p[@class="win"]/em/@title').get(default='') +
                        day.xpath('./p[@class="win"]/i/text()').get(default='')).strip()
                wind = wind if wind else '未知风力'
                weather_list.append(f'{date}:{weather_desc},{temp},{wind}')

            item['weather'] = '\n'.join(weather_list)

            # Bug fix: record the city in the shared processed-set. The
            # original never added to this set, so the sismember() dedupe
            # check in parse() could never skip anything.
            if self.redis_conn and item['city_code']:
                self.redis_conn.sadd(self.PROCESSED_CITIES_KEY, item['city_code'])

        except Exception as e:
            self.logger.error(f'城市{item["city"]}天气解析失败: {str(e)}')
            item['weather'] = '数据解析异常'

        yield item

    def errback_city(self, failure):
        """Log a failed city request and record it in a per-worker Redis hash."""
        city = failure.request.meta.get('city', '未知城市')
        city_code = failure.request.meta.get('city_code', '未知编码')

        if self.redis_conn:
            # Keep the failure reason keyed by city code for later inspection.
            fail_key = f'sdweather:cities:failed:{self.server_id}'
            self.redis_conn.hset(fail_key, city_code, str(failure))

        self.logger.error(f'请求城市{city}失败: {failure.request.url} - {repr(failure)}')
