import scrapy

from scrapy import signals, Request
import json
from ..commons.utils import get_json_data
from ..commons.parse_data import parse_note_data
from ..redis_service import RedisService

class NoteByPostIdSpider(scrapy.Spider):
    """Crawl every note posted by a given poster on xiaohongshu.

    Pages through ``/api/sns/web/v1/user_posted`` for ``poster_id``, then
    requests each note's detail via ``/api/sns/web/v1/feed``.  Crawl
    progress is mirrored into Redis under the key ``job_id`` (a JSON dict
    with ``total_nums`` / ``completed_nums`` / ``failed_nums`` / ``status``)
    so an external service can poll it.
    """

    name = "note_by_poster_id"

    @classmethod
    def from_crawler(cls, crawler, *args, **kwargs):
        """Standard Scrapy factory hook; also wires up the closed signal."""
        spider = super().from_crawler(crawler, *args, **kwargs)
        crawler.signals.connect(spider.spider_closed, signal=signals.spider_closed)
        return spider

    def __init__(self, poster_id=None, job_id=None, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.poster_id = poster_id
        self.job_id = job_id
        # Pages of the user_posted listing seen so far.
        # NOTE(review): this page count is what update_total_nums() writes
        # into 'total_nums', while 'completed_nums' counts individual notes
        # -- the two will never match; confirm what the progress consumer
        # actually expects.
        self.total_pages = 1
        self.final_result = []  # accumulated parsed notes
        self.redis_service = RedisService().redis_client

        # Initialize the job's progress record in Redis.
        self._set_progress(
            {'total_nums': 1, 'completed_nums': 0, 'failed_nums': 0,
             'status': 'initializing'})

    # --- Redis progress helpers ------------------------------------------
    # These are plain read-modify-write cycles; they are NOT atomic, which
    # is acceptable here because a single spider process owns the key.

    def _get_progress(self):
        """Load this job's progress dict from Redis."""
        return json.loads(self.redis_service.get(f'{self.job_id}'))

    def _set_progress(self, progress):
        """Store this job's progress dict into Redis."""
        self.redis_service.set(f'{self.job_id}', json.dumps(progress))

    def update_total_nums(self, total_nums):
        """Set 'total_nums' and flip status to 'crawling'."""
        progress = self._get_progress()
        progress['total_nums'] = total_nums
        progress['status'] = 'crawling'
        self._set_progress(progress)
        self.logger.info(f'Updated progress: {progress}')

    def update_failed_nums(self):
        """Increment the failed-note counter."""
        progress = self._get_progress()
        progress['failed_nums'] += 1
        self._set_progress(progress)
        self.logger.info(f'Updated failed Nums: {progress}')

    def update_completed_nums(self):
        """Increment the completed-note counter."""
        progress = self._get_progress()
        progress['completed_nums'] += 1
        self._set_progress(progress)
        self.logger.info(f'Updated completed Nums: {progress}')

    def get_api(self, cursor=None):
        """Build the user_posted listing API path; empty cursor = first page."""
        return (f"/api/sns/web/v1/user_posted?num=30&cursor={cursor or ''}"
                f"&user_id={self.poster_id}&image_formats=jpg,webp,avif")

    def start_requests(self):
        # The real API path travels in meta['api']; presumably a downloader
        # middleware rewrites the request from it -- confirm against the
        # middleware stack.
        yield Request(
            url='https://edith.xiaohongshu.com',
            callback=self.parse,
            method='GET',
            meta={'api': self.get_api(), 'data': 'undefined'}
        )

    def parse(self, response):
        """Parse one listing page: fan out per-note requests, then paginate."""
        json_str = get_json_data(response.text)
        json_data = json.loads(json_str)['data']
        has_more = json_data.get('has_more', False)
        notes = json_data.get('notes', [])
        for note in notes:
            yield from self.get_note_by_id(note['note_id'])

        if has_more:
            self.total_pages += 1
            self.update_total_nums(self.total_pages)
            next_cursor = json_data.get('cursor', None)
            if next_cursor:
                next_page_url = f"https://edith.xiaohongshu.com{self.get_api(next_cursor)}"
                yield Request(
                    url=next_page_url,
                    callback=self.parse,
                    method='GET',
                    meta={'api': self.get_api(next_cursor), 'data': 'undefined'}
                )

    def get_note_by_id(self, note_id):
        """Yield a detail request for one note; skip malformed 24-char ids."""
        if len(note_id) != 24:
            self.update_failed_nums()
            self.logger.warning(f"{note_id}====id异常跳过此id爬取")
            return
        yield Request(
            url=f'https://edith.xiaohongshu.com/{note_id}',
            callback=self.parse_data,
            method='POST',
            meta={'api': '/api/sns/web/v1/feed', 'data': {
                'source_note_id': f'{note_id}',
                'image_formats': ['jpg', 'webp', 'avif'],
                "extra": {"need_body_topic": "1"}
            }, }
        )

    def parse_data(self, response):
        """Parse one note-detail response and emit the parsed note.

        Only counts the note as completed after a successful parse; a
        malformed or empty response is counted as failed instead of
        crashing the callback (the original incremented 'completed' first
        and then raised on bad payloads).
        """
        data = response.text
        if not data:
            self.update_failed_nums()
            return
        try:
            json_data = json.loads(get_json_data(data))['data']['items'][0]
            parsed_data = parse_note_data(json_data, for_web=True)
        except (json.JSONDecodeError, KeyError, IndexError, TypeError):
            self.update_failed_nums()
            self.logger.exception('Failed to parse note detail response')
            return
        self.update_completed_nums()
        self.final_result.append(parsed_data)
        yield {'data': [parsed_data]}

    def spider_closed(self, spider, reason):
        """Signal handler: mark the job finished in Redis."""
        progress = self._get_progress()
        progress['status'] = 'finished'
        self._set_progress(progress)
        self.logger.info(f'Spider closed due to {reason}')
