import json
from datetime import time
from urllib.parse import urlencode

import scrapy

from web_crawler.config import get_headers, get_cookies
from web_crawler.utils import MySQLPool, DateProcess


class XueqiuStockFinanceSpider(scrapy.Spider):
    """Crawl Xueqiu financial statement data for every stock symbol stored
    in the ``xueqiu_stocks`` MySQL table.

    For each symbol, four report types (indicator, income, balance,
    cash_flow) are requested from the Xueqiu v5 finance API; each decoded
    JSON payload is yielded downstream unchanged.
    """

    name = 'xueqiu_stock_finance'
    custom_settings = {
        'CONCURRENT_REQUESTS': 64,
        'DOWNLOAD_DELAY': 0.1,
        'COOKIES_ENABLED': True,
        'RETRY_TIMES': 3,
        'RETRY_HTTP_CODES': [500, 502, 503, 504, 400, 403, 404, 408],
        # 'ITEM_PIPELINES': {
        # }
    }
    # Number of reporting periods requested per API call.
    DATA_COUNT = 10
    SYMBOL_SQL = """SELECT distinct symbol FROM xueqiu_stocks"""
    # The four Xueqiu finance report endpoints crawled for every symbol.
    DATA_TYPES = ('indicator', 'income', 'balance', 'cash_flow')

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.pool = MySQLPool()
        self.symbols = self._fetch_symbols_from_db()  # stock symbol list (股票列表)

    def _fetch_symbols_from_db(self):
        """Return the distinct stock symbols stored in MySQL.

        NOTE(review): assumes the pool hands out connections whose cursor
        returns mapping-style rows (``row['symbol']``) — confirm against
        MySQLPool's cursor configuration.
        """
        conn = self.pool.get_conn()
        try:
            with conn.cursor() as cursor:
                cursor.execute(self.SYMBOL_SQL)
                return [row['symbol'] for row in cursor.fetchall()]
        finally:
            # Always hand the connection back, even if the query fails.
            conn.close()

    def start_requests(self):
        """Yield one finance-data request per (symbol, report type) pair."""
        for symbol in self.symbols:
            for data_type in self.DATA_TYPES:
                yield self._build_finance_request(symbol, data_type)

    def _build_finance_request(self, symbol, data_type):
        """Build the scrapy.Request for one symbol/report-type combination.

        symbol and data_type ride along in ``request.meta`` so the parse
        callback can attribute failures to a specific stock and report.
        """
        params = {
            'symbol': symbol,
            'type': 'all',
            'is_detail': 'true',
            'count': self.DATA_COUNT,
            'timestamp': DateProcess.get_timestamp(),
        }
        url = f'https://stock.xueqiu.com/v5/stock/finance/cn/{data_type}.json?{urlencode(params)}'
        return scrapy.Request(
            url=url,
            callback=self.parse_finance_data,
            meta={
                'symbol': symbol,
                'data_type': data_type,
            },
            headers=get_headers(),
            cookies=get_cookies(),
        )

    def parse_finance_data(self, response):
        """Decode one finance API response and yield its JSON payload.

        Malformed JSON or an API-level error is logged (with the symbol
        and report type from ``response.meta``) and the response dropped,
        so one bad payload never aborts the crawl.
        """
        # Keep the try body minimal: only the decode step can raise here,
        # and only json.JSONDecodeError is expected — anything else is a
        # real bug that should surface, not be swallowed.
        try:
            data = json.loads(response.text)
        except json.JSONDecodeError as e:
            self.logger.error("Invalid JSON from %s: %s", response.url, e)
            return

        # The API signals failure with a non-zero error_code; a payload
        # missing the key entirely is also treated as an error instead of
        # raising KeyError as before.
        if data.get('error_code') != 0:
            self.logger.error(
                "API error for %s (symbol=%s, type=%s): %s",
                response.url,
                response.meta.get('symbol'),
                response.meta.get('data_type'),
                data.get('error_description'),
            )
            return

        yield data



