import time
from typing import Any

import scrapy
from urllib.parse import urlencode
import json
from web_crawler.config import get_headers,get_cookies
from ..utils import MySQLPool


class TestSpider(scrapy.Spider):
    """Test spider fetching Xueqiu k-line (candlestick) data for a small
    fixed set of stock symbols.

    The full symbol universe is loaded from MySQL at construction time
    (``self.symbols``), but ``start_requests`` deliberately uses only the
    hard-coded ``TEST_SYMBOLS`` sample so test runs stay small.
    """

    name = 'test'
    allowed_domains = ["stock.xueqiu.com"]

    # Test symbols covering different markets (Shenzhen / Shanghai).
    TEST_SYMBOLS = [
        "SZ002460",
        "SH600900",
    ]
    # SQL used to load the full symbol list from MySQL.
    SYMBOL_SQL = """select distinct symbol from xueqiu_data.xueqiu_stocks"""

    custom_settings = {
        'ITEM_PIPELINES': {
            'web_crawler.pipeline.XueqiuKlinePipeline': 300,
        }
    }

    def __init__(self, **kwargs: Any):
        super().__init__(**kwargs)
        self.pool = MySQLPool()
        # Full symbol list from the DB; not consumed by start_requests,
        # which intentionally sticks to TEST_SYMBOLS for testing.
        self.symbols = self._fetch_symbols_from_db()

    def _fetch_symbols_from_db(self) -> list:
        """Return the distinct stock symbols stored in MySQL.

        Assumes the pooled connection's cursor yields rows as dicts
        (e.g. pymysql DictCursor) — TODO confirm against MySQLPool.
        """
        conn = self.pool.get_conn()
        try:
            with conn.cursor() as cursor:
                cursor.execute(self.SYMBOL_SQL)
                return [row['symbol'] for row in cursor.fetchall()]
        finally:
            # For a pooled connection, close() typically returns it to the
            # pool rather than tearing it down — verify MySQLPool semantics.
            conn.close()

    def start_requests(self):
        """Yield one k-line API request per test symbol."""
        for symbol in self.TEST_SYMBOLS:
            params = {
                "symbol": symbol,
                # "begin": current timestamp in milliseconds.
                "begin": str(int(time.time() * 1000)),
                "period": "day",
                "type": "before",
                "count": "-5",  # negative count: only the latest 5 bars (test-sized)
                "indicator": "kline,pe,pb,ps,pcf,market_capital,macd,kdj,boll"
            }
            yield scrapy.Request(
                url=f"https://stock.xueqiu.com/v5/stock/chart/kline.json?{urlencode(params)}",
                headers=get_headers(),
                cookies=get_cookies(),
                callback=self.getdata,
                meta={'symbol': symbol}
            )

    def getdata(self, response):
        """Log the parsed JSON payload of a k-line response."""
        # Use the spider logger (lazy %-formatting) instead of print so
        # output respects Scrapy's logging configuration.
        self.logger.info("%s", json.loads(response.text))

    def test_get_data(self):
        """Log the symbol list loaded from MySQL (manual sanity check)."""
        self.logger.info("%s", self.symbols)