import aiohttp
import asyncio
from abc import ABC, abstractmethod
from typing import Dict, Any, Optional
from ..utils.logger import get_logger
from ..utils.config import config

# Module-level logger obtained via the project's logging helper, named after this module.
logger = get_logger(__name__)

class BaseCollector(ABC):
    """Abstract base class for asynchronous data collectors (crawlers).

    Provides an HTTP GET helper with retry and linear backoff; subclasses
    implement the ``collect`` / ``parse`` / ``save`` pipeline.
    """

    def __init__(self):
        # Request headers; the User-Agent string comes from project config.
        self.headers = {
            'User-Agent': config.get('crawler.eastmoney.headers.User-Agent')
        }
        # Total per-request timeout in seconds (config default: 30).
        self.timeout = aiohttp.ClientTimeout(total=config.get('crawler.eastmoney.timeout', 30))
        # Maximum number of attempts for a failing request (config default: 3).
        self.retry_times = config.get('crawler.eastmoney.retry', 3)
        # Base delay in seconds between retries; grows linearly with attempt number.
        self.delay = config.get('crawler.eastmoney.delay', 1)

    async def get(self, url: str, params: Optional[Dict[str, Any]] = None) -> Optional[str]:
        """Send an HTTP GET request with retry and linear backoff.

        Args:
            url: Target URL.
            params: Optional query-string parameters.

        Returns:
            The response body as text on HTTP 200, otherwise ``None``.
            Network errors and 5xx server errors are retried up to
            ``self.retry_times`` times; 4xx client errors are not retried,
            since they are unlikely to succeed on a repeat attempt.
        """
        # Reuse one session across all retry attempts instead of creating a
        # new connection pool per attempt.
        async with aiohttp.ClientSession(headers=self.headers, timeout=self.timeout) as session:
            for attempt in range(self.retry_times):
                try:
                    async with session.get(url, params=params) as response:
                        if response.status == 200:
                            return await response.text()
                        if response.status < 500:
                            # Client error: retrying will not help, give up now.
                            logger.error("Request failed with status %s: %s", response.status, url)
                            return None
                        # Server error: fall through to the backoff and retry.
                        logger.error("Server error %s: %s, retry %d/%d",
                                     response.status, url, attempt + 1, self.retry_times)
                except Exception as e:
                    logger.error("Request error: %s, retry %d/%d", e, attempt + 1, self.retry_times)
                # Linear backoff before the next attempt (skip after the last one).
                if attempt < self.retry_times - 1:
                    await asyncio.sleep(self.delay * (attempt + 1))
        return None

    @abstractmethod
    async def collect(self) -> None:
        """Collect data (fetch and drive the parse/save pipeline)."""
        pass

    @abstractmethod
    async def parse(self, content: str) -> Dict[str, Any]:
        """Parse raw response text into a structured dict."""
        pass

    @abstractmethod
    async def save(self, data: Dict[str, Any]) -> None:
        """Persist parsed data."""
        pass