import logging
import time
from abc import ABC, abstractmethod
from concurrent.futures import ThreadPoolExecutor, as_completed
from dataclasses import dataclass
from datetime import datetime, timedelta
from functools import wraps
from typing import Any, Callable, Dict, Generic, List, Optional, Type, TypeVar

import akshare as ak
import pandas as pd

T = TypeVar('T')
logger = logging.getLogger(__name__)

class DataModel(ABC):
    """Abstract persistence interface for market-data records.

    Subclasses bind the pipeline to a concrete storage backend; both
    operations are classmethods so no instance state is required.
    """

    @classmethod
    @abstractmethod
    def get_latest_date(cls, code: str) -> Optional[datetime]:
        """Return the most recent stored data date for *code*, or None if absent."""
        ...

    @classmethod
    @abstractmethod
    def save(cls, data: pd.DataFrame) -> bool:
        """Persist *data*; return True on success."""
        ...

class DataProcessor(ABC, Generic[T]):
    """Base processing pipeline: normalise code -> clean -> adapt -> validate.

    Concrete subclasses supply code formatting and adaptation; ``clean`` and
    ``validate`` provide reusable defaults for OHLCV frames.
    """

    def __init__(self, model_class: Type[DataModel], column_mapping: Dict[str, str]):
        # model_class: persistence backend used to load/save processed data.
        # column_mapping: source-column -> canonical-column rename map.
        self.model_class = model_class
        self.column_mapping = column_mapping

    @abstractmethod
    def process_code(self, code: str) -> str:
        """Normalise *code* into the format expected by the data source."""
        pass

    def clean(self, data: pd.DataFrame) -> pd.DataFrame:
        """Default cleaning for OHLCV frames.

        Renames columns via ``column_mapping``, normalises trade dates to
        ``YYYY-MM-DD``, coerces price/volume columns to numeric, and drops
        rows with missing prices or a non-positive close. Columns absent
        from *data* are skipped rather than raising.
        """
        if data.empty:
            return data

        # Rename source columns to the canonical schema (returns a copy).
        data = data.rename(columns=self.column_mapping)

        # Normalise the date column to a uniform string format.
        if 'trade_date' in data.columns:
            data['trade_date'] = pd.to_datetime(data['trade_date']).dt.strftime('%Y-%m-%d')

        # Coerce price columns to numeric, rounding to 2 decimal places.
        numeric_columns = ['open', 'high', 'low', 'close']
        for col in numeric_columns:
            if col in data.columns:
                data[col] = pd.to_numeric(data[col], errors='coerce').round(2)

        if 'volume' in data.columns:
            # fillna(0) first: astype('int64') raises on NaN values produced
            # by errors='coerce' for unparseable volumes.
            data['volume'] = pd.to_numeric(data['volume'], errors='coerce').fillna(0).astype('int64')

        # Drop rows with missing prices — restrict the subset to columns that
        # actually exist, because dropna raises KeyError on absent labels.
        present = [col for col in numeric_columns if col in data.columns]
        if present:
            data = data.dropna(subset=present)
        if 'close' in data.columns:
            data = data[data['close'] > 0]

        return data

    @abstractmethod
    def adapt(self, data: pd.DataFrame) -> T:
        """Convert the cleaned frame into the target representation ``T``."""
        pass

    def validate(self, data: T) -> bool:
        """Default validation: DataFrames must carry the core OHLC columns.

        Non-DataFrame adapted values are accepted as-is; subclasses should
        override when ``T`` has its own invariants.
        """
        if isinstance(data, pd.DataFrame):
            required_columns = ['trade_date', 'open', 'close', 'high', 'low']
            return all(col in data.columns for col in required_columns)
        return True

class DataSource(ABC):
    """Abstract data source that retrieves history in bounded time segments."""

    def __init__(self, segment_size: timedelta = timedelta(days=1000)):
        # Maximum span covered by a single fetch_segment call.
        self.segment_size = segment_size

    @abstractmethod
    def fetch_segment(self, code: str, start_date: datetime, end_date: datetime, **kwargs) -> pd.DataFrame:
        """Fetch raw data for *code* between the two dates (inclusive)."""
        pass

    def fetch(self, code: str, start_date: datetime, end_date: datetime, **kwargs) -> List[pd.DataFrame]:
        """Collect data by walking backwards from *end_date* in segments.

        Each segment spans at most ``segment_size``; empty or None results
        from ``fetch_segment`` are skipped. Frames are returned newest-first.
        """
        frames: List[pd.DataFrame] = []
        segment_end = end_date

        while segment_end >= start_date:
            segment_start = max(segment_end - self.segment_size, start_date)
            frame = self.fetch_segment(code, segment_start, segment_end, **kwargs)

            if frame is not None and not frame.empty:
                frames.append(frame)

            # Step to the day just before the segment we covered.
            segment_end = segment_start - timedelta(days=1)

        return frames

    @abstractmethod
    def get_codes(self) -> List[str]:
        """Return the list of codes this source can serve."""
        pass

@dataclass
class RetryConfig:
    """Retry and concurrency settings for remote data fetching."""
    max_retries: int = 3   # attempts per segment before giving up
    delay: float = 0.1     # base sleep in seconds; scaled linearly between failed attempts
    max_workers: int = 10  # thread-pool size used by batch updates

class AkshareDataSource(DataSource):
    """DataSource backed by a single akshare history function."""

    def __init__(self, ak_func: Callable[..., pd.DataFrame], retry_config: RetryConfig):
        """
        Args:
            ak_func: akshare function accepting symbol/start_date/end_date keywords.
            retry_config: retry counts, delays and worker-pool settings.
        """
        super().__init__()
        self.ak_func = ak_func
        self.retry_config = retry_config

    def fetch_segment(self, code: str, start_date: datetime, end_date: datetime, **kwargs) -> pd.DataFrame:
        """Fetch one segment with retries; return an empty frame on final failure.

        Empty responses are retried after a flat delay; exceptions are retried
        with a linearly increasing back-off.
        """
        for attempt in range(self.retry_config.max_retries):
            try:
                params = {
                    'symbol': code,
                    'start_date': start_date.strftime('%Y%m%d'),
                    'end_date': end_date.strftime('%Y%m%d'),
                    **kwargs
                }
                df = self.ak_func(**params)
                if df is not None and not df.empty:
                    return df
                time.sleep(self.retry_config.delay)
            except Exception as e:
                # Lazy %-args: the message is only built if the record is emitted.
                logger.error("Attempt %d failed for %s: %s", attempt + 1, code, e)
                if attempt < self.retry_config.max_retries - 1:
                    time.sleep(self.retry_config.delay * (attempt + 1))
        return pd.DataFrame()

    def get_codes(self) -> List[str]:
        """Generic akshare history functions expose no code listing.

        Implemented (instead of inheriting the abstract method) so the class
        is concrete and instantiable — without this, constructing it raises
        TypeError. Override in a subclass that can enumerate codes.
        """
        raise NotImplementedError("AkshareDataSource cannot enumerate codes; override get_codes()")

class BaseDataProvider(ABC, Generic[T]):
    """Orchestrates fetch -> clean -> adapt -> validate -> save for codes."""

    # Earliest A-share trading day; used as default history start when the
    # store has no data for a code and no explicit start is given.
    DEFAULT_START_DATE = datetime(1990, 12, 19)
    # Fallback pool size when the data source carries no retry_config.
    DEFAULT_MAX_WORKERS = 10

    def __init__(self, data_source: DataSource, processor: DataProcessor[T]):
        self.data_source = data_source
        self.processor = processor

    def get_data(self, code: str, end_date: datetime,
                 start_date: Optional[datetime] = None) -> Optional[T]:
        """Fetch, process and persist data for *code* up to *end_date*.

        Returns the adapted data, or None when the store is already up to
        date or the source returned nothing.

        Raises:
            ValueError: if the adapted data fails validation.
        """
        # Incremental update: resume the day after the last stored row.
        latest_date = self.processor.model_class.get_latest_date(code)

        if latest_date is not None:
            if end_date <= latest_date:
                logger.info("%s: Data is up to date", code)
                return None
            start_date = latest_date + timedelta(days=1)
        elif start_date is None:
            start_date = self.DEFAULT_START_DATE

        # Convert the canonical code into the source's expected format.
        processed_code = self.processor.process_code(code)

        segment_data = self.data_source.fetch(processed_code, start_date, end_date)
        if not segment_data:
            return None

        # Merge segments and tag every row with the canonical code.
        data = pd.concat(segment_data, ignore_index=True)
        data['code'] = code

        cleaned_data = self.processor.clean(data)
        adapted_data = self.processor.adapt(cleaned_data)

        if not self.processor.validate(adapted_data):
            raise ValueError("Data validation failed")

        self.processor.model_class.save(adapted_data)

        return adapted_data

    def update_batch(self, codes: List[str], end_date: datetime,
                    start_date: Optional[datetime] = None) -> Dict[str, Optional[T]]:
        """Update *codes* concurrently; failed codes map to None.

        Exceptions from individual codes are logged and swallowed so one
        failure does not abort the batch.
        """
        # retry_config is an AkshareDataSource attribute, not part of the
        # DataSource interface — fall back to a default for other sources.
        retry_config = getattr(self.data_source, 'retry_config', None)
        max_workers = getattr(retry_config, 'max_workers', self.DEFAULT_MAX_WORKERS)

        with ThreadPoolExecutor(max_workers=max_workers) as executor:
            futures = {
                executor.submit(self.get_data, code, end_date, start_date): code
                for code in codes
            }

            results: Dict[str, Optional[T]] = {}
            # as_completed yields each future as soon as it finishes instead
            # of blocking in submission order.
            for future in as_completed(futures):
                code = futures[future]
                try:
                    results[code] = future.result()
                except Exception as e:
                    logger.error("Failed to update %s: %s", code, e)
                    results[code] = None

            return results

class StockKline(DataModel):
    """Stock K-line (candlestick) data model.

    NOTE(review): both methods are unimplemented stubs — they return None,
    which does not satisfy the annotated return types. Wire them to the
    actual storage backend before use.
    """

    @classmethod
    def get_latest_date(cls, code: str) -> Optional[datetime]:
        # TODO: query the storage backend for the newest trade_date of *code*.
        pass

    @classmethod
    def save(cls, data: pd.DataFrame) -> bool:
        # TODO: persist *data*; return True on success per the DataModel contract.
        pass

class StockProcessor(DataProcessor[pd.DataFrame]):
    """Stock data processor.

    NOTE(review): both hooks are currently identity pass-throughs; fill in
    real code normalisation / indicator computation as needed.
    """

    def process_code(self, code: str) -> str:
        """Normalise the stock code for the data source."""
        # TODO: implement code formatting (e.g. exchange prefixes); currently
        # returns the input unchanged.
        return code

    def adapt(self, data: pd.DataFrame) -> pd.DataFrame:
        """Adapt cleaned stock data to the target frame."""
        # Hook for computing technical indicators etc.; currently a pass-through.
        return data

def create_stock_provider() -> BaseDataProvider[pd.DataFrame]:
    """Wire up a ready-to-use provider for A-share daily K-line data."""
    # Map akshare's Chinese column names onto the canonical OHLCV schema.
    chinese_to_canonical = {
        '日期': 'trade_date',
        '开盘': 'open',
        '收盘': 'close',
        '最高': 'high',
        '最低': 'low',
        '成交量': 'volume'
    }

    source = AkshareDataSource(
        ak.stock_zh_a_hist,
        RetryConfig(max_retries=3, delay=0.1, max_workers=10)
    )
    stock_processor = StockProcessor(StockKline, chinese_to_canonical)
    return BaseDataProvider(source, stock_processor)

# Usage example
def main() -> None:
    """Demonstrate a single-code fetch and a concurrent batch update."""
    # Configure logging so provider progress/errors are visible.
    logging.basicConfig(level=logging.INFO)

    provider = create_stock_provider()
    end_date = datetime.now()

    # Single stock: report how many rows came back instead of discarding them.
    data = provider.get_data("000001", end_date)
    logger.info("000001 -> %d rows", len(data) if data is not None else 0)

    # Batch update: failed codes are mapped to None by update_batch.
    codes = ["000001", "000002", "000003"]
    results = provider.update_batch(codes, end_date)
    logger.info("Batch finished: %d succeeded, %d failed",
                sum(1 for v in results.values() if v is not None),
                sum(1 for v in results.values() if v is None))


if __name__ == "__main__":
    main()