#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
改进的历史数据服务
基于API分析结果优化的批量历史数据获取实现
解决东方财富批量历史API的已知问题
"""

import requests
import logging
import time
from datetime import datetime, timedelta, date
from typing import Dict, Optional, List, Any
from threading import Lock
import concurrent.futures


class ImprovedHistoricalDataService:
    """Improved historical-data service.

    Fetches previous-trading-day volumes from the Eastmoney K-line API,
    working around known issues with its batch endpoints by issuing one
    throttled request per stock. Supports an optional thread-pool mode
    for larger batches.
    """

    # Connection-pool / retry configuration shared by _setup_session()
    # and get_performance_stats(), so the reported stats cannot drift
    # out of sync with the actual session configuration.
    POOL_CONNECTIONS = 10
    POOL_MAXSIZE = 20
    MAX_RETRIES = 2

    def __init__(self, enable_parallel: bool = False, max_workers: int = 3):
        """
        Args:
            enable_parallel: use a thread pool for batches of more than 5
                stocks (faster, but bypasses the per-request throttle).
            max_workers: thread-pool size when parallel mode is enabled.
        """
        self.logger = logging.getLogger(__name__)

        # Concurrency / throttling state
        self.enable_parallel = enable_parallel
        self.max_workers = max_workers
        self.request_lock = Lock()
        self.last_request_time = 0
        self.min_request_interval = 0.1  # 100 ms between requests to avoid rate limiting

        # HTTP session with connection pooling and browser-like headers
        self.session = requests.Session()
        self._setup_session()

        # Eastmoney daily K-line endpoint
        self.eastmoney_kline_url = "http://push2his.eastmoney.com/api/qt/stock/kline/get"

    def _setup_session(self):
        """Configure the shared session: connection pool and request headers."""
        # Connection pooling improves throughput for many small requests.
        adapter = requests.adapters.HTTPAdapter(
            pool_connections=self.POOL_CONNECTIONS,
            pool_maxsize=self.POOL_MAXSIZE,
            max_retries=self.MAX_RETRIES
        )
        self.session.mount('http://', adapter)
        self.session.mount('https://', adapter)

        # Browser-like headers; Referer/Host match what the Eastmoney
        # endpoint expects from its own web frontend.
        self.session.headers.update({
            'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36',
            'Accept': 'application/json, text/plain, */*',
            'Accept-Language': 'zh-CN,zh;q=0.9,en;q=0.8',
            'Accept-Encoding': 'gzip, deflate',
            'Cache-Control': 'no-cache',
            'Connection': 'keep-alive',
            'Referer': 'http://quote.eastmoney.com/',
            'Host': 'push2his.eastmoney.com'
        })

    def _rate_limit(self):
        """Enforce a minimum interval between requests to avoid server bans.

        NOTE(review): parallel mode deliberately skips this throttle (the
        original design only throttles the sequential path); if the API
        starts rejecting parallel batches, extend the lock to that path too.
        """
        if not self.enable_parallel:  # only used in sequential mode
            with self.request_lock:
                current_time = time.time()
                time_since_last = current_time - self.last_request_time
                if time_since_last < self.min_request_interval:
                    time.sleep(self.min_request_interval - time_since_last)
                self.last_request_time = time.time()

    def get_previous_day_volume(
        self, stock_code: str, target_date: Optional[date] = None
    ) -> Optional[int]:
        """
        Get the previous trading day's volume for a single stock (optimized).

        Args:
            stock_code: stock code (e.g. '000001')
            target_date: target date; defaults to yesterday

        Returns:
            Previous trading day's volume in lots (手), or None on failure.
        """
        if target_date is None:
            target_date = date.today() - timedelta(days=1)

        return self._get_volume_from_eastmoney_kline_optimized(stock_code, target_date)

    def get_batch_previous_day_volumes(
        self, stock_codes: List[str], target_date: Optional[date] = None
    ) -> Dict[str, Optional[int]]:
        """
        Get previous-trading-day volumes for multiple stocks (fully optimized).

        Args:
            stock_codes: list of stock codes
            target_date: target date; defaults to yesterday

        Returns:
            Mapping from stock code to volume (None for codes that failed).
        """
        if not stock_codes:
            return {}

        if target_date is None:
            target_date = date.today() - timedelta(days=1)

        self.logger.info(f"开始批量获取 {len(stock_codes)} 只股票的历史成交量 ({target_date})")
        start_time = time.time()

        # Parallel mode only pays off for non-trivial batches (> 5 codes).
        if self.enable_parallel and len(stock_codes) > 5:
            results = self._get_batch_volumes_parallel(stock_codes, target_date)
        else:
            results = self._get_batch_volumes_sequential(stock_codes, target_date)

        elapsed = time.time() - start_time
        success_count = sum(1 for v in results.values() if v is not None)

        self.logger.info(f"批量获取历史成交量完成: {success_count}/{len(stock_codes)} 只股票成功，耗时 {elapsed:.2f}秒")

        return results

    def _get_batch_volumes_sequential(
        self, stock_codes: List[str], target_date: date
    ) -> Dict[str, Optional[int]]:
        """Fetch batch volumes sequentially (stable and reliable)."""
        results = {}

        for i, code in enumerate(stock_codes, 1):
            try:
                self.logger.debug(f"进度 {i}/{len(stock_codes)}: 获取 {code}")
                volume = self._get_volume_from_eastmoney_kline_optimized(code, target_date)
                results[code] = volume

                if volume is not None:
                    self.logger.debug(f"  ✅ {code}: {volume:,}手")
                else:
                    self.logger.debug(f"  ❌ {code}: 无数据")

            except Exception as e:
                # A single failing code must not abort the whole batch.
                self.logger.error(f"获取股票 {code} 历史成交量失败: {e}")
                results[code] = None

        return results

    def _get_batch_volumes_parallel(
        self, stock_codes: List[str], target_date: date
    ) -> Dict[str, Optional[int]]:
        """Fetch batch volumes concurrently (efficient, but use with care)."""
        results = {}

        self.logger.info(f"使用并发模式，工作线程数: {self.max_workers}")

        with concurrent.futures.ThreadPoolExecutor(max_workers=self.max_workers) as executor:
            # Submit all tasks up front, then harvest as they finish.
            future_to_code = {
                executor.submit(self._get_volume_from_eastmoney_kline_optimized, code, target_date): code
                for code in stock_codes
            }

            for future in concurrent.futures.as_completed(future_to_code):
                code = future_to_code[future]
                try:
                    volume = future.result()
                    results[code] = volume

                    if volume is not None:
                        self.logger.debug(f"✅ {code}: {volume:,}手")
                    else:
                        self.logger.debug(f"❌ {code}: 无数据")

                except Exception as e:
                    self.logger.error(f"并发获取股票 {code} 异常: {e}")
                    results[code] = None

        return results

    def _get_volume_from_eastmoney_kline_optimized(
        self, stock_code: str, target_date: date, retry_count: int = 2
    ) -> Optional[int]:
        """Fetch one stock's volume from the Eastmoney K-line API (optimized).

        Returns the volume in lots, or None on any failure (network error
        after retries, non-200 status, or unparseable response).
        """
        # Apply the per-request throttle (no-op in parallel mode).
        self._rate_limit()

        try:
            # Market prefix: 1 = Shanghai, 0 = Shenzhen.
            # NOTE(review): the '6' heuristic covers SSE A-shares/STAR only;
            # confirm whether callers ever pass SSE funds (codes starting
            # with '5') or BSE codes, which this would misroute.
            secid = f"1.{stock_code}" if stock_code.startswith('6') else f"0.{stock_code}"

            params = {
                "secid": secid,
                "ut": "bd1d9ddb04089700cf9c27f6f7426281",
                "fields1": "f1,f2,f3,f4,f5,f6",
                "fields2": "f51,f52,f53,f54,f55,f56,f57,f58,f59,f60,f61",
                "klt": 101,  # daily K-line
                "fqt": 1,    # forward-adjusted prices
                "beg": target_date.strftime("%Y%m%d"),
                "end": target_date.strftime("%Y%m%d"),
                "lmt": 1
            }

            for attempt in range(retry_count):
                try:
                    response = self.session.get(
                        self.eastmoney_kline_url,
                        params=params,
                        timeout=12
                    )

                    if response.status_code == 200:
                        data = response.json()
                        return self._parse_kline_response(data, stock_code, target_date)
                    else:
                        self.logger.warning(f"获取 {stock_code} 历史K线数据失败: HTTP {response.status_code}")
                        if attempt < retry_count - 1:
                            time.sleep(0.5 * (attempt + 1))  # linear backoff: 0.5s, 1.0s, ...
                            continue

                except requests.exceptions.RequestException as e:
                    if attempt < retry_count - 1:
                        self.logger.debug(f"获取 {stock_code} 网络异常，重试 {attempt + 1}/{retry_count}: {e}")
                        time.sleep(1 * (attempt + 1))
                        continue
                    else:
                        # Last attempt: let the outer handler log and return None.
                        raise

        except Exception as e:
            self.logger.error(f"获取 {stock_code} 历史成交量异常: {e}")
            return None

        # All attempts returned non-200 without raising.
        return None

    def _parse_kline_response(self, data: dict, stock_code: str, target_date: date) -> Optional[int]:
        """Parse a K-line API response and extract the volume field."""
        try:
            if data and "data" in data and data["data"] and "klines" in data["data"]:
                klines = data["data"]["klines"]
                if klines and len(klines) > 0:
                    # Only one day was requested, so take the first entry.
                    # Each kline is comma-separated in the order requested
                    # via fields2: f51=date, ..., f56=volume -> index 5.
                    kline = klines[0]
                    fields = kline.split(",")
                    if len(fields) >= 6:  # ensure the volume field is present
                        try:
                            volume = int(float(fields[5]))
                            self.logger.debug(f"成功解析 {stock_code} 在 {target_date} 的成交量: {volume}")
                            return volume
                        except (ValueError, TypeError) as e:
                            self.logger.warning(f"成交量数据解析失败 {stock_code}: {e}")
                            return None

            # No data (likely a non-trading day or a newly listed stock).
            self.logger.debug(f"股票 {stock_code} 在 {target_date} 无K线数据")
            return None

        except Exception as e:
            self.logger.error(f"解析 {stock_code} K线响应失败: {e}")
            return None

    def validate_data_source(self) -> Dict[str, bool]:
        """Check availability of the data source via a live probe request.

        NOTE(review): probes yesterday's data for 000001; on a non-trading
        day this yields None and reports the source as unavailable even
        when it is healthy — confirm whether callers tolerate that.
        """
        results = {}

        try:
            test_volume = self._get_volume_from_eastmoney_kline_optimized(
                "000001", date.today() - timedelta(days=1)
            )
            results["eastmoney_kline_optimized"] = test_volume is not None
            self.logger.info(f"数据源验证 - 东方财富K线接口: {'✅ 可用' if results['eastmoney_kline_optimized'] else '❌ 不可用'}")
        except Exception as e:
            results["eastmoney_kline_optimized"] = False
            self.logger.error(f"数据源验证失败: {e}")

        return results

    def get_performance_stats(self) -> Dict[str, Any]:
        """Return the service's performance-related configuration."""
        return {
            "parallel_mode": self.enable_parallel,
            "max_workers": self.max_workers if self.enable_parallel else 1,
            "min_request_interval": self.min_request_interval,
            # Read from the shared constants so these cannot drift from
            # the values actually used in _setup_session().
            "session_pool_connections": self.POOL_CONNECTIONS,
            "session_pool_maxsize": self.POOL_MAXSIZE,
            "retry_count": self.MAX_RETRIES
        }