import tushare as ts
import pandas as pd
import numpy as np
import os
import time
import logging

# Module-level logger, set to DEBUG; SectorAnalyzer.__init__ attaches the
# stream handler and formatter at instantiation time.
logger = logging.getLogger(__name__)
logger.setLevel(logging.DEBUG)
from datetime import datetime, timedelta
from data_cache_manager import DataCacheManager

class SectorAnalyzer:
    """Sector-strength analyzer built on the tushare Pro API.

    Computes per-industry limit-up density, consecutive-limit-up ("连板")
    ratios and historical density quantiles for A-share sectors, then
    combines them into a 0-25 point score.  All remote data is cached
    through DataCacheManager so repeated runs avoid redundant API calls.
    """

    # Rate limiting: pause API_COOLDOWN_SECONDS after every API_CALL_LIMIT calls.
    API_CALL_LIMIT = 10
    API_COOLDOWN_SECONDS = 60

    def __init__(self):
        # Attach a stream handler only once: previously a new handler was
        # added on every instantiation, duplicating every log line.
        if not logger.handlers:
            handler = logging.StreamHandler()
            handler.setFormatter(
                logging.Formatter('%(asctime)s - %(levelname)s - %(message)s'))
            logger.addHandler(handler)

        # Prefer an environment variable over the token embedded in source;
        # the literal is kept only as a backward-compatible fallback.
        # NOTE(review): the embedded token should be rotated and removed.
        ts.set_token(os.environ.get(
            'TUSHARE_TOKEN',
            '8f0c9f36fea80fe9cc6a0d480a87ba58c48a91182d0f7562e9ac9706'))
        self.pro = ts.pro_api()
        self.api_counter = 0  # API calls since last cooldown (see _check_api_limit)

        # Local cache sitting in front of the tushare API.
        self.cache_manager = DataCacheManager()

    def get_trade_dates(self, start_date, end_date):
        """Return the open trading dates in [start_date, end_date], inclusive.

        Dates are 'YYYYMMDD' strings.  Results come from the local cache when
        available; otherwise the SSE calendar is fetched with a +/-30 day
        margin (robust around holidays), filtered back to the requested range
        and cached.  Returns [] on error or when the range has no trading day.

        Note: the original wrapped this method in functools.lru_cache, which
        keys on `self` (keeping instances alive) and returns a shared mutable
        list; DataCacheManager already caches, so the decorator was removed.
        """
        cached_dates = self.cache_manager.load_trade_dates(start_date, end_date)
        if cached_dates is not None:
            logger.debug(f"从缓存获取交易日历 | 范围={start_date}-{end_date} 总数={len(cached_dates)}")
            return cached_dates

        # Cache miss: query the API with the range widened by 30 days each side.
        extended_start = (datetime.strptime(start_date, '%Y%m%d') - timedelta(days=30)).strftime('%Y%m%d')
        extended_end = (datetime.strptime(end_date, '%Y%m%d') + timedelta(days=30)).strftime('%Y%m%d')

        try:
            logger.debug(f"扩展查询日期范围 | 原范围={start_date}-{end_date} 扩展后={extended_start}-{extended_end}")
            self._check_api_limit()
            cal = self.pro.trade_cal(exchange='SSE', start_date=extended_start, end_date=extended_end)
            trade_dates = cal[cal['is_open'] == 1]['cal_date'].tolist()

            # 'YYYYMMDD' strings compare correctly lexicographically.
            filtered_dates = [d for d in trade_dates if start_date <= d <= end_date]
            logger.debug(f"原始交易日数量={len(trade_dates)} 过滤后有效数量={len(filtered_dates)}")

            if not filtered_dates:
                logger.warning(f"交易日数据异常 | start={start_date} end={end_date} 范围内无有效交易日")
                return []

            self.cache_manager.save_trade_dates(start_date, end_date, filtered_dates)

            logger.debug(f"成功获取交易日历 | 范围={start_date}-{end_date} 总数={len(filtered_dates)}")
            return filtered_dates

        except Exception as e:
            logger.error(f"交易日历获取失败 | 错误={str(e)}")
            return []

    def get_stock_data(self, trade_date):
        """Return the listed-stock universe for trade_date with limit-up flags.

        Columns: ts_code, name, list_date, sw_industry, limit (1 if the stock
        closed limit-up, else 0).  Stocks listed fewer than 30 calendar days
        before trade_date are excluded.  Results are cached per date.
        """
        cached_data = self.cache_manager.load_stock_data(trade_date)
        if cached_data is not None:
            logger.debug(f"从缓存获取股票数据 | 日期={trade_date} 数据量={len(cached_data)}")
            return cached_data

        logger.debug(f"从API获取股票数据 | 日期={trade_date}")

        # Fetch basics + limit list with retries and linear backoff; the rate
        # limiter now counts every attempt, not just the first.
        max_retries = 3
        for attempt in range(max_retries):
            try:
                self._check_api_limit()
                basics = self.pro.stock_basic(exchange='', list_status='L',
                                              fields='ts_code,name,list_date,industry')
                limit_data = self.pro.limit_list(trade_date=trade_date, fields='ts_code,limit')
                break
            except Exception:
                if attempt == max_retries - 1:
                    raise
                time.sleep(0.5 * (attempt + 1))

        merged = pd.merge(
            basics.rename(columns={'industry': 'sw_industry'}),
            limit_data,
            on='ts_code',
            how='left'
        )
        # 'U' marks a limit-up close in tushare's limit_list; everything else
        # (including stocks absent from the list) becomes 0.
        merged['limit'] = merged['limit'].fillna('').apply(lambda x: 1 if x == 'U' else 0)

        # Drop recent IPOs: require at least 30 calendar days since listing.
        merged['list_date'] = pd.to_datetime(merged['list_date'])
        merged = merged[merged['list_date'] <= (datetime.strptime(trade_date, '%Y%m%d') - timedelta(days=30))]

        self.cache_manager.save_stock_data(trade_date, merged)
        return merged

    def get_consecutive_limit_stocks(self, trade_date):
        """Return ts_codes limit-up on both trade_date and the most recent
        earlier trading day that has limit data (consecutive-limit stocks).

        Returns an empty set when the calendar lookup fails or there is no
        earlier trading day in the 30-day window; raises ValueError when the
        earlier days exist but none yields limit data.
        """
        # Calendar for the past 30 calendar days, enough to span holidays.
        end_date = datetime.strptime(trade_date, '%Y%m%d')
        start_date = end_date - timedelta(days=30)
        trade_dates = self.get_trade_dates(start_date.strftime('%Y%m%d'), trade_date)

        # trade_date itself must be the last entry of the calendar.
        if not trade_dates or trade_dates[-1] != trade_date:
            logger.error(f"交易日数据异常 | trade_date={trade_date} available_dates={trade_dates[-3:]}")
            return set()

        current_index = trade_dates.index(trade_date)
        if current_index == 0:
            # No earlier trading day in the window; the original code also
            # returned an empty set for this edge case.
            logger.warning(f"前一交易日查找失败 | current_date={trade_date} trade_dates={trade_dates[-5:]}")
            return set()

        # Walk backwards to the most recent day that actually has limit data.
        prev_index = current_index - 1
        while prev_index >= 0:
            prev_date = trade_dates[prev_index]
            try:
                prev_limit = self.cache_manager.load_limit_data(prev_date)
                if prev_limit is None:
                    self._check_api_limit()
                    prev_limit = self.pro.limit_list(trade_date=prev_date, fields='ts_code,limit')
                    if not prev_limit.empty:
                        self.cache_manager.save_limit_data(prev_date, prev_limit)

                if not prev_limit.empty:
                    break
                prev_index -= 1
            except Exception:
                logger.debug(f"交易日验证中 | prev_date={prev_date} index={prev_index}")
                prev_index -= 1
        else:
            # while/else: exhausted all candidates without a break.
            raise ValueError(f"未找到{trade_date}的有效前交易日")

        # Today's limit list: cache first, then API with retries.
        today_limit = self.cache_manager.load_limit_data(trade_date)
        if today_limit is None:
            max_retries = 3
            today_limit = pd.DataFrame()

            for attempt in range(max_retries):
                try:
                    self._check_api_limit()
                    today_limit = self.pro.limit_list(trade_date=trade_date, fields='ts_code,limit')
                    if today_limit.empty:
                        # Logged instead of print()ed, for consistency with the
                        # rest of the class.
                        logger.warning(f"今日{trade_date}无涨停数据，尝试第{attempt+1}次重试")
                        continue

                    self.cache_manager.save_limit_data(trade_date, today_limit)
                    break
                except Exception as e:
                    if attempt == max_retries - 1:
                        logger.error(f"获取{trade_date}涨停数据失败: {str(e)}")
                        return set()
                    time.sleep(1)

        # 'U' == limit-up; intersect today's and the previous day's sets.
        today_limit_stocks = set(today_limit[today_limit['limit'] == 'U']['ts_code']) if not today_limit.empty else set()
        prev_limit_stocks = set(prev_limit[prev_limit['limit'] == 'U']['ts_code']) if not prev_limit.empty else set()

        return today_limit_stocks.intersection(prev_limit_stocks)

    def calculate_indicators(self, trade_date):
        """Compute per-sector indicators for trade_date.

        Returns (sector_stats, quantiles):
          sector_stats: per sw_industry — total_stocks, limit_up,
                        limit_density, consecutive_ratio.
          quantiles:    per sw_industry — q25/q75/q80 of the limit density
                        over up to the last 10 trading days.

        Raises ValueError when no historical day yields usable data.
        """
        # Calendar window wide enough to contain 10 trading days.
        end_date = datetime.strptime(trade_date, '%Y%m%d')
        start_date = end_date - timedelta(days=30)
        trade_dates = self.get_trade_dates(start_date.strftime('%Y%m%d'), trade_date)

        current_data = self.get_stock_data(trade_date)
        consecutive_limit_stocks = self.get_consecutive_limit_stocks(trade_date)

        # Collect up to 10 days of history for the density quantiles.
        historical_data = []
        valid_dates = []
        logger.debug(f"开始获取历史数据 | 总日期数={len(trade_dates)} 候选日期={trade_dates[-10:]}")

        for d in trade_dates[-10:]:
            try:
                logger.debug(f"正在获取交易日数据 | date={d}")
                data = self.get_stock_data(d)
                # Crude API throttle; NOTE(review): it also fires on cache
                # hits and could be skipped when the cache served the data.
                time.sleep(6)

                if not data.empty and 'ts_code' in data.columns:
                    historical_data.append(data)
                    valid_dates.append(d)
                else:
                    logger.warning(f"空数据跳过 | date={d}")
            except Exception as e:
                logger.error(f"历史数据获取失败 | date={d} 错误类型={type(e).__name__} 详情={str(e)}")
                logger.debug(f"请求参数: trade_date={d}", exc_info=True)

        logger.info(f"完成历史数据收集 | 有效日期数={len(valid_dates)} 有效日期列表={valid_dates}")
        if not historical_data:
            # pd.concat([]) would raise an opaque ValueError; fail explicitly.
            raise ValueError(f"无有效历史数据 | trade_date={trade_date}")

        historical_density = [
            day_data.groupby('sw_industry').agg(
                limit_up=('limit', 'sum'),
                total=('ts_code', 'count')
            ).assign(limit_density=lambda df: df['limit_up'] / df['total'])
            for day_data in historical_data
        ]
        historical_df = pd.concat(historical_density)

        # Per-industry quantiles of the historical limit density (incl. 80%).
        quantiles = historical_df.groupby('sw_industry')['limit_density']\
            .agg([('q25', lambda x: np.quantile(x, 0.25)),
                  ('q75', lambda x: np.quantile(x, 0.75)),
                  ('q80', lambda x: np.quantile(x, 0.80))])

        # Today's limit-up density per sector.
        sector_stats = current_data.groupby('sw_industry').agg(
            total_stocks=('ts_code', 'count'),
            limit_up=('limit', 'sum')
        ).reset_index()

        sector_stats['limit_density'] = sector_stats['limit_up'] / sector_stats['total_stocks']

        # Share of today's limit-up stocks that are consecutive limit-ups.
        sector_consecutive = {}
        for sector in sector_stats['sw_industry'].unique():
            sector_limit_stocks = set(current_data[(current_data['sw_industry'] == sector) &
                                                   (current_data['limit'] == 1)]['ts_code'])
            if sector_limit_stocks:
                hits = sector_limit_stocks.intersection(consecutive_limit_stocks)
                sector_consecutive[sector] = len(hits) / len(sector_limit_stocks)
            else:
                sector_consecutive[sector] = 0

        sector_stats['consecutive_ratio'] = sector_stats['sw_industry'].map(sector_consecutive)

        return sector_stats, quantiles

    def calculate_scores(self, sector_stats, quantiles, current_data):
        """Combine the indicator columns into a 0-25 score per sector.

        Components (each computed once and exposed as a detail column; the
        original duplicated every scoring lambda between the total and the
        detail columns):
          density_score     0-10: limit density scaled up from the 5% floor
          consecutive_score 0-5:  full marks at >=20% consecutive ratio
          historical_score  0-5:  excess of today's density over the q80 quantile
          strength_score    0-5:  density vs. the whole market (full at >=1.2x)
        """
        # `quantiles` carries sw_industry as a named index level, which
        # DataFrame.merge accepts in `on`.
        sector_stats = sector_stats.merge(quantiles, on='sw_industry')

        # Whole-market baseline density.
        market_total = current_data['ts_code'].nunique()
        market_limit_up = current_data['limit'].sum()
        market_density = market_limit_up / market_total if market_total > 0 else 0

        def density_score(x):
            # 0 below 5% density, then 0.5 point per percentage point, cap 10.
            return min(10, max(0, (x - 0.05) * 50)) if x >= 0.05 else 0

        def consecutive_score(x):
            return 5 if x >= 0.2 else min(4, max(0, x * 20))

        def historical_score(row):
            if row['limit_density'] >= row['q80']:
                return min(5, max(0, (row['limit_density'] - row['q80']) * 50))
            return 0

        def strength_score(x):
            return 5 if x >= 1.2 else min(4, max(0, (x - 0.8) * 10))

        # Placeholder keeps 'score' in its original column position.
        sector_stats['score'] = 0.0

        sector_stats['market_relative_strength'] = (
            sector_stats['limit_density'] / market_density if market_density > 0 else 0)

        sector_stats['density_score'] = sector_stats['limit_density'].apply(density_score)
        sector_stats['consecutive_score'] = sector_stats['consecutive_ratio'].apply(consecutive_score)
        sector_stats['historical_score'] = sector_stats.apply(historical_score, axis=1)
        sector_stats['strength_score'] = sector_stats['market_relative_strength'].apply(strength_score)

        # Total = clamped sum of the four components (identical values to the
        # original duplicated lambdas).
        sector_stats['score'] = (sector_stats['density_score']
                                 + sector_stats['consecutive_score']
                                 + sector_stats['historical_score']
                                 + sector_stats['strength_score']).clip(0, 25)

        return sector_stats

    def export_to_csv(self, data, filename):
        """Write `data` to output/<filename> beside this module; return the path.

        utf-8-sig is used so spreadsheet tools open the headers correctly.
        """
        output_dir = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'output')
        # exist_ok avoids the check-then-create race of the original code.
        os.makedirs(output_dir, exist_ok=True)

        output_path = os.path.join(output_dir, filename)
        data.to_csv(output_path, index=False, encoding='utf-8-sig')
        print(f"结果已导出至: {output_path}")
        return output_path

    def _check_api_limit(self):
        """Count an API call; sleep once the per-window call budget is hit."""
        self.api_counter += 1
        if self.api_counter >= self.API_CALL_LIMIT:
            time.sleep(self.API_COOLDOWN_SECONDS)
            self.api_counter = 0

    def get_index_data(self, date):
        """Return the SSE composite (000001.SH) close for `date`, cached.

        DataFrame with trade_date and close; may be empty when the API has no
        row for the date (e.g. a non-trading day) — only non-empty results are
        cached.
        """
        cached_index = self.cache_manager.load_index_data(date)
        if cached_index is not None and not cached_index.empty:
            return cached_index

        self._check_api_limit()
        df = self.pro.index_daily(ts_code='000001.SH', trade_date=date, fields='trade_date,close')
        if not df.empty:
            self.cache_manager.save_index_data(date, df)
        return df