from fastapi import FastAPI, HTTPException
from fastapi.middleware.cors import CORSMiddleware
import akshare as ak
import pandas as pd
from datetime import datetime, timedelta
from typing import Optional, List, Dict, Any
import logging
from cachetools import TTLCache
import asyncio
from concurrent.futures import ThreadPoolExecutor
import numpy as np
import gc
from functools import lru_cache
import json
from numba import jit  # JIT编译加速数值计算
import requests
import time
from requests.adapters import HTTPAdapter
from urllib3.util.retry import Retry
import urllib3

# Silence urllib3's InsecureRequestWarning (some requests may run with verify=False).
urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)

# Module-wide logging setup.
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)

# Network tuning knobs shared by the HTTP session and the AKShare retry wrapper.
NETWORK_TIMEOUT = 30  # per-request timeout in seconds
MAX_RETRIES = 3  # maximum retry attempts after the first failure
RETRY_BACKOFF_FACTOR = 1  # base factor for exponential backoff between retries
# 配置requests会话，添加重试机制
def create_session_with_retries():
    """Build a requests.Session that retries transient failures and enforces a timeout.

    Retries up to MAX_RETRIES times on 429/5xx responses for idempotent
    methods with exponential backoff, and applies NETWORK_TIMEOUT as the
    default timeout for every request made through the session.

    Returns:
        requests.Session: configured session.
    """
    session = requests.Session()

    # Retry policy: only idempotent methods, only transient status codes.
    retry_strategy = Retry(
        total=MAX_RETRIES,
        status_forcelist=[429, 500, 502, 503, 504],
        allowed_methods=["HEAD", "GET", "OPTIONS"],
        backoff_factor=RETRY_BACKOFF_FACTOR
    )

    # BUG FIX: `session.timeout = NETWORK_TIMEOUT` is silently ignored —
    # requests.Session has no `timeout` attribute, so the old code ran with
    # no timeout at all. Enforce a default timeout at the adapter level so
    # every request through this session gets one unless the caller
    # explicitly passes `timeout=`.
    class _TimeoutAdapter(HTTPAdapter):
        """HTTPAdapter that injects NETWORK_TIMEOUT when no timeout is given."""

        def send(self, request, **kwargs):
            if kwargs.get("timeout") is None:
                kwargs["timeout"] = NETWORK_TIMEOUT
            return super().send(request, **kwargs)

    adapter = _TimeoutAdapter(max_retries=retry_strategy)
    session.mount("http://", adapter)
    session.mount("https://", adapter)

    return session

# Shared session reused by all outbound HTTP calls (connection pooling + retries).
http_session = create_session_with_retries()

# 安全的AKShare调用包装函数
def safe_akshare_call(func, *args, max_retries=MAX_RETRIES, fallback_data=None, **kwargs):
    """Call an AKShare function with retries and graceful degradation.

    Failures whose message looks network-related are retried up to
    ``max_retries`` additional times with exponential backoff; any other
    failure aborts immediately. When all attempts fail and ``fallback_data``
    was supplied, it is returned instead of raising so callers can degrade
    gracefully.

    Args:
        func: the AKShare function to invoke.
        *args, **kwargs: forwarded to ``func``.
        max_retries: extra attempts after the first call.
        fallback_data: value returned instead of raising on total failure.

    Returns:
        The result of ``func``, or ``fallback_data`` on total failure.

    Raises:
        HTTPException: 503 when network retries are exhausted, 500 for
            non-network errors (only when no ``fallback_data`` is given).
    """
    # Substrings that identify transient, retry-worthy network failures.
    network_error_keywords = (
        'timeout', 'connection', 'proxy', 'network',
        'httpsconnectionpool', 'read timed out', 'remote end closed',
        'ssl', 'certificate', 'handshake'
    )

    for attempt in range(max_retries + 1):
        try:
            # BUG FIX: the old per-attempt `pd.set_option('display.max_columns',
            # None)` only affected DataFrame *printing*, not networking, despite
            # the comment claiming it set a network timeout — removed.
            return func(*args, **kwargs)
        except Exception as e:
            error_msg = str(e).lower()

            if any(keyword in error_msg for keyword in network_error_keywords):
                if attempt < max_retries:
                    # Exponential backoff: factor * 1s, 2s, 4s, ...
                    wait_time = RETRY_BACKOFF_FACTOR * (2 ** attempt)
                    logger.warning(f"网络错误，{wait_time}秒后重试 (尝试 {attempt + 1}/{max_retries + 1}): {str(e)}")
                    time.sleep(wait_time)
                    continue
                logger.error(f"网络请求失败，已达到最大重试次数: {str(e)}")
                if fallback_data is not None:
                    logger.info("使用降级数据")
                    return fallback_data
                raise HTTPException(
                    status_code=503,
                    detail="网络服务暂时不可用，请稍后重试"
                )

            # Non-network error: retrying will not help, fail (or degrade) now.
            logger.error(f"AKShare调用失败: {str(e)}")
            if fallback_data is not None:
                logger.info("使用降级数据")
                return fallback_data
            raise HTTPException(
                status_code=500,
                detail=f"数据获取失败: {str(e)}"
            )

    # Defensive: the loop always returns or raises, but keep the original
    # degraded fallback in case control ever reaches here.
    if fallback_data is not None:
        logger.info("使用降级数据")
        return fallback_data
    raise HTTPException(
        status_code=503,
        detail="服务暂时不可用，请稍后重试"
    )

app = FastAPI(title="Stock Data API", version="1.0.0")

# CORS: only the local Vite dev-server origins may call this API.
app.add_middleware(
    CORSMiddleware,
    allow_origins=["http://localhost:5173", "http://127.0.0.1:5173"],
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)

# Tiered TTL caches — each lifetime matches how fast that data goes stale.
cache = TTLCache(maxsize=500, ttl=1800)  # general purpose, 30 minutes
# Real-time quotes go stale quickly: short TTL.
real_time_cache = TTLCache(maxsize=100, ttl=60)
# Historical bars rarely change: 1 hour.
history_cache = TTLCache(maxsize=200, ttl=3600)
# Derived analysis is the most expensive to recompute: keep longest.
analysis_cache = TTLCache(maxsize=150, ttl=7200)  # 2 hours

# Thread pool for blocking AKShare/pandas work (keeps the event loop free).
executor = ThreadPoolExecutor(max_workers=4)

# Global hit/miss counters shared by every cache tier.
cache_stats = {"hits": 0, "misses": 0, "total_requests": 0}

def update_cache_stats(hit: bool):
    """Record one cache lookup in the global hit/miss counters."""
    cache_stats["total_requests"] += 1
    bucket = "hits" if hit else "misses"
    cache_stats[bucket] += 1

# JIT-compiled numeric kernels for technical indicators.
@jit(nopython=True)
def calculate_sma_jit(prices, window):
    """JIT-compiled simple moving average.

    The first ``window - 1`` entries are NaN because a full window of
    prices is not yet available there.
    """
    length = len(prices)
    out = np.empty(length)
    out[:window - 1] = np.nan

    for idx in range(window - 1, length):
        out[idx] = np.mean(prices[idx - window + 1:idx + 1])

    return out

@jit(nopython=True)
def calculate_rsi_jit(prices, window=14):
    """JIT-compiled RSI using Wilder smoothing.

    Returns an array the same length as ``prices``; entries before index
    ``window`` are NaN because the indicator needs ``window`` deltas
    (``window + 1`` prices) before its first value.
    """
    n = len(prices)
    if n < window + 1:
        # Not enough data for even one RSI value.
        return np.full(n, np.nan)
    
    deltas = np.diff(prices)  # n-1 day-over-day changes
    gains = np.where(deltas > 0, deltas, 0)
    losses = np.where(deltas < 0, -deltas, 0)
    
    # Averages are indexed over the deltas array (length n-1). Entries
    # before window-1 are intentionally left uninitialized (np.empty);
    # they feed into `rs` below but are discarded by the final slice.
    avg_gains = np.empty(n-1)
    avg_losses = np.empty(n-1)
    
    # Seed with the plain mean of the first `window` deltas.
    avg_gains[window-1] = np.mean(gains[:window])
    avg_losses[window-1] = np.mean(losses[:window])
    
    # Wilder's exponential smoothing for all subsequent values.
    for i in range(window, n-1):
        avg_gains[i] = (avg_gains[i-1] * (window-1) + gains[i]) / window
        avg_losses[i] = (avg_losses[i-1] * (window-1) + losses[i]) / window
    
    # NOTE(review): when avg_losses is 0 this divides by zero, giving
    # rs = inf and RSI = 100 — the conventional value for all-gain windows.
    rs = avg_gains / avg_losses
    rsi = 100 - (100 / (1 + rs))
    
    # Re-align from delta indexing to price indexing; only the initialized
    # tail (delta index window-1 onward) is kept.
    result = np.full(n, np.nan)
    result[window:] = rsi[window-1:]
    
    return result

@lru_cache(maxsize=128)
def get_cached_stock_mapping():
    """Return the code→name stock mapping (STOCK_MAPPING is defined below).

    NOTE(review): lru_cache memoizes the first .copy(), so every caller
    receives the *same* dict object — a mutation by one caller is visible
    to all. Confirm callers treat the result as read-only.
    """
    return STOCK_MAPPING.copy()

# Stock code → display-name mapping for known A-share tickers.
STOCK_MAPPING = {
    "000001": "平安银行",
    "000002": "万科A",
    "000858": "五粮液",
    "600000": "浦发银行",
    "600036": "招商银行",
    "600519": "贵州茅台",
    "600887": "伊利股份",
    "600588": "用友网络"
}

# Frequently requested symbols that get pre-warmed into the caches at startup.
POPULAR_STOCKS = ["000001", "600519", "600036", "000002", "600588"]

# Flipped to True once the startup pre-warm has finished.
preload_completed = False

def get_stock_name(symbol: str) -> str:
    """Look up a stock's display name, falling back to a generic label."""
    fallback = f"股票{symbol}"
    return STOCK_MAPPING.get(symbol, fallback)

def format_stock_symbol(symbol: str) -> str:
    """Prefix a bare A-share code with its AKShare exchange marker."""
    if symbol.startswith('6'):
        # Shanghai-listed codes start with 6.
        return f"sh{symbol}"
    if symbol.startswith(('0', '3')):
        # Shenzhen main-board (0...) and ChiNext (3...) codes.
        return f"sz{symbol}"
    return symbol

async def preload_popular_stocks():
    """Warm the caches for the hot list of stocks, at most once per process."""
    global preload_completed
    if preload_completed:
        return

    logger.info("开始预加载热门股票数据...")

    async def warm_one(symbol, gate):
        # Fetch one stock's info in the thread pool; failures are logged,
        # never propagated, so one bad symbol cannot stop the warm-up.
        async with gate:
            try:
                await asyncio.get_event_loop().run_in_executor(
                    executor, get_stock_info_sync, symbol
                )
                logger.info(f"预加载股票信息成功: {symbol}")
            except HTTPException as e:
                logger.warning(f"预加载股票信息失败 {symbol}: {e.detail}")
            except Exception as e:
                logger.error(f"预加载股票信息失败 {symbol}: {str(e)}")

    try:
        # At most 3 upstream fetches in flight at once.
        gate = asyncio.Semaphore(3)
        await asyncio.gather(
            *(warm_one(symbol, gate) for symbol in POPULAR_STOCKS),
            return_exceptions=True,
        )
        preload_completed = True
        logger.info("热门股票数据预加载完成")
    except Exception as e:
        logger.error(f"预加载过程中出现错误: {str(e)}")

@app.on_event("startup")
async def startup_event():
    """Kick off cache pre-warming in the background without blocking startup."""
    # NOTE(review): @app.on_event is deprecated in newer FastAPI in favor of
    # lifespan handlers — confirm against the pinned FastAPI version.
    asyncio.create_task(preload_popular_stocks())

@app.get("/")
async def root():
    """Simple liveness message at the API root."""
    return {"message": "Stock Data API is running"}

@app.get("/api/health")
async def health_check():
    """Health endpoint: reports network reachability, preload state, and cache stats.

    Always returns 200; failures in the checks themselves are reported in
    the body as status "unhealthy".
    """
    try:
        # Probe outbound connectivity off the event loop (the probe blocks).
        # `get_running_loop` replaces the deprecated `get_event_loop` inside
        # coroutines.
        test_result = await asyncio.get_running_loop().run_in_executor(
            executor, test_network_connection
        )

        total = cache_stats["total_requests"]
        hit_rate = (cache_stats["hits"] / total * 100) if total > 0 else 0
        return {
            "status": "healthy", 
            "message": "API is running", 
            "timestamp": datetime.now().isoformat(),
            "preload_completed": preload_completed,
            "network": test_result,
            "cache_stats": {
                "hit_rate": round(hit_rate, 2),
                "total_requests": total,
                "cache_sizes": {
                    "real_time": len(real_time_cache),
                    "history": len(history_cache),
                    "analysis": len(analysis_cache),
                    "general": len(cache)
                }
            }
        }
    except Exception as e:
        # A health endpoint should answer even when its own checks fail.
        logger.error(f"健康检查失败: {str(e)}")
        return {
            "status": "unhealthy", 
            "timestamp": datetime.now().isoformat(),
            "error": str(e)
        }

def test_network_connection():
    """Probe outbound connectivity with a quick GET against a public site."""
    try:
        probe = http_session.get("https://www.baidu.com", timeout=5)
    except Exception as e:
        # No route out at all (DNS failure, timeout, refused connection...).
        return {"status": "disconnected", "error": str(e)}
    if probe.status_code == 200:
        return {"status": "connected", "latency": "normal"}
    # Reachable but not a clean 200 — degraded connectivity.
    return {"status": "limited", "code": probe.status_code}

@app.get("/api/cache/stats")
async def get_cache_stats():
    """Expose hit/miss counters plus current and maximum sizes of every cache tier."""
    total = cache_stats["total_requests"]
    hit_rate = cache_stats["hits"] / total * 100 if total > 0 else 0

    # Iterate the tiers once to build both the size and limit maps.
    tiers = {
        "real_time_cache": real_time_cache,
        "history_cache": history_cache,
        "analysis_cache": analysis_cache,
        "general_cache": cache,
    }
    return {
        "hit_rate": round(hit_rate, 2),
        "hits": cache_stats["hits"],
        "misses": cache_stats["misses"],
        "total_requests": total,
        "cache_sizes": {name: len(tier) for name, tier in tiers.items()},
        "cache_limits": {name: tier.maxsize for name, tier in tiers.items()},
    }

def get_stock_info_sync(symbol: str):
    """Blocking lookup of a stock's latest quote (intended to run in the thread pool).

    First tries the AKShare full-market spot snapshot; if the symbol is
    missing there, falls back to the latest daily bar from the past 5 days
    (change/change_percent are reported as 0 in that path because no
    previous close is fetched).

    Raises:
        HTTPException: 404 when no data exists for the symbol; 500 on other
            failures (network errors may surface as 503 via safe_akshare_call).
    """
    try:
        # Spot snapshot of all A-shares, filtered to the requested symbol.
        # NOTE(review): column names (代码/最新价/涨跌额/...) follow the
        # AKShare stock_zh_a_spot_em schema — re-verify on AKShare upgrades.
        df = safe_akshare_call(ak.stock_zh_a_spot_em)
        stock_info = df[df['代码'] == symbol]
        
        if stock_info.empty:
            # Fallback: most recent close from up to 5 days of daily history.
            end_date = datetime.now().strftime("%Y%m%d")
            start_date = (datetime.now() - timedelta(days=5)).strftime("%Y%m%d")
            hist_df = safe_akshare_call(ak.stock_zh_a_hist, symbol=symbol, period="daily", 
                                       start_date=start_date, end_date=end_date, adjust="")
            if not hist_df.empty:
                latest = hist_df.iloc[-1]
                return {
                    "symbol": symbol,
                    "name": get_stock_name(symbol),
                    "current_price": float(latest['收盘']),
                    "change": 0.0,  # previous close unavailable in this path
                    "change_percent": 0.0,
                    "volume": int(latest['成交量']),
                    "turnover": float(latest['成交额']) if '成交额' in latest else 0.0
                }
            else:
                raise HTTPException(status_code=404, detail="Stock not found")
        
        return {
            "symbol": symbol,
            "name": get_stock_name(symbol),
            "current_price": float(stock_info.iloc[0]['最新价']),
            "change": float(stock_info.iloc[0]['涨跌额']),
            "change_percent": float(stock_info.iloc[0]['涨跌幅']),
            "volume": int(stock_info.iloc[0]['成交量']),
            "turnover": float(stock_info.iloc[0]['成交额'])
        }
        
    except HTTPException:
        # Preserve deliberate statuses (the 404 above, 503 from the wrapper).
        raise
    except Exception as e:
        logger.error(f"Error getting stock info for {symbol}: {str(e)}")
        raise HTTPException(status_code=500, detail=f"Failed to get stock info: {str(e)}")

@app.get("/api/stock/info/{symbol}")
async def get_stock_info(symbol: str):
    """Return real-time quote info for one stock, served from a short-TTL cache."""
    try:
        cache_key = f"info_{symbol}"
        if cache_key in real_time_cache:
            update_cache_stats(True)
            return real_time_cache[cache_key]
        
        update_cache_stats(False)
        # Blocking AKShare work runs in the thread pool.
        loop = asyncio.get_running_loop()
        result = await loop.run_in_executor(executor, get_stock_info_sync, symbol)
        
        real_time_cache[cache_key] = result
        return result
        
    except HTTPException:
        # BUG FIX: the generic handler below used to catch HTTPException too,
        # turning intended 404/503 responses into 500s. Re-raise as-is.
        raise
    except Exception as e:
        logger.error(f"Error getting stock info for {symbol}: {str(e)}")
        raise HTTPException(status_code=500, detail=f"Failed to get stock info: {str(e)}")

def get_stock_history_sync(symbol: str, period: str, start_date: str, end_date: str):
    """Blocking fetch of OHLCV history for one stock (runs in the thread pool).

    Supports daily bars plus 30/60-minute bars; AKShare's Chinese column
    names are normalized to English keys in the response.

    Raises:
        HTTPException: 400 for an unsupported period, 404 when no rows come
            back, 500 for unexpected failures.
    """
    try:
        if period == "daily":
            df = safe_akshare_call(ak.stock_zh_a_hist, symbol=symbol, period="daily",
                                   start_date=start_date, end_date=end_date, adjust="qfq")
        elif period in ["30m", "60m"]:
            # Minute bars use a different AKShare endpoint and need
            # timestamps spanning the trading session.
            period_map = {"30m": "30", "60m": "60"}
            df = safe_akshare_call(ak.stock_zh_a_hist_min_em, symbol=symbol, period=period_map[period],
                                   start_date=start_date + " 09:30:00",
                                   end_date=end_date + " 15:00:00", adjust="qfq")
        else:
            raise HTTPException(status_code=400, detail="Unsupported period")

        if df.empty:
            raise HTTPException(status_code=404, detail="No data found")

        # Vectorized normalization of the Chinese-named columns.
        df_copy = df.copy()

        if period == "daily":
            df_copy['date'] = pd.to_datetime(df_copy['日期']).dt.strftime('%Y-%m-%d')
        else:
            df_copy['date'] = pd.to_datetime(df_copy['时间']).dt.strftime('%Y-%m-%d %H:%M:%S')

        df_copy['open'] = pd.to_numeric(df_copy['开盘'], errors='coerce')
        df_copy['high'] = pd.to_numeric(df_copy['最高'], errors='coerce')
        df_copy['low'] = pd.to_numeric(df_copy['最低'], errors='coerce')
        df_copy['close'] = pd.to_numeric(df_copy['收盘'], errors='coerce')
        df_copy['volume'] = pd.to_numeric(df_copy['成交量'], errors='coerce').fillna(0).astype(int)

        # (Removed the dead `date_col` variable and no-op rename — the column
        # is already called 'date' in both branches.)
        data = df_copy[['date', 'open', 'high', 'low', 'close', 'volume']].to_dict('records')

        # Free the intermediate frame promptly; history responses can be large.
        del df_copy
        gc.collect()

        return {
            "symbol": symbol,
            "name": get_stock_name(symbol),
            "period": period,
            "data": data
        }

    except HTTPException:
        # BUG FIX: the generic handler below used to re-wrap the intended
        # 400/404 responses as 500 errors. Re-raise as-is.
        raise
    except Exception as e:
        logger.error(f"Error getting stock history for {symbol}: {str(e)}")
        raise HTTPException(status_code=500, detail=f"Failed to get stock history: {str(e)}")

@app.get("/api/stock/history/{symbol}")
async def get_stock_history(
    symbol: str,
    period: str = "daily",
    start_date: Optional[str] = None,
    end_date: Optional[str] = None
):
    """Return OHLCV history, defaulting to the last 365 days, with a 1-hour cache."""
    try:
        # Default window: one full year ending today.
        if not end_date:
            end_date = datetime.now().strftime("%Y%m%d")
        if not start_date:
            start_date = (datetime.now() - timedelta(days=365)).strftime("%Y%m%d")
        
        cache_key = f"history_{symbol}_{period}_{start_date}_{end_date}"
        if cache_key in history_cache:
            update_cache_stats(True)
            return history_cache[cache_key]
        
        update_cache_stats(False)
        # Blocking AKShare work runs in the thread pool.
        loop = asyncio.get_running_loop()
        result = await loop.run_in_executor(
            executor, get_stock_history_sync, symbol, period, start_date, end_date
        )
        
        history_cache[cache_key] = result
        return result
        
    except HTTPException:
        # BUG FIX: keep the intended 400/404 statuses instead of converting
        # them to generic 500 errors.
        raise
    except Exception as e:
        logger.error(f"Error getting stock history for {symbol}: {str(e)}")
        raise HTTPException(status_code=500, detail=f"Failed to get stock history: {str(e)}")

def get_stock_analysis_sync(symbol: str, days: int):
    """Blocking similarity analysis of a stock against its own history.

    Fetches up to 180 days of daily bars, computes MA5/MA20/RSI with the
    JIT kernels, then scores up to 10 historical windows of length ``days``
    (stepped 5 trading days apart) by Pearson correlation of daily returns
    against the most recent window.

    Raises:
        HTTPException: 404 when there is not enough data, 500 otherwise.
    """
    try:
        end_date = datetime.now().strftime("%Y%m%d")
        # Fetch ~3x the analysis window (capped at 180 days) so several
        # historical windows are available for comparison.
        start_date = (datetime.now() - timedelta(days=min(days*3, 180))).strftime("%Y%m%d")
        
        df = safe_akshare_call(ak.stock_zh_a_hist, symbol=symbol, period="daily", 
                              start_date=start_date, end_date=end_date, adjust="")
        
        if df.empty or len(df) < days:
            raise HTTPException(status_code=404, detail="Insufficient data for analysis")
        
        close_prices = df['收盘'].values.astype(np.float64)
        
        # Technical indicators via the JIT-compiled kernels. (The old MA10
        # computation was never used in the response — removed.)
        ma5 = calculate_sma_jit(close_prices, 5)
        ma20 = calculate_sma_jit(close_prices, 20)
        rsi = calculate_rsi_jit(close_prices, 14)
        
        # Daily returns of the most recent `days` bars — the reference series.
        recent_data = df.tail(days)
        price_changes = recent_data['收盘'].pct_change().dropna().values
        
        analysis_results = []
        max_results = min(10, (len(df) - days) // 5)  # cap the work
        
        for i in range(max_results):
            historical_start = len(df) - days - (i + 1) * 5
            historical_end = historical_start + days
            
            if historical_start >= 0:
                historical_data = df.iloc[historical_start:historical_end]
                historical_changes = historical_data['收盘'].pct_change().dropna().values
                
                # Pearson correlation of the aligned return series, scaled to
                # a 0-100 similarity; 50 when correlation is undefined.
                if len(historical_changes) > 0 and len(price_changes) > 0:
                    min_len = min(len(historical_changes), len(price_changes))
                    if min_len > 1:
                        correlation = np.corrcoef(
                            historical_changes[-min_len:], 
                            price_changes[-min_len:]
                        )[0, 1]
                        similarity = max(0, correlation * 100) if not np.isnan(correlation) else 50
                    else:
                        similarity = 50
                else:
                    similarity = 50
                
                start_price = float(historical_data.iloc[0]['收盘'])
                end_price = float(historical_data.iloc[-1]['收盘'])
                
                analysis_results.append({
                    "period": f"{historical_data.iloc[0]['日期'].strftime('%Y-%m-%d')} 至 {historical_data.iloc[-1]['日期'].strftime('%Y-%m-%d')}",
                    "similarity": round(similarity, 1),
                    "start_price": start_price,
                    "end_price": end_price,
                    "change_percent": round((end_price - start_price) / start_price * 100, 2),
                    "technical_indicators": {
                        "ma5": float(ma5[historical_end-1]) if not np.isnan(ma5[historical_end-1]) else None,
                        "ma20": float(ma20[historical_end-1]) if not np.isnan(ma20[historical_end-1]) else None,
                        "rsi": float(rsi[historical_end-1]) if not np.isnan(rsi[historical_end-1]) else None
                    }
                })
        
        # Best matches first.
        analysis_results.sort(key=lambda x: x['similarity'], reverse=True)
        
        # Drop the large arrays before returning; responses stay cached.
        del close_prices, ma5, ma20, rsi
        gc.collect()
        
        return {
            "symbol": symbol,
            "name": get_stock_name(symbol),
            "analysis_period": days,
            "results": analysis_results
        }
        
    except HTTPException:
        # BUG FIX: let the intended 404 through instead of converting to 500.
        raise
    except Exception as e:
        logger.error(f"Error getting stock analysis for {symbol}: {str(e)}")
        raise HTTPException(status_code=500, detail=f"Failed to get stock analysis: {str(e)}")

@app.get("/api/stock/analysis/{symbol}")
async def get_stock_analysis(symbol: str, days: int = 30):
    """Return the similarity analysis for a stock, cached for 2 hours."""
    try:
        cache_key = f"analysis_{symbol}_{days}"
        if cache_key in analysis_cache:
            update_cache_stats(True)
            return analysis_cache[cache_key]
        
        update_cache_stats(False)
        # Blocking analysis work runs in the thread pool.
        loop = asyncio.get_running_loop()
        result = await loop.run_in_executor(
            executor, get_stock_analysis_sync, symbol, days
        )
        
        analysis_cache[cache_key] = result
        return result
        
    except HTTPException:
        # BUG FIX: keep the intended 404 status instead of wrapping it in a 500.
        raise
    except Exception as e:
        logger.error(f"Error getting stock analysis for {symbol}: {str(e)}")
        raise HTTPException(status_code=500, detail=f"Failed to get stock analysis: {str(e)}")

def get_stock_comparison_sync(symbol: str, target_period: str, days: int):
    """Blocking fetch of two aligned price series for side-by-side charting.

    ``target_period`` must look like 'YYYY-MM-DD 至 YYYY-MM-DD' (as produced
    by the analysis endpoint). Returns the last ``days`` calendar days
    ending today plus the requested historical window, each normalized to
    percent change from its first close.

    Raises:
        HTTPException: 400 for a malformed target_period, 500 otherwise.
    """
    try:
        # BUG FIX: a malformed target_period used to raise a bare ValueError
        # and surface as a generic 500; report it as a client error instead.
        try:
            start_date_str, end_date_str = target_period.split(' 至 ')
            start_date = datetime.strptime(start_date_str, '%Y-%m-%d')
            end_date = datetime.strptime(end_date_str, '%Y-%m-%d')
        except ValueError:
            raise HTTPException(
                status_code=400,
                detail="Invalid target_period, expected 'YYYY-MM-DD 至 YYYY-MM-DD'"
            )
        
        # Current window: the last `days` calendar days ending now.
        current_end = datetime.now()
        current_start = current_end - timedelta(days=days)
        
        current_df = safe_akshare_call(ak.stock_zh_a_hist, symbol=symbol, period="daily", 
                                      start_date=current_start.strftime("%Y%m%d"), 
                                      end_date=current_end.strftime("%Y%m%d"), adjust="")
        
        historical_df = safe_akshare_call(ak.stock_zh_a_hist, symbol=symbol, period="daily", 
                                         start_date=start_date.strftime("%Y%m%d"), 
                                         end_date=end_date.strftime("%Y%m%d"), adjust="")
        
        def _to_series(frame):
            """Convert a daily-bar frame into day-indexed percent-change points."""
            if frame.empty:
                return []
            base_price = float(frame.iloc[0]['收盘'])
            return [
                {
                    "day": i + 1,
                    "date": row['日期'].strftime("%Y-%m-%d"),
                    "price": float(row['收盘']),
                    "change_percent": 0 if i == 0 else round((float(row['收盘']) - base_price) / base_price * 100, 2)
                }
                for i, (_, row) in enumerate(frame.iterrows())
            ]
        
        # The two windows shared identical conversion code — factored out.
        current_data = _to_series(current_df)
        historical_data = _to_series(historical_df)
        
        return {
            "symbol": symbol,
            "name": get_stock_name(symbol),
            "current_period": {
                "start_date": current_start.strftime("%Y-%m-%d"),
                "end_date": current_end.strftime("%Y-%m-%d"),
                "data": current_data
            },
            "historical_period": {
                "start_date": start_date.strftime("%Y-%m-%d"),
                "end_date": end_date.strftime("%Y-%m-%d"),
                "data": historical_data
            }
        }
        
    except HTTPException:
        # Preserve the deliberate 400 above (and any status from the wrapper).
        raise
    except Exception as e:
        logger.error(f"Error getting stock comparison for {symbol}: {str(e)}")
        raise HTTPException(status_code=500, detail=f"Failed to get stock comparison: {str(e)}")

@app.get("/api/stock/comparison/{symbol}")
async def get_stock_comparison(symbol: str, target_period: str, days: int = 30):
    """Return current-vs-historical comparison series, served from the general cache."""
    try:
        cache_key = f"comparison_{symbol}_{target_period}_{days}"
        if cache_key in cache:
            return cache[cache_key]
        
        # Blocking AKShare work runs in the thread pool.
        loop = asyncio.get_running_loop()
        result = await loop.run_in_executor(
            executor, get_stock_comparison_sync, symbol, target_period, days
        )
        
        cache[cache_key] = result
        return result
        
    except HTTPException:
        # BUG FIX: keep intended client-error statuses (e.g. 400 for a bad
        # target_period) instead of wrapping them as 500.
        raise
    except Exception as e:
        logger.error(f"Error getting stock comparison for {symbol}: {str(e)}")
        raise HTTPException(status_code=500, detail=f"Failed to get stock comparison: {str(e)}")

if __name__ == "__main__":
    import uvicorn
    # Development entry point: bind all interfaces on port 8000.
    uvicorn.run(app, host="0.0.0.0", port=8000)