#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
东方财富基金历史净值数据爬虫
这个对应前端：http://localhost:3000/fund-data-downloader基金净值下载业务敏啊
功能：从东方财富网站爬取基金历史净值明细数据并存储到MySQL数据库
作者：Alan
创建时间：2025年
"""

import os
import sys
import django  # type: ignore
import requests
import re
import json
import time
import random
import threading
from datetime import datetime, timedelta
from bs4 import BeautifulSoup
from enum import Enum
import queue
from concurrent.futures import ThreadPoolExecutor
from dataclasses import dataclass
try:
    import mysql.connector  # type: ignore
    from mysql.connector import Error  # type: ignore
    from mysql.connector import pooling  # type: ignore
except ImportError:
    print("请安装mysql-connector-python: pip install mysql-connector-python")
    mysql = None  # type: ignore
    Error = Exception  # type: ignore
    pooling = None  # type: ignore
import logging
from typing import List, Dict, Optional, Any
from config import get_database_config

# Set up the Django environment so project models can be imported below.
current_dir = os.path.dirname(os.path.abspath(__file__))
sys.path.insert(0, current_dir)
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'wealth_backend.settings')
django.setup()

from data_service.models import FundInfo
from django.db import transaction  # type: ignore

# Configure logging (must happen after django.setup()).
# Make sure the log directory exists.
log_dir = os.path.join(current_dir, 'log')
os.makedirs(log_dir, exist_ok=True)

# Manually configure the module logger (avoid basicConfig so handlers are controlled here).
logger = logging.getLogger(__name__)
logger.setLevel(logging.INFO)

# Remove any handlers left over from previous imports/reloads.
for handler in logger.handlers[:]:
    logger.removeHandler(handler)

# Log formats: verbose for the file, compact for the console.
file_formatter = logging.Formatter('%(asctime)s - %(threadName)s - %(filename)s:%(lineno)d - %(levelname)s - %(message)s')
console_formatter = logging.Formatter('%(asctime)s - %(levelname)s - %(message)s')

# File handler: records all INFO-and-above messages.
file_handler = logging.FileHandler(os.path.join(log_dir, 'fund_nav_scraper.log'))
file_handler.setLevel(logging.INFO)
file_handler.setFormatter(file_formatter)

# Console handler: only WARNING and above, to keep stdout quiet.
console_handler = logging.StreamHandler()
console_handler.setLevel(logging.WARNING)
console_handler.setFormatter(console_formatter)

# Attach both handlers to the logger.
logger.addHandler(file_handler)
logger.addHandler(console_handler)

# Do not propagate to the root logger (avoids duplicate output).
logger.propagate = False


@dataclass
class FailedTask:
    """A download task that failed and is queued for retry."""
    fund_code: str  # fund ticker code
    retry_count: int = 0  # attempts made so far
    last_error: str = ""  # message from the most recent failure
    first_attempt_time: Optional[datetime] = None  # when the fund was first attempted
    last_attempt_time: Optional[datetime] = None  # when the fund was last attempted

class DownloadStatus(Enum):
    """Lifecycle states of a download run."""
    WAITING = "waiting"  # idle, waiting to start
    DOWNLOADING = "downloading"  # download in progress
    STOPPED = "stopped"  # stopped by user request
    COMPLETED = "completed"  # finished successfully
    FAILED = "failed"  # finished with errors

class FundNavScraper:
    """Scraper that downloads fund NAV history and stores it in MySQL."""
    
    def __init__(self, db_config: Dict[str, str]):
        """
        Initialize the fund NAV scraper.
        
        Args:
            db_config: database configuration dictionary; only keys accepted
                by mysql.connector.connect() are kept, extras are dropped
        """
        # Keep only the configuration keys mysql.connector.connect() supports.
        mysql_config_keys = {'host', 'port', 'database', 'user', 'password', 'charset'}
        self.db_config = {k: v for k, v in db_config.items() if k in mysql_config_keys}
        
        # Set up the database connection pool.
        self._connection_pool = None
        self._init_connection_pool()
        self.session = requests.Session()
        # Browser-like headers so the target site serves normal pages.
        self.session.headers.update({
            'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36',
            'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8',
            'Accept-Language': 'zh-CN,zh;q=0.9,en;q=0.8',
            'Accept-Encoding': 'gzip, deflate, br',
            'Connection': 'keep-alive',
            'Upgrade-Insecure-Requests': '1'
        })
        
        # State management (all fields below are guarded by self._lock).
        self._status = DownloadStatus.WAITING
        self._stop_flag = False
        self._current_task = ""
        self._lock = threading.Lock()
        
        # Progress bookkeeping exposed via get_progress_info().
        self._progress_info = {
            'current_step': '',
            'current_index': 0,
            'total_count': 0,
            'success_count': 0,
            'failed_count': 0,
            'start_time': None,
            'last_update_time': None
        }
        
        # Multithreading machinery.
        self._failed_queue = queue.Queue()  # FailedTask objects awaiting retry
        self._success_queue = queue.Queue()  # successfully downloaded fund codes
        self._failed_tasks = {}  # failure records keyed by fund code {fund_code: FailedTask}
        self._main_thread = None  # main download thread
        self._retry_thread = None  # retry thread
        self._thread_pool = None  # worker thread pool
        self._max_workers = 5  # maximum number of worker threads
        self._max_queue_size = 100  # maximum task queue size
        self._max_retries = 3  # maximum retry attempts per fund
        self._base_retry_delay = 5  # base retry delay (seconds)
        self._max_retry_delay = 300  # maximum retry delay (seconds), i.e. 5 minutes
        
        # Fund-code list cache.
        self._fund_codes_cache: Optional[List[str]] = None
        self._cache_timestamp: Optional[datetime] = None
        self._cache_ttl = 3600  # cache TTL (seconds), 1 hour
    
    @property
    def status(self) -> DownloadStatus:
        """Return the current download status, read under the state lock."""
        self._lock.acquire()
        try:
            return self._status
        finally:
            self._lock.release()
    
    def set_status(self, status: DownloadStatus) -> None:
        """Atomically update the download status and record the transition."""
        self._lock.acquire()
        try:
            self._status = status
            logger.info(f"状态变更为: {status.value}")
        finally:
            self._lock.release()
    
    def stop_download(self) -> bool:
        """Request that an in-progress download stop.

        Returns:
            True if a stop was requested, False when nothing is downloading.
        """
        with self._lock:
            # Guard clause: a stop only makes sense while downloading.
            if self._status != DownloadStatus.DOWNLOADING:
                logger.warning(f"当前状态 {self._status.value} 不支持停止操作")
                return False
            self._stop_flag = True
            self._current_task = "正在停止下载..."
            logger.info("收到停止下载请求")
            return True
    
    def reset(self) -> None:
        """Restore status, stop flag, task label and progress counters to their initial values."""
        blank_progress = {
            'current_step': '',
            'current_index': 0,
            'total_count': 0,
            'success_count': 0,
            'failed_count': 0,
            'start_time': None,
            'last_update_time': None,
        }
        with self._lock:
            self._status = DownloadStatus.WAITING
            self._stop_flag = False
            self._current_task = ""
            self._progress_info = blank_progress
    
    def force_reset_for_new_download(self) -> bool:
        """Forcibly reset all state so a fresh download can be started.

        Signals running threads to stop (without joining them), shuts the
        thread pool down without waiting, drains both work queues, clears
        failure bookkeeping and the fund-code cache, and returns the scraper
        to the WAITING state.

        Returns:
            True on success, False if an unexpected error occurred.
        """
        try:
            with self._lock:
                # Ask any running threads to stop.
                self._stop_flag = True
                
                # NOTE: threads are deliberately NOT joined here — the reset
                # must not block; workers observe _stop_flag and exit on their own.
                if self._main_thread and self._main_thread.is_alive():
                    logger.info("等待主下载线程结束...")
                
                if self._retry_thread and self._retry_thread.is_alive():
                    logger.info("等待重试线程结束...")
                
                # Shut the thread pool down without waiting for workers.
                if self._thread_pool:
                    try:
                        self._thread_pool.shutdown(wait=False)
                    # BUG FIX: was a bare `except:`, which also swallows
                    # SystemExit/KeyboardInterrupt; narrow to Exception.
                    except Exception:
                        pass
                    self._thread_pool = None
                
                # Drain the failed-task queue.
                while not self._failed_queue.empty():
                    try:
                        self._failed_queue.get_nowait()
                    except queue.Empty:
                        break
                
                # Drain the success queue.
                while not self._success_queue.empty():
                    try:
                        self._success_queue.get_nowait()
                    except queue.Empty:
                        break
                
                # Reset all state fields.
                self._status = DownloadStatus.WAITING
                self._stop_flag = False
                self._current_task = ""
                self._failed_tasks.clear()
                self._main_thread = None
                self._retry_thread = None
                
                # Reset progress information.
                self._progress_info = {
                    'current_step': '',
                    'current_index': 0,
                    'total_count': 0,
                    'success_count': 0,
                    'failed_count': 0,
                    'start_time': None,
                    'last_update_time': None
                }
                
                # Invalidate the cached fund-code list.
                self.clear_fund_codes_cache()
                
                logger.info("强制重置完成，状态已重置为WAITING")
                return True
                
        except Exception as e:
            logger.error(f"强制重置失败: {str(e)}")
            return False
    
    def can_restart_download(self) -> tuple[bool, str]:
        """Check whether a new download may be started right now.

        Returns:
            (restartable, human-readable reason)
        """
        # Only terminal/idle states allow a restart.
        restartable = {
            DownloadStatus.STOPPED,
            DownloadStatus.COMPLETED,
            DownloadStatus.FAILED,
            DownloadStatus.WAITING,
        }
        with self._lock:
            state = self._status
            if state in restartable:
                return True, f"当前状态 {state.value} 允许重新下载"
            return False, f"当前状态 {state.value} 不允许重新下载，请等待当前任务完成或停止"
    
    def is_stopped(self) -> bool:
        """检查是否被停止"""
        with self._lock:
            return self._stop_flag
    
    def reset_stop_flag(self) -> None:
        """重置停止标志"""
        with self._lock:
            self._stop_flag = False
    
    def get_progress_info(self) -> Dict[str, Any]:
        """获取进度信息"""
        with self._lock:
            return {
                'status': self._status.value,
                'current_task': self._current_task,
                'progress': self._progress_info.copy()
            }
    
    def _update_progress(self, current_step: str, current_index: int = 0, total_count: int = 0, 
                        success_count: int = 0, failed_count: int = 0) -> None:
        """更新进度信息"""
        with self._lock:
            self._progress_info.update({
                'current_step': current_step,
                'current_index': current_index,
                'total_count': total_count,
                'success_count': success_count,
                'failed_count': failed_count,
                'last_update_time': datetime.now()
            })
    
    def update_fund_download_status(self, fund_code: str, status: str, retry_count: int = 0, error_message: str = "") -> bool:
        """Upsert a fund's row in the fund_download_status table.
        
        Args:
            fund_code: fund code
            status: download status string (e.g. 'downloading', 'success', 'failed')
            retry_count: number of retries so far
            error_message: error description, empty when none
            
        Returns:
            True if the row was written, False otherwise.
        """
        connection = None
        cursor = None
        try:
            # Bail out early when the MySQL driver is not installed.
            if mysql is None:
                return False
                
            connection = self._get_connection()
            if connection is None:
                logger.error("无法获取数据库连接")
                return False
            cursor = connection.cursor()
            
            # Insert, or update the existing row for this fund code.
            # last_success_time is only refreshed when the new status is 'success'.
            sql = """
                INSERT INTO fund_download_status 
                (fund_code, download_status, retry_count, last_attempt_time, error_message)
                VALUES (%s, %s, %s, NOW(), %s)
                ON DUPLICATE KEY UPDATE
                download_status = VALUES(download_status),
                retry_count = VALUES(retry_count),
                last_attempt_time = NOW(),
                error_message = VALUES(error_message),
                last_success_time = CASE WHEN VALUES(download_status) = 'success' THEN NOW() ELSE last_success_time END
            """
            
            cursor.execute(sql, (fund_code, status, retry_count, error_message))
            connection.commit()
            return True
            
        except Exception as e:
            logger.error(f"更新基金 {fund_code} 下载状态失败: {str(e)}")
            return False
        finally:
            # Always release the cursor and return the connection to the pool.
            if cursor is not None:
                cursor.close()
            if connection is not None and connection.is_connected():
                connection.close()
    
    def reset_all_fund_download_status(self) -> bool:
        """Clear the 'success' markers so every fund is eligible for re-download.
        
        Only rows currently marked 'success' are reset; other statuses
        (e.g. 'failed') are preserved.
        
        Returns:
            True if the reset succeeded, False otherwise.
        """
        connection = None
        cursor = None
        try:
            if mysql is None:
                logger.error("MySQL连接器未安装，无法重置下载状态")
                return False
                
            connection = self._get_connection()
            if connection is None:
                logger.error("无法获取数据库连接")
                return False
            cursor = connection.cursor()
            
            # Reset 'success' rows back to NULL status; leave other statuses alone.
            sql = """
                UPDATE fund_download_status 
                SET download_status = NULL,
                    retry_count = 0,
                    error_message = NULL,
                    last_attempt_time = NOW()
                WHERE download_status = 'success'
            """
            
            cursor.execute(sql)
            affected_rows = cursor.rowcount
            connection.commit()
            
            logger.info(f"重置基金下载状态完成，影响 {affected_rows} 条记录")
            
            # Invalidate the fund-code cache so the next read re-queries.
            self.clear_fund_codes_cache()
            
            return True
            
        except Exception as e:
            logger.error(f"重置基金下载状态失败: {str(e)}")
            if connection is not None:
                try:
                    connection.rollback()
                # BUG FIX: was a bare `except:`; narrow to Exception so
                # SystemExit/KeyboardInterrupt are not swallowed.
                except Exception:
                    pass
            return False
        finally:
            if cursor is not None:
                cursor.close()
            if connection is not None and connection.is_connected():
                connection.close()
    
    def get_fund_codes_from_db(self, limit: Optional[int] = None) -> List[str]:
        """Read fund codes from fund_info, filtered by fund_download_status (cached).
        
        Only funds that have NOT been successfully downloaded are returned:
        funds with no status row, a NULL status, or a status other than
        'success'.  Results are cached for ``self._cache_ttl`` seconds.
        
        Args:
            limit: maximum number of codes to return; None means all.
            
        Returns:
            List of pending fund codes.
        """
        # Serve from the cache while it is still fresh.
        current_time = datetime.now()
        if (self._fund_codes_cache is not None and 
            self._cache_timestamp is not None and 
            (current_time - self._cache_timestamp).total_seconds() < self._cache_ttl):
            logger.debug(f"使用缓存的基金代码列表，缓存大小: {len(self._fund_codes_cache)}")
            if limit:
                return self._fund_codes_cache[:limit]
            return self._fund_codes_cache.copy()
        
        # Cache miss or expired: query the database.
        connection = None
        cursor = None
        
        try:
            connection = self._get_connection()
            if connection is None:
                logger.error("数据库连接失败，无法获取基金代码")
                return []
            
            cursor = connection.cursor()
            
            # BUG FIX: the previous condition included
            # `fds.download_status = 'success'`, i.e. it selected exactly the
            # funds that were ALREADY downloaded.  Pending funds are those
            # with no status row, a NULL status, or a non-success status.
            sql = """
                SELECT DISTINCT fi.fund_code 
                FROM fund_info fi
                LEFT JOIN fund_download_status fds ON fi.fund_code = fds.fund_code
                WHERE fds.fund_code IS NULL
                   OR fds.download_status IS NULL
                   OR fds.download_status <> 'success'
                ORDER BY fi.fund_code
            """
            
            # BUG FIX: always fetch the FULL list, even when a limit is
            # requested — the old code appended `LIMIT n` to the SQL and then
            # cached the truncated list, poisoning later unlimited calls.
            cursor.execute(sql)
            fund_codes = [row[0] for row in cursor.fetchall()]
            
            # Gather statistics for the log line.
            cursor.execute("SELECT COUNT(*) FROM fund_info")
            total_funds = cursor.fetchone()[0]
            
            cursor.execute("SELECT COUNT(*) FROM fund_download_status WHERE download_status = 'success'")
            success_funds = cursor.fetchone()[0]
            
            # Refresh the cache with the complete list.
            self._fund_codes_cache = fund_codes.copy()
            self._cache_timestamp = current_time
            
            logger.info(f"从数据库读取基金代码完成 - 总基金数: {total_funds}, "
                       f"已成功下载: {success_funds}, "
                       f"待下载基金数: {len(fund_codes)}")
            
            if limit:
                return fund_codes[:limit]
            return fund_codes
            
        except Exception as e:
            logger.error(f"从数据库读取基金代码失败: {str(e)}")
            
            # Fall back to the Django ORM (unfiltered) if raw SQL failed.
            try:
                logger.info("尝试使用Django ORM作为备选方案")
                queryset = FundInfo.objects.all()
                fund_codes = list(queryset.values_list('fund_code', flat=True))
                
                self._fund_codes_cache = fund_codes.copy()
                self._cache_timestamp = current_time
                
                logger.warning(f"使用Django ORM获取到 {len(fund_codes)} 个基金代码（未过滤download_status）")
                
                if limit:
                    return fund_codes[:limit]
                return fund_codes
                
            except Exception as orm_error:
                logger.error(f"Django ORM查询也失败: {str(orm_error)}")
                
                # Last resort: a stale cache is better than nothing.
                if self._fund_codes_cache is not None:
                    logger.warning("所有数据库查询失败，使用过期缓存数据")
                    if limit:
                        return self._fund_codes_cache[:limit]
                    return self._fund_codes_cache.copy()
                return []
                
        finally:
            # Close the cursor even when the connection has dropped
            # (the old code skipped cursor.close() in that case).
            if cursor is not None:
                try:
                    cursor.close()
                except Exception:
                    pass
            if connection is not None and hasattr(connection, 'is_connected') and connection.is_connected():
                connection.close()
    
    def clear_fund_codes_cache(self) -> None:
        """Drop the cached fund-code list so the next read hits the database."""
        self._fund_codes_cache, self._cache_timestamp = None, None
        logger.info("基金代码缓存已清除")
    
    def _get_all_fund_codes_for_full_download(self, limit: Optional[int] = None) -> List[str]:
        """Fetch ALL fund codes for overwrite-mode downloads (no status filter).
        
        Args:
            limit: maximum number of codes to return; None means all.
            
        Returns:
            List of fund codes, or [] on error.
        """
        connection = None
        cursor = None
        try:
            connection = self._get_connection()
            if connection is None:
                logger.error("无法获取数据库连接")
                return []
            
            cursor = connection.cursor()
            
            # Read straight from fund_info, ignoring fund_download_status.
            if limit:
                query = "SELECT fund_code FROM fund_info ORDER BY fund_code LIMIT %s"
                cursor.execute(query, (limit,))
            else:
                query = "SELECT fund_code FROM fund_info ORDER BY fund_code"
                cursor.execute(query)
            
            fund_codes = [row[0] for row in cursor.fetchall()]
            
            logger.info(f"覆盖模式获取到 {len(fund_codes)} 个基金代码")
            return fund_codes
            
        except Exception as e:
            logger.error(f"获取覆盖模式基金代码时发生错误: {str(e)}")
            return []
        finally:
            # BUG FIX: the cursor/connection used to leak whenever an
            # exception occurred after they were opened; close unconditionally.
            if cursor is not None:
                try:
                    cursor.close()
                except Exception:
                    pass
            if connection is not None:
                try:
                    connection.close()
                except Exception:
                    pass
    
    def _calculate_retry_delay(self, retry_count: int) -> int:
        """计算指数退避重试延迟
        
        Args:
            retry_count: 当前重试次数
            
        Returns:
            延迟时间（秒）
        """
        # 指数退避算法: base_delay * (2 ^ retry_count) + 随机抖动
        delay = self._base_retry_delay * (2 ** retry_count)
        # 添加随机抖动，避免雷群效应
        jitter = random.uniform(0.5, 1.5)
        delay = int(delay * jitter)
        # 限制最大延迟时间
        return min(delay, self._max_retry_delay)
    
    def _init_connection_pool(self) -> None:
        """Create the MySQL connection pool; leaves it as None on failure."""
        # Skip pooling entirely when the optional MySQL driver is missing.
        if mysql is None or pooling is None:
            logger.warning("MySQL连接器未安装，无法创建连接池")
            return
        
        try:
            pool_config = {
                **self.db_config,
                'pool_name': 'fund_nav_pool',
                'pool_size': 10,  # number of pooled connections
                'pool_reset_session': True,
                'autocommit': True
            }
            
            self._connection_pool = pooling.MySQLConnectionPool(**pool_config)
            logger.info("数据库连接池初始化成功")
        except Exception as e:
            # Pool creation failed — _get_connection() will fall back to
            # opening direct connections.
            logger.error(f"数据库连接池初始化失败: {str(e)}")
            self._connection_pool = None
    
    def _get_connection(self):
        """Return a DB connection: pooled when possible, otherwise direct.
        
        Returns:
            A MySQL connection object, or None if no connection could be made.
        """
        pool = self._connection_pool
        if pool is not None:
            try:
                return pool.get_connection()
            except Exception as e:
                logger.error(f"从连接池获取连接失败: {str(e)}")
        
        # Pool unavailable or exhausted: open a direct connection instead.
        if mysql is not None:
            try:
                return mysql.connector.connect(**self.db_config)
            except Exception as e:
                logger.error(f"创建数据库连接失败: {str(e)}")
        
        return None
    
    def get_downloaded_fund_codes(self) -> List[str]:
        """Return fund codes that already have NAV rows in the database.
        
        Returns:
            Fund codes present in fund_nav_detail_new, or [] on error.
        """
        connection = None
        cursor = None
        try:
            connection = self._get_connection()
            if connection is None:
                logger.error("无法获取数据库连接")
                return []
            cursor = connection.cursor()
            
            # Distinct codes that already have NAV data (KEY-partitioned table).
            query = "SELECT DISTINCT fund_code FROM fund_nav_detail_new"
            cursor.execute(query)
            
            downloaded_codes = [row[0] for row in cursor.fetchall()]
            logger.info(f"已下载净值数据的基金数量: {len(downloaded_codes)}")
            
            return downloaded_codes
            
        except Error as e:
            # MySQL driver errors (query/connection issues).
            logger.error(f"查询已下载基金代码失败: {str(e)}")
            return []
        except Exception as e:
            logger.error(f"数据库连接失败: {str(e)}")
            return []
        finally:
            if cursor is not None:
                cursor.close()
            if connection is not None and connection.is_connected():
                connection.close()
    
    def get_attempted_fund_codes(self) -> List[str]:
        """Return fund codes that have already been attempted (success or failure).
        
        Computed as: all known fund codes minus those still pending download.
        
        Returns:
            List of attempted fund codes, or [] on error.
        """
        try:
            # BUG FIX: the previous version opened a database connection and
            # cursor here that were never used — both helpers below manage
            # their own connections.  The useless resources have been removed.
            all_codes = self.get_fund_codes_from_db()
            pending_codes = set(self.get_pending_fund_codes())
            
            attempted_codes = [code for code in all_codes if code not in pending_codes]
            logger.info(f"已尝试下载的基金数量: {len(attempted_codes)}")
            
            return attempted_codes
            
        except Exception as e:
            logger.error(f"查询已尝试下载基金代码失败: {str(e)}")
            return []
    
    def get_pending_fund_codes(self, limit: Optional[int] = None) -> List[str]:
        """Return codes of funds whose NAV data has not been downloaded yet.
        
        Args:
            limit: maximum number of codes to return.
            
        Returns:
            Pending fund codes (all known codes minus the already-downloaded set).
        """
        every_code = self.get_fund_codes_from_db()
        already_done = set(self.get_downloaded_fund_codes())
        
        pending_codes = [code for code in every_code if code not in already_done]
        if limit:
            pending_codes = pending_codes[:limit]
        
        logger.info(f"待下载净值数据的基金数量: {len(pending_codes)}")
        return pending_codes
    
    def _main_download_worker(self, fund_codes: List[str]) -> None:
        """Main download loop: fan funds out to a thread pool and track progress.
        
        Args:
            fund_codes: fund codes to download
        """
        total_count = len(fund_codes)
        completed_count = 0
        
        logger.info(f"启动线程池，最大工作线程数: {self._max_workers}")
        
        with ThreadPoolExecutor(max_workers=self._max_workers) as executor:
            # Submit one download task per fund; stop submitting early if a
            # stop has been requested.
            future_to_fund = {}
            for fund_code in fund_codes:
                if self.is_stopped():
                    break
                future = executor.submit(self._download_single_fund, fund_code)
                future_to_fund[future] = fund_code
            
            # Consume results in completion order.
            from concurrent.futures import as_completed
            for future in as_completed(future_to_fund):
                if self.is_stopped():
                    logger.info("主下载线程收到停止信号")
                    break
                    
                fund_code = future_to_fund[future]
                completed_count += 1
                
                try:
                    success = future.result()
                    if success:
                        self._success_queue.put(fund_code)
                    # Progress counters; qsize()/len() are best-effort
                    # approximations while other threads are still running.
                    success_count = self._success_queue.qsize()
                    failed_count = len(self._failed_tasks)
                    self._update_progress(
                        f"已完成 {completed_count}/{total_count} 只基金下载",
                        current_index=completed_count,
                        total_count=total_count,
                        success_count=success_count,
                        failed_count=failed_count
                    )
                except Exception as e:
                    logger.error(f"处理基金 {fund_code} 下载结果时发生错误: {str(e)}")
        
        success_count = self._success_queue.qsize()
        failed_count = len(self._failed_tasks)
        logger.info(f"主下载线程完成，成功: {success_count}，失败: {failed_count}")
    
    def _download_single_fund(self, fund_code: str) -> bool:
        """Download and persist NAV data for one fund (runs on a pool worker).
        
        Args:
            fund_code: fund code
            
        Returns:
            True if the data was fetched and saved, False otherwise.
        """
        try:
            # Abort quickly when a stop has been requested.
            if self.is_stopped():
                return False
            
            # Mark the fund as in progress.
            self.update_fund_download_status(fund_code, 'downloading')
            
            # Fetch the NAV records from the source site.
            nav_data = self.get_fund_nav_data(fund_code)
            
            if nav_data:
                # Persist to the database.
                if self.save_to_database(nav_data):
                    # Mark as successful.
                    self.update_fund_download_status(fund_code, 'success')
                    # Record-count details only at DEBUG level to keep logs small.
                    logger.debug(f"基金 {fund_code} 净值数据下载成功，共 {len(nav_data)} 条记录")
                    return True
                else:
                    # Save failed: hand over to the retry machinery.
                    error_msg = "数据保存失败"
                    self._add_to_failed_queue(fund_code, error_msg)
                    logger.warning(f"基金 {fund_code} 数据保存失败")
                    return False
            else:
                # Fetch failed: hand over to the retry machinery.
                error_msg = "净值数据获取失败"
                self._add_to_failed_queue(fund_code, error_msg)
                logger.warning(f"基金 {fund_code} 数据获取失败")
                return False
                
        except Exception as e:
            error_msg = f"下载异常: {str(e)}"
            self._add_to_failed_queue(fund_code, error_msg)
            logger.error(f"下载基金 {fund_code} 净值数据时发生错误: {str(e)}")
            return False
        finally:
            # Random pause so requests to the source site are not too frequent.
            delay = random.uniform(0.5, 2.0)
            time.sleep(delay)
    
    def _add_to_failed_queue(self, fund_code: str, error_msg: str) -> None:
        """Record a failed download and enqueue it for the retry worker.
        
        Args:
            fund_code: fund whose download failed
            error_msg: description of the failure
        """
        current_time = datetime.now()
        
        # FIX: this method is called concurrently from multiple pool workers;
        # guard the read-modify-write on _failed_tasks with the state lock.
        with self._lock:
            if fund_code in self._failed_tasks:
                # Update the existing failure record.
                failed_task = self._failed_tasks[fund_code]
                failed_task.retry_count += 1
                failed_task.last_error = error_msg
                failed_task.last_attempt_time = current_time
            else:
                # First failure for this fund.
                failed_task = FailedTask(
                    fund_code=fund_code,
                    retry_count=1,
                    last_error=error_msg,
                    first_attempt_time=current_time,
                    last_attempt_time=current_time
                )
                self._failed_tasks[fund_code] = failed_task
        
        # Persist the failure status (outside the lock: this hits the database).
        self.update_fund_download_status(fund_code, 'failed', failed_task.retry_count, error_msg)
        
        # Hand the task over to the retry worker.
        self._failed_queue.put(failed_task)
        
        logger.info(f"基金 {fund_code} 添加到失败队列，重试次数: {failed_task.retry_count}")
    
    def _retry_worker(self) -> None:
        """Retry-thread loop: re-attempt downloads pulled from the failed queue.
        
        Runs until a stop is requested.  Applies exponential backoff between
        attempts and gives up on a fund once it exceeds the retry ceiling.
        """
        logger.info("重试线程启动")
        
        while not self.is_stopped():
            failed_task = None
            try:
                # Block briefly so the stop flag is re-checked regularly.
                try:
                    failed_task = self._failed_queue.get(timeout=5)
                except queue.Empty:
                    continue
                
                fund_code = failed_task.fund_code
                
                # Give up once the retry ceiling has been exceeded.
                if failed_task.retry_count > self._max_retries:
                    logger.warning(f"基金 {fund_code} 超过最大重试次数 {self._max_retries}，标记为最终失败")
                    self.update_fund_download_status(fund_code, 'max_retries_exceeded', failed_task.retry_count, failed_task.last_error)
                    if fund_code in self._failed_tasks:
                        del self._failed_tasks[fund_code]
                    self._failed_queue.task_done()
                    continue
                
                # Exponential backoff before retrying.
                retry_delay = self._calculate_retry_delay(failed_task.retry_count)
                logger.info(f"基金 {fund_code} 等待 {retry_delay} 秒后进行第 {failed_task.retry_count + 1} 次重试")
                time.sleep(retry_delay)
                
                # A stop may have arrived while sleeping; requeue and exit.
                if self.is_stopped():
                    self._failed_queue.put(failed_task)
                    break
                
                logger.info(f"开始重试基金 {fund_code}，第 {failed_task.retry_count + 1} 次尝试")
                
                # Mark the fund as in progress again.
                self.update_fund_download_status(fund_code, 'downloading', failed_task.retry_count)
                
                # Re-attempt the download.
                nav_data = self.get_fund_nav_data(fund_code)
                
                if nav_data:
                    if self.save_to_database(nav_data):
                        # Retry succeeded: mark success and drop the failure record.
                        logger.info(f"基金 {fund_code} 重试成功，共 {len(nav_data)} 条记录")
                        self.update_fund_download_status(fund_code, 'success')
                        self._success_queue.put(fund_code)
                        if fund_code in self._failed_tasks:
                            del self._failed_tasks[fund_code]
                    else:
                        # Save failed again: bump counters and requeue.
                        error_msg = "数据保存失败"
                        self._update_failed_task_and_requeue(failed_task, error_msg)
                        logger.error(f"基金 {fund_code} 重试时数据保存失败")
                else:
                    # Fetch failed again: bump counters and requeue.
                    error_msg = "净值数据获取失败"
                    self._update_failed_task_and_requeue(failed_task, error_msg)
                    logger.error(f"基金 {fund_code} 重试时数据获取失败")
                
                self._failed_queue.task_done()
                
            except Exception as e:
                logger.error(f"重试线程发生异常: {str(e)}")
                # BUG FIX: the old `'failed_task' in locals()` check was always
                # true (the name is pre-initialised to None each iteration), so
                # a failure before the queue.get() could enqueue None and crash
                # a later iteration.  Only requeue an actual task object.
                if failed_task is not None:
                    self._failed_queue.put(failed_task)
                time.sleep(5)  # back off after an unexpected error
        
        logger.info("重试线程结束")
    
    def _update_failed_task_and_requeue(self, failed_task: FailedTask, error_msg: str) -> None:
        """Record one more failed attempt on *failed_task* and push it back onto the retry queue.

        Args:
            failed_task: The task whose bookkeeping fields get refreshed.
            error_msg: Human-readable description of what went wrong.
        """
        # Refresh the task's failure bookkeeping before it re-enters the queue.
        failed_task.last_attempt_time = datetime.now()
        failed_task.last_error = error_msg
        failed_task.retry_count = failed_task.retry_count + 1

        # Persist the failure (with the bumped retry counter) to the database,
        # then hand the task back to the retry worker.
        self.update_fund_download_status(
            failed_task.fund_code, 'failed', failed_task.retry_count, error_msg
        )
        self._failed_queue.put(failed_task)
    
    def stop_threads(self) -> None:
        """Signal every worker thread to stop and wait (bounded) for them to exit."""
        logger.info("正在停止所有线程...")

        # Flip the shared stop flag so the worker loops can bail out.
        self.stop_download()

        # Join each known worker thread with a 10s cap so shutdown never hangs.
        for attr, waiting_msg in (('_main_thread', "等待主下载线程结束..."),
                                  ('_retry_thread', "等待重试线程结束...")):
            worker = getattr(self, attr, None)
            if worker and worker.is_alive():
                logger.info(waiting_msg)
                worker.join(timeout=10)

        logger.info("所有线程已停止")
    
    def get_multithread_download_statistics(self) -> Dict[str, Any]:
        """Collect statistics for the multithreaded download run.

        Returns:
            Dict with fund counts, success/failed queue sizes, the total
            number of persisted NAV records, and the current status/task/
            progress info. All-zero values are returned on any error.
        """
        try:
            # Total number of funds known to the system.
            total_funds = len(self.get_fund_codes_from_db())

            # Size of the success queue (0 if the run never started).
            success_count = self._success_queue.qsize() if hasattr(self, '_success_queue') else 0

            # Number of funds currently tracked as failed.
            failed_count = len(self._failed_tasks) if hasattr(self, '_failed_tasks') else 0

            # Count NAV rows already persisted.
            # BUG FIX: the original placed the cursor/COUNT statements after an
            # early `return`, making them unreachable, and the `else` branch
            # forced the count to 0 whenever a connection WAS available.
            # Now we query when connected and fall back to 0 otherwise.
            total_nav_records = 0
            connection = self._get_connection()
            if connection is not None:
                cursor = None
                try:
                    cursor = connection.cursor()
                    cursor.execute("SELECT COUNT(*) FROM fund_nav_detail_new")
                    total_nav_records = cursor.fetchone()[0]
                finally:
                    if cursor is not None:
                        cursor.close()
                    connection.close()
            else:
                logger.error("无法获取数据库连接")

            return {
                'total_funds': total_funds,
                'success_count': success_count,
                'failed_count': failed_count,
                'processing_count': getattr(self, '_processed_funds', 0),
                'total_nav_records': total_nav_records,
                'status': self.status.value,
                'current_task': getattr(self, '_current_task', ''),
                'progress_info': self.get_progress_info()
            }

        except Exception as e:
            logger.error(f"获取多线程下载统计信息时发生错误: {str(e)}")
            return {
                'total_funds': 0,
                'success_count': 0,
                'failed_count': 0,
                'processing_count': 0,
                'total_nav_records': 0,
                'status': self.status.value,
                'current_task': '',
                'progress_info': {}
            }
    
    def download_all_fund_nav(self, limit: Optional[int] = None) -> bool:
        """Download NAV data for all funds with worker threads (overwrite mode).

        Clears every fund's download status, then starts a retry worker and a
        main download worker, and blocks until the main worker finishes and
        the failed-task queue drains (bounded by a 5-minute timeout).

        Args:
            limit: Optional cap on the number of funds to download.

        Returns:
            True when the run completes or is stopped cleanly; False when the
            status reset fails or an unexpected exception occurs.
        """
        try:
            # Mark the scraper busy and clear any lingering stop request.
            self.set_status(DownloadStatus.DOWNLOADING)
            self._current_task = "基金净值数据多线程批量下载（覆盖模式）"
            self.reset_stop_flag()
            
            # Overwrite mode: reset every fund's download status first.
            logger.info("重置所有基金下载状态...")
            if not self.reset_all_fund_download_status():
                logger.error("重置基金下载状态失败")
                self.set_status(DownloadStatus.FAILED)
                return False
            
            # Fetch ALL fund codes (no pending/failed filtering in overwrite mode).
            fund_codes = self._get_all_fund_codes_for_full_download(limit)
            
            if not fund_codes:
                logger.info("没有需要下载的基金")
                self.set_status(DownloadStatus.COMPLETED)
                return True
            
            total_count = len(fund_codes)
            
            # Record the start time under the lock for progress reporting.
            with self._lock:
                self._progress_info['start_time'] = datetime.now()
            
            logger.info(f"开始使用多线程下载 {total_count} 只基金的净值数据")
            
            # Retry worker: re-processes funds placed on the failed queue.
            self._retry_thread = threading.Thread(target=self._retry_worker, daemon=True)
            self._retry_thread.start()
            logger.info("重试线程已启动")
            
            # Main worker: downloads each fund code in turn.
            self._main_thread = threading.Thread(target=self._main_download_worker, args=(fund_codes,), daemon=True)
            self._main_thread.start()
            logger.info("主下载线程已启动")
            
            # Block until the main worker has processed every fund.
            self._main_thread.join()
            
            # Give the retry worker a bounded window to drain the failed queue.
            logger.info("等待重试队列处理完成...")
            timeout = 300  # 5-minute cap so we never wait forever
            start_time = time.time()
            
            while not self._failed_queue.empty() and (time.time() - start_time) < timeout:
                if self.is_stopped():
                    break
                time.sleep(1)
            
            # Final status: STOPPED when a stop was requested, else COMPLETED.
            if self.is_stopped():
                self.set_status(DownloadStatus.STOPPED)
                logger.info("多线程下载被停止")
            else:
                self.set_status(DownloadStatus.COMPLETED)
                logger.info("多线程下载完成")
            
            # Log the final success/failure tallies.
            success_count = self._success_queue.qsize()
            failed_count = len(self._failed_tasks)
            logger.info(f"最终统计 - 成功: {success_count}, 失败: {failed_count}")
            
            return True
            
        except Exception as e:
            logger.error(f"多线程下载过程中发生错误: {str(e)}")
            self.set_status(DownloadStatus.FAILED)
            return False
    
    def continue_download(self, limit: Optional[int] = None) -> bool:
        """Resume downloading NAV data for funds not yet completed.

        Same thread orchestration as download_all_fund_nav, but only funds
        still pending (per get_pending_fund_codes) are queued — existing
        download state is NOT reset.

        Args:
            limit: Optional cap on the number of funds to download.

        Returns:
            True when the run completes or is stopped cleanly; False on an
            unexpected exception.
        """
        try:
            # Mark the scraper busy and clear any lingering stop request.
            self.set_status(DownloadStatus.DOWNLOADING)
            self._current_task = "基金净值数据断点续传下载"
            self.reset_stop_flag()
            
            # Incremental mode: only fetch funds that still need downloading.
            fund_codes = self.get_pending_fund_codes(limit)
            
            if not fund_codes:
                logger.info("没有需要下载的基金")
                self.set_status(DownloadStatus.COMPLETED)
                return True
            
            total_count = len(fund_codes)
            
            # Record the start time under the lock for progress reporting.
            with self._lock:
                self._progress_info['start_time'] = datetime.now()
            
            logger.info(f"开始断点续传下载 {total_count} 只基金的净值数据")
            
            # Retry worker: re-processes funds placed on the failed queue.
            self._retry_thread = threading.Thread(target=self._retry_worker, daemon=True)
            self._retry_thread.start()
            logger.info("重试线程已启动")
            
            # Main worker: downloads each pending fund code in turn.
            self._main_thread = threading.Thread(target=self._main_download_worker, args=(fund_codes,), daemon=True)
            self._main_thread.start()
            logger.info("主下载线程已启动")
            
            # Block until the main worker has processed every fund.
            self._main_thread.join()
            
            # Give the retry worker a bounded window to drain the failed queue.
            logger.info("等待重试队列处理完成...")
            timeout = 300  # 5-minute cap so we never wait forever
            start_time = time.time()
            
            while not self._failed_queue.empty() and (time.time() - start_time) < timeout:
                if self.is_stopped():
                    break
                time.sleep(1)
            
            # Final status: STOPPED when a stop was requested, else COMPLETED.
            if self.is_stopped():
                self.set_status(DownloadStatus.STOPPED)
                logger.info("断点续传下载被停止")
            else:
                self.set_status(DownloadStatus.COMPLETED)
                logger.info("断点续传下载完成")
            
            # Log the final success/failure tallies.
            success_count = self._success_queue.qsize()
            failed_count = len(self._failed_tasks)
            logger.info(f"断点续传最终统计 - 成功: {success_count}, 失败: {failed_count}")
            
            return True
            
        except Exception as e:
            logger.error(f"断点续传下载过程中发生错误: {str(e)}")
            self.set_status(DownloadStatus.FAILED)
            return False
    
    def get_download_statistics(self) -> Dict[str, Any]:
        """Build a summary of overall download progress.

        Returns:
            Dict with fund counts, a human-readable progress string, the
            persisted NAV record total, and the current status/task/progress
            (zeroed defaults on error).
        """
        try:
            total_funds = len(self.get_fund_codes_from_db())
            # "downloaded" = succeeded; "attempted" = succeeded or failed.
            downloaded_funds = len(self.get_downloaded_fund_codes())
            attempted_funds = len(self.get_attempted_fund_codes())
            pending_funds = total_funds - attempted_funds

            # Count NAV rows already persisted; fall back to 0 on any DB issue.
            total_records = 0
            connection = None
            cursor = None
            try:
                connection = self._get_connection()
                if connection is not None:
                    cursor = connection.cursor()
                    cursor.execute("SELECT COUNT(*) FROM fund_nav_detail_new")
                    row = cursor.fetchone()
                    if row:
                        total_records = row[0]
            except Exception as e:
                logger.error(f"查询净值记录总数失败: {str(e)}")
            finally:
                if cursor is not None:
                    cursor.close()
                if connection is not None and connection.is_connected():
                    connection.close()

            if total_funds > 0:
                progress_text = f"{attempted_funds}/{total_funds} ({attempted_funds/total_funds*100:.1f}%)"
            else:
                progress_text = "0/0 (0%)"

            progress_info = self.get_progress_info()

            return {
                'total_funds': total_funds,
                'downloaded_funds': attempted_funds,  # the UI shows the attempted count here
                'successful_funds': downloaded_funds,  # funds that actually succeeded
                'pending_funds': pending_funds,
                'download_progress': progress_text,
                'total_nav_records': total_records,
                'current_status': progress_info['status'],
                'current_task': progress_info['current_task'],
                'current_progress': progress_info['progress']
            }

        except Exception as e:
            logger.error(f"获取下载统计信息失败: {str(e)}")
            return {
                'total_funds': 0,
                'downloaded_funds': 0,
                'successful_funds': 0,
                'pending_funds': 0,
                'download_progress': "0/0 (0%)",
                'total_nav_records': 0,
                'current_status': self.status.value,
                'current_task': self._current_task,
                'current_progress': self._progress_info.copy()
            }
        
    def get_fund_nav_data(self, fund_code: str, start_date: Optional[str] = None, end_date: Optional[str] = None) -> List[Dict]:
        """
        Fetch a fund's historical NAV records from the eastmoney pingzhongdata endpoint.

        Args:
            fund_code: fund code
            start_date: inclusive lower bound (YYYY-MM-DD), optional
            end_date: inclusive upper bound (YYYY-MM-DD), optional

        Returns:
            A list of NAV record dicts; empty list on any failure.
        """
        try:
            # The pingzhongdata endpoint serves a JS file embedding the NAV series.
            url = f'http://fund.eastmoney.com/pingzhongdata/{fund_code}.js'
            logger.info(f"正在获取基金 {fund_code} 的净值数据...")

            # Download the JavaScript payload.
            response = self.session.get(url, timeout=30)
            response.raise_for_status()
            response.encoding = 'utf-8'

            # Pull the NAV series out of the JavaScript source.
            records = self._parse_pingzhong_data(response.text, fund_code)

            # Apply the optional date window.
            if start_date or end_date:
                records = self._filter_by_date(records, start_date, end_date)

            logger.info(f"成功获取基金 {fund_code} 的 {len(records)} 条净值数据")
            return records

        except Exception as e:
            logger.error(f"获取基金 {fund_code} 净值数据失败: {str(e)}")
            return []
    
    def get_fund_nav_data_with_retry(self, fund_code: str, start_date: Optional[str] = None, end_date: Optional[str] = None, max_retries: int = 1) -> List[Dict]:
        """
        Fetch a fund's NAV history, retrying after a 60-second pause on failure.

        Args:
            fund_code: fund code
            start_date: inclusive lower bound (YYYY-MM-DD), optional
            end_date: inclusive upper bound (YYYY-MM-DD), optional
            max_retries: extra attempts allowed after the first one

        Returns:
            NAV record dicts, or an empty list once every attempt failed
            (or the download was stopped mid-retry).
        """
        attempt = 0
        while attempt <= max_retries:
            try:
                if attempt:
                    # Retry path: wait a minute, honouring stop requests.
                    logger.info(f"基金 {fund_code} 开始第 {attempt + 1} 次重试，等待60秒...")
                    time.sleep(60)

                    if self.is_stopped():
                        logger.info("下载已被停止，取消重试")
                        return []

                    result = self.get_fund_nav_data(fund_code, start_date, end_date)
                    if result:
                        logger.info(f"基金 {fund_code} 第 {attempt + 1} 次重试成功")
                        return result
                    logger.warning(f"基金 {fund_code} 第 {attempt + 1} 次重试仍然失败")
                else:
                    # First attempt: no delay.
                    result = self.get_fund_nav_data(fund_code, start_date, end_date)
                    if result:
                        return result
                    logger.warning(f"基金 {fund_code} 第 {attempt + 1} 次尝试获取数据失败，准备重试")

            except Exception as e:
                logger.error(f"基金 {fund_code} 第 {attempt + 1} 次尝试发生异常: {str(e)}")
                if attempt < max_retries:
                    logger.info(f"基金 {fund_code} 将在60秒后进行第 {attempt + 2} 次重试")
                    time.sleep(60)

                    if self.is_stopped():
                        logger.info("下载已被停止，取消重试")
                        return []

            attempt += 1

        logger.error(f"基金 {fund_code} 经过 {max_retries + 1} 次尝试后仍然失败")
        return []
    
    def _parse_table_data(self, table, fund_code: str) -> List[Dict]:
        """
        Extract NAV records from an HTML table (BeautifulSoup element).

        Args:
            table: BeautifulSoup table element
            fund_code: fund code attached to every record

        Returns:
            List of NAV record dicts; rows that fail to parse are skipped.
        """
        records: List[Dict] = []

        # Skip the header row; each data row needs at least 6 cells.
        for row in table.find_all('tr')[1:]:
            cells = row.find_all('td')
            if len(cells) < 6:
                continue
            try:
                texts = [cell.get_text().strip() for cell in cells]
                dividend_raw = texts[6] if len(texts) > 6 else ''

                record = {
                    'fund_code': fund_code,
                    'nav_date': self._parse_date(texts[0]),
                    'unit_net_value': self._parse_decimal(texts[1]),
                    'accumulated_net_value': self._parse_decimal(texts[2]),
                    # The growth-rate cell carries a percent sign; drop it first.
                    'daily_growth_rate': self._parse_decimal(texts[3].replace('%', '')),
                    'purchase_status': texts[4],
                    'redemption_status': texts[5],
                    'dividend_info': self._parse_decimal(dividend_raw) if dividend_raw and dividend_raw.strip() else None
                }

                # Keep only rows with a usable date and unit NAV.
                if record['nav_date'] and record['unit_net_value']:
                    records.append(record)

            except Exception as e:
                logger.warning(f"解析表格行数据失败: {str(e)}")
                continue

        return records
    
    def _parse_pingzhong_data(self, js_content: str, fund_code: str) -> List[Dict]:
        """
        解析天天基金pingzhongdata接口的JavaScript数据
        
        Args:
            js_content: JavaScript内容
            fund_code: 基金代码
            
        Returns:
            净值数据列表
        """
        nav_data = []
        try:
            # 提取净值走势数据 Data_netWorthTrend
            net_worth_pattern = r"Data_netWorthTrend\s*=\s*(\[\{.+?\}\]);.*?\/\*累计净值走势\*\/"
            net_worth_match = re.search(net_worth_pattern, js_content, re.DOTALL)
            
            # 提取累计净值走势数据 Data_ACWorthTrend
            ac_worth_pattern = r"Data_ACWorthTrend\s*=\s*(\[\[.*?\]\]);"
            ac_worth_match = re.search(ac_worth_pattern, js_content, re.DOTALL)
            
            # 创建日期到累计净值的映射
            ac_nav_map = {}
            if ac_worth_match:
                try:
                    ac_worth_data = json.loads(ac_worth_match.group(1))
                    for item in ac_worth_data:
                        if isinstance(item, list) and len(item) >= 2:
                            timestamp = item[0]
                            acc_nav = item[1]  # 累计净值
                            if timestamp and acc_nav:
                                date_str = datetime.fromtimestamp(timestamp / 1000).strftime('%Y-%m-%d')
                                ac_nav_map[date_str] = float(acc_nav)
                except Exception as e:
                    logger.warning(f"解析累计净值数据失败: {str(e)}")
            
            # 解析单位净值数据
            if net_worth_match:
                net_worth_data = json.loads(net_worth_match.group(1))
                
                for item in net_worth_data:
                    if isinstance(item, dict):
                        # 解析时间戳 (毫秒)
                        timestamp = item.get('x', 0)
                        unit_nav = item.get('y', 0)  # 单位净值
                        
                        if timestamp and unit_nav:
                            date_str = datetime.fromtimestamp(timestamp / 1000).strftime('%Y-%m-%d')
                            
                            # 获取对应日期的累计净值，如果没有则使用单位净值
                            accumulated_nav = ac_nav_map.get(date_str, float(unit_nav))
                            
                            nav_data.append({
                                'fund_code': fund_code,
                                'nav_date': date_str,
                                'unit_net_value': float(unit_nav),
                                'accumulated_net_value': accumulated_nav,
                                'daily_growth_rate': 0.0,
                                'purchase_status': '',
                                'redemption_status': '',
                                'dividend_info': None
                            })
            
            # 按日期排序（最新的在前）
            nav_data.sort(key=lambda x: x['nav_date'], reverse=True)
            
        except Exception as e:
            logger.error(f"解析pingzhongdata数据失败: {str(e)}")
        
        return nav_data
    
    def _parse_js_data(self, html_content: str, fund_code: str) -> List[Dict]:
        """
        Extract NAV records from an embedded `var apidata = {...};` JS blob.

        Args:
            html_content: page HTML containing the script
            fund_code: fund code attached to every record

        Returns:
            NAV record dicts; empty when the blob is absent or unparseable.
        """
        records: List[Dict] = []

        try:
            # Locate the JSON object assigned to `apidata` in the page source.
            match = re.search(r'var\s+apidata\s*=\s*(\{.*?\});', html_content, re.DOTALL)
            if not match:
                return records

            payload = json.loads(match.group(1))
            items = payload.get('Data')
            if not isinstance(items, list):
                return records

            for item in items:
                try:
                    record = {
                        'fund_code': fund_code,
                        'nav_date': self._parse_date(item.get('FSRQ', '')),
                        'unit_net_value': self._parse_decimal(item.get('DWJZ', '')),
                        'accumulated_net_value': self._parse_decimal(item.get('LJJZ', '')),
                        'daily_growth_rate': self._parse_decimal(item.get('JZZZL', '')),
                        'purchase_status': item.get('SGZT', ''),
                        'redemption_status': item.get('SHZT', ''),
                        'dividend_info': item.get('FHSP', '')
                    }

                    # Keep only rows with a usable date and unit NAV.
                    if record['nav_date'] and record['unit_net_value']:
                        records.append(record)

                except Exception as e:
                    logger.warning(f"解析JS数据项失败: {str(e)}")
                    continue

        except Exception as e:
            logger.warning(f"解析JavaScript数据失败: {str(e)}")

        return records
    
    def _parse_date(self, date_str: str) -> Optional[str]:
        """
        解析日期字符串
        
        Args:
            date_str: 日期字符串
            
        Returns:
            格式化的日期字符串 (YYYY-MM-DD) 或 None
        """
        try:
            # 尝试多种日期格式
            date_formats = ['%Y-%m-%d', '%Y/%m/%d', '%Y.%m.%d']
            
            for fmt in date_formats:
                try:
                    parsed_date = datetime.strptime(date_str, fmt)
                    return parsed_date.strftime('%Y-%m-%d')
                except ValueError:
                    continue
            
            return None
            
        except Exception:
            return None
    
    def _parse_decimal(self, value_str: str) -> Optional[float]:
        """
        解析数值字符串
        
        Args:
            value_str: 数值字符串
            
        Returns:
            浮点数或None
        """
        try:
            if not value_str or value_str in ['--', '-', '']:
                return None
            
            # 清理字符串
            cleaned = re.sub(r'[^\d.-]', '', value_str)
            return float(cleaned) if cleaned else None
            
        except Exception:
            return None
    
    def _filter_by_date(self, nav_data: List[Dict], start_date: Optional[str] = None, end_date: Optional[str] = None) -> List[Dict]:
        """
        按日期范围过滤数据
        
        Args:
            nav_data: 净值数据列表
            start_date: 开始日期
            end_date: 结束日期
            
        Returns:
            过滤后的数据列表
        """
        if not start_date and not end_date:
            return nav_data
        
        filtered_data = []
        
        for record in nav_data:
            nav_date = record.get('nav_date')
            if not nav_date:
                continue
            
            include = True
            
            if start_date and nav_date < start_date:
                include = False
            
            if end_date and nav_date > end_date:
                include = False
            
            if include:
                filtered_data.append(record)
        
        return filtered_data
    
    def save_to_database(self, nav_data: List[Dict]) -> bool:
        """
        Persist NAV records into the KEY-partitioned `fund_nav_detail_new` table.

        Applies session-level bulk-insert tuning, sorts the rows by
        (fund_code, nav_date), then writes them in batches with REPLACE INTO
        so existing rows for the same key are overwritten.

        Args:
            nav_data: NAV record dicts as produced by the parsers

        Returns:
            True when at least one row was inserted (or there was nothing
            valid to insert); False on connection/database failure.
        """
        if not nav_data:
            logger.warning("没有数据需要保存")
            return True
        
        connection = None
        cursor = None
        try:
            connection = self._get_connection()
            if connection is None:
                logger.error("无法获取数据库连接")
                return False
            cursor = connection.cursor()
            
            # Session-level tuning for large bulk inserts.
            # NOTE(review): the early `return True` paths below skip the
            # restore block; if _get_connection hands out pooled connections,
            # confirm the pool resets these session variables on reuse.
            start_time = time.time()
            optimization_settings = [
                "SET SESSION foreign_key_checks = 0",
                "SET SESSION unique_checks = 0", 
                "SET SESSION sql_log_bin = 0",
                "SET SESSION autocommit = 0",
                "SET SESSION bulk_insert_buffer_size = 512*1024*1024",  # bumped to 512MB
                "SET SESSION sort_buffer_size = 64*1024*1024",          # bumped to 64MB
                "SET SESSION read_buffer_size = 8*1024*1024",           # larger read buffer
                "SET SESSION max_heap_table_size = 512*1024*1024",      # larger in-memory tables
                "SET SESSION tmp_table_size = 512*1024*1024"            # larger temp tables
            ]
            
            for setting in optimization_settings:
                try:
                    cursor.execute(setting)
                except Exception as e:
                    # Some settings require extra privileges; ignore but keep a debug trace.
                    logger.debug(f"设置优化参数失败: {setting}, 错误: {str(e)}")
            
            # Normalise rows: zero-pad fund codes to 6 chars and coerce numerics.
            data_to_insert = []
            fund_code_base = None
            
            for record in nav_data:
                # Fund codes are stored as 6-character, zero-padded strings.
                fund_code = str(record['fund_code']).zfill(6)
                if fund_code_base is None:
                    fund_code_base = fund_code
                
                # Coerce numeric fields; rows with unusable values are skipped.
                try:
                    unit_nav = float(record['unit_net_value']) if record['unit_net_value'] is not None else 0.0
                    accumulated_nav = float(record['accumulated_net_value']) if record['accumulated_net_value'] is not None else 0.0
                    daily_growth_rate = float(record['daily_growth_rate']) if record['daily_growth_rate'] is not None else 0.0
                except (ValueError, TypeError):
                    logger.warning(f"数据类型转换失败，跳过记录: {record}")
                    continue
                
                data_to_insert.append((
                    fund_code,
                    record['nav_date'],
                    unit_nav,
                    accumulated_nav,
                    daily_growth_rate,
                    record.get('purchase_status', ''),
                    record.get('redemption_status', ''),
                    record.get('dividend_info')  # maps to the dividend_per_unit column
                ))
            
            if not data_to_insert:
                logger.warning("没有有效数据需要保存")
                return True
            
            # Sort by (fund_code, nav_date) so inserts hit the KEY partitions in order.
            data_to_insert.sort(key=lambda x: (x[0], x[1]))
            
            # REPLACE INTO instead of INSERT ... ON DUPLICATE KEY UPDATE:
            # on this KEY-partitioned table REPLACE is typically faster.
            replace_sql = """
                REPLACE INTO fund_nav_detail_new 
                (fund_code, nav_date, unit_nav, accumulated_nav, daily_growth_rate, 
                 purchase_status, redemption_status, dividend_per_unit)
                VALUES (%s, %s, %s, %s, %s, %s, %s, %s)
            """
            
            # Pick a batch size proportional to the workload.
            total_records = len(data_to_insert)
            if total_records <= 1000:
                batch_size = total_records  # small payloads go in one shot
            elif total_records <= 10000:
                batch_size = 3000  # medium payloads in 3000-row batches
            else:
                batch_size = 5000  # large payloads use bigger batches
            
            total_inserted = 0
            batch_count = 0
            
            # Insert batch by batch; a failed batch is rolled back and skipped.
            for i in range(0, total_records, batch_size):
                batch_data = data_to_insert[i:i + batch_size]
                batch_count += 1
                
                try:
                    # executemany lets the driver send the whole batch at once.
                    cursor.executemany(replace_sql, batch_data)
                    connection.commit()
                    
                    total_inserted += len(batch_data)
                    
                    # Only log per-batch progress for sizeable payloads.
                    if total_records > 1000:
                        logger.debug(f"批次 {batch_count}: 插入 {len(batch_data)} 条记录，累计 {total_inserted}/{total_records}")
                    
                except Error as batch_error:
                    logger.error(f"批次 {batch_count} 数据插入失败: {str(batch_error)}")
                    # Roll back only this batch...
                    connection.rollback()
                    # ...and carry on with the next one.
                    continue
            
            # Restore the session settings changed above.
            try:
                cursor.execute("SET SESSION foreign_key_checks = 1")
                cursor.execute("SET SESSION unique_checks = 1")
                cursor.execute("SET SESSION sql_log_bin = 1")
                cursor.execute("SET SESSION autocommit = 1")
            except Exception as e:
                logger.debug(f"恢复会话设置失败: {str(e)}")
            
            # Throughput statistics for the log.
            end_time = time.time()
            total_time = end_time - start_time
            
            if total_inserted > 0:
                records_per_second = total_inserted / total_time if total_time > 0 else 0
                logger.info(f"数据保存完成 - 基金: {fund_code_base}, 记录数: {total_inserted}, "
                           f"耗时: {total_time:.2f}秒, 速度: {records_per_second:.0f}条/秒, "
                           f"批次数: {batch_count}")
            else:
                logger.warning(f"数据保存失败 - 基金: {fund_code_base}, 耗时: {total_time:.2f}秒")
            
            return total_inserted > 0
            
        except Error as e:
            logger.error(f"数据库操作失败: {str(e)}")
            if connection:
                connection.rollback()
            return False
            
        finally:
            # Close cursor/connection only while the driver still reports it connected.
            if connection is not None and hasattr(connection, 'is_connected') and connection.is_connected():
                if cursor is not None:
                    cursor.close()
                connection.close()
    
    def query_fund_nav_history(self, fund_code: Optional[str] = None, start_date: Optional[str] = None, 
                              end_date: Optional[str] = None, page: int = 1, page_size: int = 100, 
                              order_by: str = 'nav_date', order_direction: str = 'DESC') -> Dict[str, Any]:
        """
        Query historical fund NAV data with pagination, filtering and sorting.

        Args:
            fund_code: fund code; queries all funds when empty
            start_date: start date, format YYYY-MM-DD
            end_date: end date, format YYYY-MM-DD
            page: page number, starting at 1
            page_size: records per page, capped at 1000
            order_by: sort column, one of: nav_date, unit_nav,
                accumulated_nav, daily_growth_rate, fund_code
            order_direction: ASC or DESC

        Returns:
            Dict with success/data/total/page/page_size and, on success,
            total_pages/has_next/has_prev; on failure an 'error' message.
        """
        connection = None
        cursor = None
        
        try:
            # Normalize parameters. order_by / order_direction are later
            # interpolated into the SQL text, so whitelisting them here also
            # guards against SQL injection.
            if page < 1:
                page = 1
            if page_size < 1 or page_size > 1000:
                page_size = 100
            if order_by not in ['nav_date', 'unit_nav', 'accumulated_nav', 'daily_growth_rate', 'fund_code']:
                order_by = 'nav_date'
            if order_direction.upper() not in ['ASC', 'DESC']:
                order_direction = 'DESC'
            
            connection = self._get_connection()
            if connection is None:
                return {
                    'success': False,
                    'error': '数据库连接失败',
                    'data': [],
                    'total': 0,
                    'page': page,
                    'page_size': page_size
                }
            
            cursor = connection.cursor(dictionary=True)
            
            # Best-effort session tuning; failure here is non-fatal.
            try:
                cursor.execute("SET SESSION read_buffer_size = 8*1024*1024")
                cursor.execute("SET SESSION sort_buffer_size = 32*1024*1024")
            except Exception as e:
                logger.debug(f"设置查询优化参数失败: {str(e)}")
            
            # Build WHERE conditions and pick an index hint.
            where_conditions = []
            params = []
            index_hint = ""
            
            if fund_code:
                # Fund codes are stored as 6-character zero-padded strings.
                fund_code = str(fund_code).zfill(6)
                where_conditions.append("fund_code = %s")
                params.append(fund_code)
                # Prefer the composite (fund_code, nav_date) index when a
                # date range is also supplied.
                if start_date or end_date:
                    index_hint = "USE INDEX (idx_fund_code_date)"
                else:
                    index_hint = "USE INDEX (PRIMARY)"
            elif start_date or end_date:
                # Date-only filters use the nav_date index.
                index_hint = "USE INDEX (idx_nav_date)"
            
            if start_date:
                where_conditions.append("nav_date >= %s")
                params.append(start_date)
            
            if end_date:
                where_conditions.append("nav_date <= %s")
                params.append(end_date)
            
            where_clause = ""
            if where_conditions:
                where_clause = "WHERE " + " AND ".join(where_conditions)
            
            # COUNT strategy: exact count for a single fund; estimates
            # otherwise, because exact counts over the whole table are slow.
            if fund_code:
                count_sql = f"SELECT COUNT(*) as total FROM fund_nav_detail_new {index_hint} {where_clause}"
                cursor.execute(count_sql, params)
                total_result = cursor.fetchone()
                total = total_result['total'] if total_result else 0
            else:
                if not where_conditions:
                    # Whole-table query: use the optimizer's row estimate.
                    cursor.execute("SELECT table_rows FROM information_schema.tables WHERE table_schema = DATABASE() AND table_name = 'fund_nav_detail_new'")
                    total_result = cursor.fetchone()
                    total = total_result['table_rows'] if total_result else 0
                else:
                    # Filtered query: count exactly but cap the scan at
                    # 50000 rows to bound the cost.
                    count_sql = f"SELECT COUNT(*) as total FROM (SELECT 1 FROM fund_nav_detail_new {index_hint} {where_clause} LIMIT 50000) as t"
                    cursor.execute(count_sql, params)
                    total_result = cursor.fetchone()
                    total = total_result['total'] if total_result else 0
            
            # Page offset for the data query.
            offset = (page - 1) * page_size
            
            # Re-pick the index hint based on the sort column.
            if order_by == 'nav_date' and not fund_code:
                # Date sort with no fund filter: force the date index.
                index_hint = "USE INDEX (idx_nav_date)"
            elif order_by in ['nav_date', 'fund_code'] and fund_code:
                # Fund filter plus date/code sort: composite index.
                index_hint = "USE INDEX (idx_fund_code_date)"
            
            data_sql = f"""
                SELECT fund_code, nav_date, unit_nav, accumulated_nav, daily_growth_rate,
                       purchase_status, redemption_status, dividend_per_unit, 
                       created_at, updated_at
                FROM fund_nav_detail_new {index_hint}
                {where_clause}
                ORDER BY {order_by} {order_direction}
                LIMIT %s OFFSET %s
            """
            
            data_params = params + [page_size, offset]
            
            # Time the query so slow ones surface in the logs.
            query_start = time.time()
            cursor.execute(data_sql, data_params)
            records = cursor.fetchall()
            query_time = time.time() - query_start
            
            if query_time > 1.0:  # slow query: over 1 second
                logger.warning(f"慢查询检测 - 耗时: {query_time:.2f}秒, SQL: {data_sql[:200]}...")
            elif query_time > 0.1:  # over 100ms: debug-level trace
                logger.debug(f"查询耗时: {query_time:.3f}秒, 记录数: {len(records)}")
            
            # Serialize rows to JSON-friendly values. Use explicit
            # `is not None` checks: a legitimate stored 0 (e.g. a flat-day
            # daily_growth_rate of 0.0) must not be reported as None, which
            # the previous truthiness test did.
            formatted_records = []
            for record in records:
                formatted_record = {
                    'fund_code': record['fund_code'],
                    'nav_date': record['nav_date'].strftime('%Y-%m-%d') if record['nav_date'] is not None else None,
                    'unit_nav': float(record['unit_nav']) if record['unit_nav'] is not None else None,
                    'accumulated_nav': float(record['accumulated_nav']) if record['accumulated_nav'] is not None else None,
                    'daily_growth_rate': float(record['daily_growth_rate']) if record['daily_growth_rate'] is not None else None,
                    'purchase_status': record['purchase_status'],
                    'redemption_status': record['redemption_status'],
                    # NOTE(review): passed through as stored (possibly Decimal);
                    # confirm downstream serialization handles it.
                    'dividend_per_unit': record['dividend_per_unit'],
                    'created_at': record['created_at'].strftime('%Y-%m-%d %H:%M:%S') if record['created_at'] is not None else None,
                    'updated_at': record['updated_at'].strftime('%Y-%m-%d %H:%M:%S') if record['updated_at'] is not None else None
                }
                formatted_records.append(formatted_record)
            
            return {
                'success': True,
                'data': formatted_records,
                'total': total,
                'page': page,
                'page_size': page_size,
                'total_pages': (total + page_size - 1) // page_size,
                'has_next': page * page_size < total,
                'has_prev': page > 1
            }
            
        except Exception as e:
            logger.error(f"查询基金历史净值数据失败: {str(e)}")
            return {
                'success': False,
                'error': str(e),
                'data': [],
                'total': 0,
                'page': page,
                'page_size': page_size
            }
        finally:
            if cursor is not None:
                cursor.close()
            if connection is not None and connection.is_connected():
                connection.close()
    
    def query_fund_nav_by_code(self, fund_code: str, limit: int = 30) -> Dict[str, Any]:
        """
        Fetch the most recent NAV records for a single fund.

        Args:
            fund_code: fund code
            limit: maximum number of records to return, default 30

        Returns:
            Result dictionary produced by query_fund_nav_history
        """
        # Delegate to the general history query: first page, newest first.
        query_args = {
            'fund_code': fund_code,
            'page': 1,
            'page_size': limit,
            'order_by': 'nav_date',
            'order_direction': 'DESC',
        }
        return self.query_fund_nav_history(**query_args)
    
    def query_fund_nav_by_date_range(self, start_date: str, end_date: str, 
                                   fund_code: Optional[str] = None) -> Dict[str, Any]:
        """
        Fetch NAV records within a date range, oldest first.

        Args:
            start_date: start date, format YYYY-MM-DD
            end_date: end date, format YYYY-MM-DD
            fund_code: optional fund code filter

        Returns:
            Result dictionary produced by query_fund_nav_history
        """
        # Delegate to the general history query: single large page,
        # ascending by date so results read chronologically.
        query_args = {
            'fund_code': fund_code,
            'start_date': start_date,
            'end_date': end_date,
            'page': 1,
            'page_size': 1000,
            'order_by': 'nav_date',
            'order_direction': 'ASC',
        }
        return self.query_fund_nav_history(**query_args)
    
    def get_fund_nav_statistics(self, fund_code: Optional[str] = None) -> Dict[str, Any]:
        """
        Compute aggregate NAV statistics.

        Args:
            fund_code: fund code; aggregates over all funds when empty

        Returns:
            {'success': True, 'statistics': {...}} on success, otherwise
            {'success': False, 'error': ...}
        """
        connection = None
        cursor = None
        
        try:
            connection = self._get_connection()
            if connection is None:
                return {'success': False, 'error': '数据库连接失败'}
            
            cursor = connection.cursor(dictionary=True)
            
            # Optional single-fund filter; codes are stored zero-padded to 6.
            where_clause = ""
            params = []
            if fund_code:
                fund_code = str(fund_code).zfill(6)
                where_clause = "WHERE fund_code = %s"
                params.append(fund_code)
            
            # Single aggregate pass over the NAV table.
            stats_sql = f"""
                SELECT 
                    COUNT(*) as total_records,
                    COUNT(DISTINCT fund_code) as total_funds,
                    MIN(nav_date) as earliest_date,
                    MAX(nav_date) as latest_date,
                    AVG(unit_nav) as avg_unit_nav,
                    MIN(unit_nav) as min_unit_nav,
                    MAX(unit_nav) as max_unit_nav,
                    AVG(daily_growth_rate) as avg_growth_rate,
                    MIN(daily_growth_rate) as min_growth_rate,
                    MAX(daily_growth_rate) as max_growth_rate
                FROM fund_nav_detail_new 
                {where_clause}
            """
            
            cursor.execute(stats_sql, params)
            stats = cursor.fetchone()
            
            if stats:
                # Use explicit `is not None` checks: aggregates can
                # legitimately be 0 (e.g. min_growth_rate of 0.0) and must
                # not be reported as None, as the old truthiness test did.
                return {
                    'success': True,
                    'statistics': {
                        'total_records': stats['total_records'],
                        'total_funds': stats['total_funds'],
                        'earliest_date': stats['earliest_date'].strftime('%Y-%m-%d') if stats['earliest_date'] is not None else None,
                        'latest_date': stats['latest_date'].strftime('%Y-%m-%d') if stats['latest_date'] is not None else None,
                        'avg_unit_nav': float(stats['avg_unit_nav']) if stats['avg_unit_nav'] is not None else None,
                        'min_unit_nav': float(stats['min_unit_nav']) if stats['min_unit_nav'] is not None else None,
                        'max_unit_nav': float(stats['max_unit_nav']) if stats['max_unit_nav'] is not None else None,
                        'avg_growth_rate': float(stats['avg_growth_rate']) if stats['avg_growth_rate'] is not None else None,
                        'min_growth_rate': float(stats['min_growth_rate']) if stats['min_growth_rate'] is not None else None,
                        'max_growth_rate': float(stats['max_growth_rate']) if stats['max_growth_rate'] is not None else None
                    }
                }
            else:
                return {'success': False, 'error': '未找到数据'}
                
        except Exception as e:
            logger.error(f"获取统计信息失败: {str(e)}")
            return {'success': False, 'error': str(e)}
        finally:
            if cursor is not None:
                cursor.close()
            if connection is not None and connection.is_connected():
                connection.close()

    def scrape_fund_nav(self, fund_code: str, start_date: Optional[str] = None, end_date: Optional[str] = None) -> bool:
        """
        Scrape NAV records for one fund and persist them.

        Args:
            fund_code: fund code
            start_date: optional start date
            end_date: optional end date

        Returns:
            True if the data was fetched and saved successfully
        """
        try:
            records = self.get_fund_nav_data(fund_code, start_date, end_date)
            if not records:
                logger.warning(f"未获取到基金 {fund_code} 的净值数据")
                return False
            
            saved = self.save_to_database(records)
            
            # Random delay between requests to avoid hammering the site.
            time.sleep(random.uniform(1, 3))
            
            return saved
        except Exception as e:
            logger.error(f"爬取基金 {fund_code} 数据失败: {str(e)}")
            return False

class FundNavAPI:
    """Query API facade for fund NAV data.

    Validates user-supplied dates and fund codes, then delegates the actual
    database work to FundNavScraper. All methods return plain dicts with a
    'success' flag so callers can serialize them directly.
    """
    
    def __init__(self, db_config: Optional[Dict[str, str]] = None):
        """
        Initialize the API facade.

        Args:
            db_config: database configuration; falls back to
                get_database_config() when omitted
        """
        if db_config is None:
            db_config = get_database_config()
        
        self.scraper = FundNavScraper(db_config)
        logger.info("基金净值查询API初始化完成")
    
    @staticmethod
    def _error_response(message: str, include_data: bool = True) -> Dict[str, Any]:
        """Build the uniform failure payload used by every endpoint.

        Args:
            message: user-facing error text
            include_data: add empty 'data'/'total' keys (list endpoints)
        """
        response: Dict[str, Any] = {'success': False, 'error': message}
        if include_data:
            response['data'] = []
            response['total'] = 0
        return response
    
    def get_fund_nav_history(self, fund_code: Optional[str] = None, start_date: Optional[str] = None, 
                           end_date: Optional[str] = None, page: int = 1, page_size: int = 100, 
                           order_by: str = 'nav_date', order_direction: str = 'DESC') -> Dict[str, Any]:
        """
        Query historical fund NAV data.

        Args:
            fund_code: fund code; queries all funds when empty
            start_date: start date, format YYYY-MM-DD
            end_date: end date, format YYYY-MM-DD
            page: page number, starting at 1
            page_size: records per page, max 1000
            order_by: sort column: nav_date, unit_nav, accumulated_nav,
                daily_growth_rate
            order_direction: ASC or DESC

        Returns:
            Result dictionary from the underlying query
        """
        try:
            # Reject malformed input before touching the database.
            if start_date and not self._validate_date_format(start_date):
                return self._error_response('开始日期格式错误，请使用YYYY-MM-DD格式')
            
            if end_date and not self._validate_date_format(end_date):
                return self._error_response('结束日期格式错误，请使用YYYY-MM-DD格式')
            
            if fund_code and not self._validate_fund_code(fund_code):
                return self._error_response('基金代码格式错误，请输入6位数字')
            
            result = self.scraper.query_fund_nav_history(
                fund_code=fund_code,
                start_date=start_date,
                end_date=end_date,
                page=page,
                page_size=page_size,
                order_by=order_by,
                order_direction=order_direction
            )
            
            logger.info(f"API查询完成: fund_code={fund_code}, 返回{len(result.get('data', []))}条记录")
            return result
            
        except Exception as e:
            logger.error(f"API查询失败: {str(e)}")
            return self._error_response(f'查询失败: {str(e)}')
    
    def get_fund_nav_by_code(self, fund_code: str, limit: int = 30) -> Dict[str, Any]:
        """
        Fetch the most recent NAV records for one fund.

        Args:
            fund_code: fund code
            limit: number of records to return, default 30

        Returns:
            Result dictionary from the underlying query
        """
        if not self._validate_fund_code(fund_code):
            return self._error_response('基金代码格式错误，请输入6位数字')
        
        try:
            result = self.scraper.query_fund_nav_by_code(fund_code, limit)
            logger.info(f"API按代码查询完成: fund_code={fund_code}, 返回{len(result.get('data', []))}条记录")
            return result
        except Exception as e:
            logger.error(f"API按代码查询失败: {str(e)}")
            return self._error_response(f'查询失败: {str(e)}')
    
    def get_fund_nav_by_date_range(self, start_date: str, end_date: str, 
                                 fund_code: Optional[str] = None) -> Dict[str, Any]:
        """
        Fetch NAV records within a date range.

        Args:
            start_date: start date, format YYYY-MM-DD
            end_date: end date, format YYYY-MM-DD
            fund_code: optional fund code filter

        Returns:
            Result dictionary from the underlying query
        """
        if not self._validate_date_format(start_date):
            return self._error_response('开始日期格式错误，请使用YYYY-MM-DD格式')
        
        if not self._validate_date_format(end_date):
            return self._error_response('结束日期格式错误，请使用YYYY-MM-DD格式')
        
        if fund_code and not self._validate_fund_code(fund_code):
            return self._error_response('基金代码格式错误，请输入6位数字')
        
        try:
            result = self.scraper.query_fund_nav_by_date_range(start_date, end_date, fund_code)
            logger.info(f"API按日期范围查询完成: {start_date}到{end_date}, fund_code={fund_code}, 返回{len(result.get('data', []))}条记录")
            return result
        except Exception as e:
            logger.error(f"API按日期范围查询失败: {str(e)}")
            return self._error_response(f'查询失败: {str(e)}')
    
    def get_fund_nav_statistics(self, fund_code: Optional[str] = None) -> Dict[str, Any]:
        """
        Fetch aggregate NAV statistics.

        Args:
            fund_code: fund code; aggregates over all funds when empty

        Returns:
            Statistics dictionary from the underlying query
        """
        if fund_code and not self._validate_fund_code(fund_code):
            # Statistics responses carry no 'data'/'total' keys.
            return self._error_response('基金代码格式错误，请输入6位数字', include_data=False)
        
        try:
            result = self.scraper.get_fund_nav_statistics(fund_code)
            logger.info(f"API统计查询完成: fund_code={fund_code}")
            return result
        except Exception as e:
            logger.error(f"API统计查询失败: {str(e)}")
            return self._error_response(f'查询失败: {str(e)}', include_data=False)
    
    def _validate_date_format(self, date_str: str) -> bool:
        """
        Check that a string parses as a YYYY-MM-DD date.

        Args:
            date_str: date string

        Returns:
            True if the format is valid
        """
        try:
            datetime.strptime(date_str, '%Y-%m-%d')
            return True
        except ValueError:
            return False
    
    def _validate_fund_code(self, fund_code: str) -> bool:
        """
        Check that a string is a plausible fund code.

        Accepts 1-6 digits: shorter codes are zero-padded to six by the
        query layer, so they are deliberately allowed here.

        Args:
            fund_code: fund code

        Returns:
            True if the format is valid
        """
        if not fund_code:
            return False
        
        fund_code = str(fund_code).strip()
        if not fund_code.isdigit():
            return False
        
        # Codes longer than 6 digits cannot be valid.
        if len(fund_code) > 6:
            return False
        
        return True
    
    def get_api_info(self) -> Dict[str, Any]:
        """
        Describe this API: name, version and available endpoints.

        Returns:
            Static metadata dictionary
        """
        return {
            'api_name': '基金净值查询API',
            'version': '1.0.0',
            'description': '提供基金历史净值数据查询功能',
            'endpoints': {
                'get_fund_nav_history': '查询基金历史净值数据（支持分页、排序、筛选）',
                'get_fund_nav_by_code': '根据基金代码查询最近净值数据',
                'get_fund_nav_by_date_range': '根据日期范围查询净值数据',
                'get_fund_nav_statistics': '获取基金净值统计信息'
            },
            'supported_fields': [
                'fund_code', 'nav_date', 'unit_nav', 'accumulated_nav', 
                'daily_growth_rate', 'purchase_status', 'redemption_status', 
                'dividend_per_unit', 'created_at', 'updated_at'
            ]
        }


def main():
    """CLI entry point.

    Logs the current download statistics, dispatches on the first CLI
    argument (batch / continue / stats / api_test; no argument runs a small
    test download), then logs final statistics for the download modes.
    """
    db_config = get_database_config()
    scraper = FundNavScraper(db_config)

    def report_statistics(header: str) -> None:
        # Shared download-progress summary, logged under the given header.
        logger.info(header)
        snapshot = scraper.get_download_statistics()
        logger.info(f"总基金数量: {snapshot['total_funds']}")
        logger.info(f"已下载基金数量: {snapshot['downloaded_funds']}")
        logger.info(f"待下载基金数量: {snapshot['pending_funds']}")
        logger.info(f"下载进度: {snapshot['download_progress']}")
        logger.info(f"净值记录总数: {snapshot['total_nav_records']}")

    report_statistics("=== 基金净值下载统计 ===")

    argv = sys.argv
    mode = argv[1] if len(argv) > 1 else None

    if mode is None:
        # Default mode: small smoke-test download.
        logger.info("默认模式：下载前3个待下载基金进行测试")
        ok = scraper.download_all_fund_nav(3)
        logger.info(f"测试下载{'成功' if ok else '失败'}")
    elif mode == "batch":
        # Bulk download, with an optional count limit from argv.
        limit = int(argv[2]) if len(argv) > 2 else None
        logger.info(f"开始批量下载模式，限制数量: {limit or '无限制'}")
        ok = scraper.download_all_fund_nav(limit)
        logger.info(f"批量下载{'成功' if ok else '失败'}")
    elif mode == "continue":
        # Resume a previously interrupted download run.
        limit = int(argv[2]) if len(argv) > 2 else None
        logger.info(f"开始断点续传模式，限制数量: {limit or '无限制'}")
        ok = scraper.continue_download(limit)
        logger.info(f"断点续传{'成功' if ok else '失败'}")
    elif mode == "stats":
        # Statistics were already printed above; nothing more to do.
        logger.info("统计信息已显示")
        return
    elif mode == "api_test":
        # Exercise each FundNavAPI endpoint against live data.
        logger.info("开始API接口测试")
        api = FundNavAPI(db_config)

        logger.info("=== API信息 ===")
        api_info = api.get_api_info()
        logger.info(f"API名称: {api_info['api_name']}")
        logger.info(f"版本: {api_info['version']}")
        logger.info(f"描述: {api_info['description']}")

        logger.info("\n=== 测试统计信息查询 ===")
        stats_result = api.get_fund_nav_statistics()
        if stats_result.get('success'):
            logger.info(f"统计查询成功: {stats_result}")
        else:
            logger.error(f"统计查询失败: {stats_result.get('error')}")

        logger.info("\n=== 测试历史净值查询 ===")
        history_result = api.get_fund_nav_history(page_size=5)
        if history_result.get('success'):
            logger.info(f"历史查询成功，返回{len(history_result.get('data', []))}条记录")
            if history_result.get('data'):
                logger.info(f"示例数据: {history_result['data'][0]}")
        else:
            logger.error(f"历史查询失败: {history_result.get('error')}")

        # The code/date-range tests need a real fund code from the data.
        if history_result.get('success') and history_result.get('data'):
            test_fund_code = history_result['data'][0]['fund_code']
            logger.info(f"\n=== 测试按基金代码查询 (基金代码: {test_fund_code}) ===")
            code_result = api.get_fund_nav_by_code(test_fund_code, limit=3)
            if code_result.get('success'):
                logger.info(f"按代码查询成功，返回{len(code_result.get('data', []))}条记录")
            else:
                logger.error(f"按代码查询失败: {code_result.get('error')}")

            logger.info(f"\n=== 测试按日期范围查询 (基金代码: {test_fund_code}) ===")
            end_date = datetime.now().strftime('%Y-%m-%d')
            start_date = (datetime.now() - timedelta(days=30)).strftime('%Y-%m-%d')

            date_result = api.get_fund_nav_by_date_range(start_date, end_date, test_fund_code)
            if date_result.get('success'):
                logger.info(f"按日期范围查询成功，返回{len(date_result.get('data', []))}条记录")
            else:
                logger.error(f"按日期范围查询失败: {date_result.get('error')}")

        logger.info("API接口测试完成")
        return
    else:
        logger.error(f"未知模式: {mode}")
        logger.info("可用模式: batch, continue, stats, api_test")
        return

    report_statistics("\n=== 最终统计信息 ===")

# Script entry point: run the CLI dispatcher only when executed directly,
# not when this module is imported for its API classes.
if __name__ == '__main__':
    main()