#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
2025年9月18日A股分时数据同步脚本 - 支持断点续传
专门用于同步指定日期的分钟级数据，支持中断后继续执行
"""
import json
import logging
import os
import sys
import time
from datetime import datetime, timedelta
from typing import Any, Dict, List, Optional, Set, Tuple

# 添加项目根目录到Python路径
sys.path.append(os.path.dirname(os.path.abspath(__file__)))

from sync_stock_minute import StockMinuteSync
from database import DatabaseManager
from config import DATABASE_CONFIG

def setup_logging():
    """Configure root logging to both console and an append-mode UTF-8 log file.

    The log file lands in ``<parent-of-script-dir>/logs/sync_daily_minute_resume.log``.
    NOTE(review): this treats the script directory's *parent* as the project
    root, while the ``sys.path`` tweak at module import time uses the script
    directory itself — confirm which one is intended.
    """
    project_root = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
    log_dir = os.path.join(project_root, 'logs')
    os.makedirs(log_dir, exist_ok=True)

    log_path = os.path.join(log_dir, 'sync_daily_minute_resume.log')
    logging.basicConfig(
        level=logging.INFO,
        format='%(asctime)s [%(levelname)s] %(name)s: %(message)s',
        handlers=[
            logging.StreamHandler(),
            logging.FileHandler(log_path, mode='a', encoding='utf-8'),
        ],
    )

def get_progress_file_path(target_date: str) -> str:
    """Return the per-date progress-file name, e.g. ``sync_progress_20250918.json``."""
    return "sync_progress_{}.json".format(target_date)

def save_progress(target_date: str, completed_stocks: Set[str], failed_stocks: Set[str], 
                 total_records: int, start_time: datetime):
    """Checkpoint the current sync state to the per-date JSON progress file.

    Writes completed/failed stock codes, the running record total, the session
    start time and a last-update timestamp so an interrupted run can resume.
    """
    snapshot = {
        'target_date': target_date,
        'completed_stocks': list(completed_stocks),
        'failed_stocks': list(failed_stocks),
        'total_records': total_records,
        'start_time': start_time.isoformat(),
        'last_update': datetime.now().isoformat(),
    }

    # Human-readable JSON (non-ASCII preserved) so the file can be inspected.
    with open(get_progress_file_path(target_date), 'w', encoding='utf-8') as fh:
        json.dump(snapshot, fh, ensure_ascii=False, indent=2)

def load_progress(target_date: str) -> Optional[Dict[str, Any]]:
    """Load the saved sync progress for *target_date*.

    Returns:
        The parsed progress dict with ``start_time`` converted back to a
        ``datetime``, or ``None`` when no progress file exists or it cannot
        be read/parsed.
    """
    progress_file = get_progress_file_path(target_date)

    if not os.path.exists(progress_file):
        return None

    try:
        with open(progress_file, 'r', encoding='utf-8') as f:
            progress_data = json.load(f)

        # Restore the ISO-8601 timestamp written by save_progress().
        if 'start_time' in progress_data:
            progress_data['start_time'] = datetime.fromisoformat(progress_data['start_time'])

        return progress_data
    except (OSError, ValueError, TypeError) as e:
        # json.JSONDecodeError is a ValueError subclass; fromisoformat raises
        # ValueError/TypeError on malformed or non-string timestamps.
        print(f"加载进度文件失败: {e}")
        return None

def get_completed_stocks_from_db(target_date: str) -> Set[str]:
    """Return stock codes that already have minute rows for *target_date*.

    The check covers the trading session 09:30–15:00 of the given day.
    Returns an empty set when the date is malformed or the query fails.
    """
    db = DatabaseManager(DATABASE_CONFIG)

    # Derive the session window; bail out on a malformed date string.
    try:
        session_day = datetime.strptime(target_date, "%Y%m%d")
    except ValueError:
        return set()
    session_open = session_day.replace(hour=9, minute=30, second=0)
    session_close = session_day.replace(hour=15, minute=0, second=0)

    sql = """
    SELECT DISTINCT stock_code 
    FROM new_stock_minute_data 
    WHERE trade_time >= %s AND trade_time <= %s
    ORDER BY stock_code
    """

    try:
        rows = db.execute_query(sql, (session_open, session_close))
        return {row['stock_code'] for row in rows}
    except Exception as e:
        print(f"查询数据库中的已完成股票失败: {e}")
        return set()

def get_all_stock_codes() -> List[Tuple[str, str]]:
    """Fetch every active stock from ``new_stock_info`` as (code, market) pairs.

    The ``exchange`` column is mapped to the quote-API market prefix
    ('SZSE' -> 'sz', 'SHSE' -> 'sh'); rows with any other exchange are skipped.
    """
    db = DatabaseManager(DATABASE_CONFIG)

    sql = """
    SELECT stock_code, exchange 
    FROM new_stock_info 
    WHERE status = 'active' 
    ORDER BY stock_code
    """
    rows = db.execute_query(sql)

    market_by_exchange = {'SZSE': 'sz', 'SHSE': 'sh'}
    pairs = []
    for row in rows:
        market = market_by_exchange.get(row['exchange'])
        if market is None:
            continue  # unknown exchange — skip
        pairs.append((row['stock_code'], market))

    return pairs

def sync_daily_minute_data_with_resume(target_date: str = "20250918", 
                                     batch_size: int = 10, 
                                     delay: float = 3.0,
                                     resume: bool = True) -> Dict[str, Any]:
    """Sync minute-level data for every active stock on *target_date*, resumably.

    Progress is checkpointed to a per-date JSON file after every batch, so a
    killed run can pick up where it left off. Stocks that already have rows in
    the database for the session window are also treated as completed.

    Args:
        target_date: Trading day in YYYYMMDD format.
        batch_size: Stocks processed per batch between progress checkpoints.
        delay: Seconds slept after each stock and between batches.
        resume: When True, skip stocks recorded as completed or failed.

    Returns:
        Dict with ``success``, stock counts, ``total_records``, ``failed_list``
        and ``duration`` (seconds). On invalid date only ``success``/``error``.
    """
    setup_logging()
    logger = logging.getLogger(__name__)
    
    # Parse the target date and derive the trading-session window.
    try:
        target_datetime = datetime.strptime(target_date, "%Y%m%d")
        start_time = target_datetime.replace(hour=9, minute=30, second=0)  # market open
        end_time = target_datetime.replace(hour=15, minute=0, second=0)    # market close
    except ValueError:
        logger.error(f"日期格式错误: {target_date}，请使用YYYYMMDD格式")
        return {'success': False, 'error': '日期格式错误'}
    
    sync = StockMinuteSync()
    
    # Full universe of (stock_code, market) pairs to consider.
    all_stock_market_list = get_all_stock_codes()
    total_stocks = len(all_stock_market_list)
    
    # Resume bookkeeping; overwritten below when a progress file exists.
    completed_stocks = set()
    failed_stocks = set()
    total_records = 0
    session_start_time = datetime.now()
    
    if resume:
        # First try to restore state from the progress file.
        progress_data = load_progress(target_date)
        if progress_data:
            completed_stocks = set(progress_data.get('completed_stocks', []))
            failed_stocks = set(progress_data.get('failed_stocks', []))
            total_records = progress_data.get('total_records', 0)
            session_start_time = progress_data.get('start_time', session_start_time)
            logger.info(f"从进度文件恢复: 已完成 {len(completed_stocks)} 只，失败 {len(failed_stocks)} 只")
        
        # Then cross-check against the database (covers a lost progress file).
        db_completed_stocks = get_completed_stocks_from_db(target_date)
        if db_completed_stocks:
            logger.info(f"数据库中已有 {len(db_completed_stocks)} 只股票的数据")
            # Union of file-recorded and DB-observed completions.
            completed_stocks.update(db_completed_stocks)
    
    # Stocks still to be fetched in this run.
    remaining_stocks = []
    for stock_code, market in all_stock_market_list:
        if stock_code not in completed_stocks and stock_code not in failed_stocks:
            remaining_stocks.append((stock_code, market))
    
    logger.info(f"开始同步 {target_date} 的分时数据...")
    logger.info(f"目标时间范围: {start_time} - {end_time}")
    logger.info(f"总股票数: {total_stocks}")
    logger.info(f"已完成: {len(completed_stocks)} 只")
    logger.info(f"失败: {len(failed_stocks)} 只")
    logger.info(f"剩余: {len(remaining_stocks)} 只")
    logger.info(f"批次大小: {batch_size}, 延迟: {delay}秒")
    
    # Initial result snapshot; updated again after a full run completes.
    result = {
        'success': False,
        'total_stocks': total_stocks,
        'completed_stocks': len(completed_stocks),
        'successful_stocks': len(completed_stocks),
        'failed_stocks': len(failed_stocks),
        'remaining_stocks': len(remaining_stocks),
        'total_records': total_records,
        'failed_list': list(failed_stocks),
        'duration': 0
    }
    
    if not remaining_stocks:
        logger.info("所有股票都已同步完成！")
        result['success'] = True
        return result
    
    successful_count = len(completed_stocks)
    failed_count = len(failed_stocks)
    # NOTE(review): entries restored from the progress file are bare codes,
    # while entries appended below use "CODE.MARKET" — the formats are mixed.
    failed_list = list(failed_stocks)
    
    # Process the remaining stocks batch by batch, checkpointing after each.
    for i in range(0, len(remaining_stocks), batch_size):
        batch_stocks = remaining_stocks[i:i + batch_size]
        batch_num = i // batch_size + 1
        total_batches = (len(remaining_stocks) + batch_size - 1) // batch_size
        
        logger.info(f"处理第 {batch_num}/{total_batches} 批，股票数量: {len(batch_stocks)}")
        
        batch_start_time = datetime.now()
        batch_successful = 0
        batch_failed = 0
        batch_records = 0
        
        # Sync each stock in the current batch.
        for stock_code, market in batch_stocks:
            try:
                logger.info(f"正在同步 {stock_code}.{market.upper()}")
                
                # Fetch the minute bars for the session window.
                df = sync.get_minute_data(stock_code, market, start_time, end_time)
                
                if not df.empty:
                    # Persist via the DAO (insert-or-update, so reruns are safe).
                    batch_data = df.to_dict('records')
                    synced_count = sync.minute_dao.batch_insert_or_update_minute_data(batch_data)
                    
                    batch_records += synced_count
                    batch_successful += 1
                    completed_stocks.add(stock_code)
                    logger.info(f"✓ {stock_code}.{market.upper()}: 同步 {synced_count} 条记录")
                else:
                    logger.warning(f"⚠ {stock_code}.{market.upper()}: 无数据")
                    batch_successful += 1  # "no data" still counts as completed
                    completed_stocks.add(stock_code)
                
                # Throttle requests to the upstream data source.
                if delay > 0:
                    time.sleep(delay)
                    
            except SystemExit as e:
                # Presumably raised by the sync layer on refused connections —
                # TODO confirm where SystemExit originates.
                logger.error(f"程序被终止: {e}")
                logger.error("检测到连接被拒绝，停止所有同步操作")
                # Checkpoint before bailing out so the run can be resumed.
                save_progress(target_date, completed_stocks, failed_stocks, 
                            total_records + batch_records, session_start_time)
                # NOTE(review): `result` still holds the pre-loop snapshot here;
                # counts and total_records are stale on this early return.
                return result
            except Exception as e:
                batch_failed += 1
                failed_stocks.add(stock_code)
                failed_list.append(f"{stock_code}.{market.upper()}")
                logger.error(f"✗ {stock_code}.{market.upper()}: 同步失败 - {e}")
                continue
        
        successful_count += batch_successful
        failed_count += batch_failed
        total_records += batch_records
        
        # Checkpoint after every batch.
        save_progress(target_date, completed_stocks, failed_stocks, total_records, session_start_time)
        
        batch_duration = (datetime.now() - batch_start_time).total_seconds()
        logger.info(f"第 {batch_num} 批完成: 成功 {batch_successful} 只, 失败 {batch_failed} 只, "
                   f"记录 {batch_records} 条, 耗时 {batch_duration:.1f}秒")
        
        # Overall progress across all sessions (resumed counts included).
        progress_pct = ((len(completed_stocks) + len(failed_stocks)) / total_stocks * 100)
        total_duration = (datetime.now() - session_start_time).total_seconds()
        logger.info(f"总体进度: {progress_pct:.1f}%, 完成: {len(completed_stocks)}, 失败: {len(failed_stocks)}, "
                   f"总记录: {total_records}, 耗时: {total_duration/60:.1f}分钟")
        
        # Pause between batches (skipped after the last batch).
        if i + batch_size < len(remaining_stocks) and delay > 0:
            logger.info(f"等待 {delay} 秒后继续下一批次...")
            time.sleep(delay)
    
    # Finalize the result for a completed run.
    total_duration = (datetime.now() - session_start_time).total_seconds()
    
    result.update({
        'success': True,
        'successful_stocks': successful_count,
        'failed_stocks': failed_count,
        'total_records': total_records,
        'failed_list': failed_list,
        'duration': total_duration
    })
    
    # The run finished — the progress file is no longer needed.
    progress_file = get_progress_file_path(target_date)
    if os.path.exists(progress_file):
        os.remove(progress_file)
    
    logger.info("=" * 80)
    logger.info(f"{target_date} 分时数据同步完成！")
    logger.info(f"总股票数: {total_stocks}")
    logger.info(f"成功股票数: {successful_count}")
    logger.info(f"失败股票数: {failed_count}")
    logger.info(f"总记录数: {total_records}")
    logger.info(f"总耗时: {total_duration/60:.1f}分钟")
    if failed_list:
        logger.info(f"失败股票: {', '.join(failed_list[:10])}{'...' if len(failed_list) > 10 else ''}")
    logger.info("=" * 80)
    
    return result

def show_progress_status(target_date: str):
    """Print a human-readable summary of the saved progress for *target_date*,
    plus how many stocks actually have rows in the database."""
    progress = load_progress(target_date)
    if not progress:
        print(f"没有找到 {target_date} 的进度文件")
        return

    print(f"目标日期: {progress['target_date']}")
    print(f"已完成股票: {len(progress['completed_stocks'])} 只")
    print(f"失败股票: {len(progress['failed_stocks'])} 只")
    print(f"总记录数: {progress['total_records']}")
    print(f"开始时间: {progress['start_time']}")
    print(f"最后更新: {progress['last_update']}")

    # Cross-check the file against what is actually persisted in the DB.
    in_db = get_completed_stocks_from_db(target_date)
    print(f"数据库中的股票数: {len(in_db)} 只")

def main():
    """Command-line entry point: parse arguments, then run or report the sync."""
    import argparse

    parser = argparse.ArgumentParser(description='同步指定日期的A股分时数据（支持断点续传）')
    parser.add_argument('--date', default='20250918', help='目标日期（格式：YYYYMMDD，默认：20250918）')
    parser.add_argument('--batch-size', type=int, default=10, help='每批处理的股票数量（默认：10）')
    parser.add_argument('--delay', type=float, default=3.0, help='请求间隔（秒，默认：3.0）')
    parser.add_argument('--test', action='store_true', help='测试模式，只处理前10只股票')
    parser.add_argument('--no-resume', action='store_true', help='不使用断点续传，重新开始')
    parser.add_argument('--status', action='store_true', help='显示当前进度状态')
    args = parser.parse_args()

    # Fail fast on a malformed date before doing anything else.
    try:
        datetime.strptime(args.date, "%Y%m%d")
    except ValueError:
        print(f"错误：日期格式不正确 '{args.date}'，请使用YYYYMMDD格式")
        return

    if args.status:
        show_progress_status(args.date)
        return

    banner = "=" * 80
    resume_enabled = not args.no_resume

    print(banner)
    print(f"A股分时数据同步 - {args.date} (支持断点续传)")
    print(banner)
    print(f"目标日期: {args.date}")
    print(f"批次大小: {args.batch_size}")
    print(f"请求延迟: {args.delay}秒")
    print(f"断点续传: {'禁用' if args.no_resume else '启用'}")
    if args.test:
        print("测试模式: 只处理前10只股票")
    print(banner)

    try:
        if args.test:
            # NOTE(review): this slice is informational only — the sync call
            # below still iterates every stock (just with batch size 1), since
            # the sync function offers no way to restrict the stock list.
            # Confirm whether test mode should truly limit to 10 stocks.
            stock_market_list = get_all_stock_codes()[:10]
            print(f"测试模式：处理 {len(stock_market_list)} 只股票")
            outcome = sync_daily_minute_data_with_resume(args.date, 1, args.delay, resume_enabled)
        else:
            outcome = sync_daily_minute_data_with_resume(args.date, args.batch_size, args.delay, resume_enabled)

        # Final summary.
        print("\n" + banner)
        if outcome['success']:
            print("✓ 同步成功！")
            print(f"总股票数: {outcome['total_stocks']}")
            print(f"成功股票数: {outcome['successful_stocks']}")
            print(f"失败股票数: {outcome['failed_stocks']}")
            print(f"总记录数: {outcome['total_records']}")
            print(f"同步耗时: {outcome['duration']/60:.1f}分钟")
            if outcome['failed_list']:
                print(f"失败股票: {', '.join(outcome['failed_list'][:10])}{'...' if len(outcome['failed_list']) > 10 else ''}")
        else:
            print("✗ 同步失败！")
            print(f"错误信息: {outcome.get('error', '未知错误')}")
        print(banner)

    except KeyboardInterrupt:
        # Progress is checkpointed per batch, so an interrupt is resumable.
        print("\n\n用户中断操作，进度已保存")
        print("使用 --status 参数查看当前进度")
    except Exception as e:
        print(f"\n程序执行失败: {e}")
if __name__ == "__main__":
    main()
