#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
分批同步A股分钟级数据
每次同步少量股票，避免网络压力，支持断点续传
"""
import sys
import os
import time
import logging
from datetime import datetime, timedelta
sys.path.append(os.path.dirname(os.path.abspath(__file__)))

from sync_stock_minute import StockMinuteSync
from database import DatabaseManager
from config import DATABASE_CONFIG

def setup_logging():
    """Configure root logging to both stdout and a log file under <project>/logs."""
    # The project root is one level above the directory containing this file.
    project_root = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
    log_dir = os.path.join(project_root, 'logs')
    os.makedirs(log_dir, exist_ok=True)

    log_file = os.path.join(log_dir, 'batch_sync_minute.log')
    handlers = [
        logging.StreamHandler(),
        logging.FileHandler(log_file, mode='a', encoding='utf-8'),
    ]
    logging.basicConfig(
        level=logging.INFO,
        format='%(asctime)s [%(levelname)s] %(name)s: %(message)s',
        handlers=handlers,
    )

def get_sync_progress():
    """Return a summary dict of minute-data sync progress.

    Keys: total_stocks, synced_stocks, remaining_stocks, total_records,
    latest_time (formatted string or None), progress_pct (0-100 float).
    """
    db = DatabaseManager(DATABASE_CONFIG)

    # Total number of active stocks. Use a single-quoted string literal so the
    # query also works under MySQL's ANSI_QUOTES mode (double quotes would then
    # be parsed as an identifier); this matches the quoting style used in the
    # other queries of this module.
    total_stocks = db.execute_query(
        "SELECT COUNT(*) as count FROM new_stock_info WHERE status = 'active'"
    )[0]['count']

    # Number of distinct stocks that already have at least one minute record.
    synced_stocks = db.execute_query(
        'SELECT COUNT(DISTINCT stock_code) as count FROM new_stock_minute_data'
    )[0]['count']

    # Fetch the total record count and the newest trade time in a single
    # round trip instead of two separate full-table queries.
    stats = db.execute_query(
        'SELECT COUNT(*) as count, MAX(trade_time) as time FROM new_stock_minute_data'
    )[0]
    total_records = stats['count']
    latest_time = stats['time']

    return {
        'total_stocks': total_stocks,
        'synced_stocks': synced_stocks,
        'remaining_stocks': total_stocks - synced_stocks,
        'total_records': total_records,
        'latest_time': latest_time.strftime('%Y-%m-%d %H:%M:%S') if latest_time else None,
        # Guard against division by zero when the stock table is empty.
        'progress_pct': (synced_stocks / total_stocks * 100) if total_stocks > 0 else 0
    }

def get_unsynced_stocks(limit=10):
    """Return up to ``limit`` (stock_code, market) pairs with no minute data yet.

    ``market`` is 'sz' for SZSE and 'sh' for SHSE; stocks listed on any other
    exchange are skipped.
    """
    db = DatabaseManager(DATABASE_CONFIG)

    # Anti-join: active stocks with no row at all in the minute-data table.
    sql = """
    SELECT si.stock_code, si.exchange
    FROM new_stock_info si 
    LEFT JOIN (
        SELECT DISTINCT stock_code 
        FROM new_stock_minute_data
    ) sm ON si.stock_code = sm.stock_code 
    WHERE si.status = 'active' AND sm.stock_code IS NULL
    ORDER BY si.stock_code
    LIMIT %s
    """

    results = db.execute_query(sql, (limit,))
    # Map exchange identifiers to the short market codes the sync client
    # expects; unknown exchanges are filtered out (same as the old if/elif
    # chain with its `continue`).
    exchange_to_market = {'SZSE': 'sz', 'SHSE': 'sh'}
    return [
        (row['stock_code'], exchange_to_market[row['exchange']])
        for row in results
        if row['exchange'] in exchange_to_market
    ]

def get_outdated_stocks(limit=10, max_hours=2):
    """Return up to ``limit`` (stock_code, market) pairs whose newest minute
    record is older than ``max_hours`` hours.

    Results are ordered stalest-first. ``market`` is 'sz' for SZSE and 'sh'
    for SHSE; stocks on any other exchange are skipped.
    """
    db = DatabaseManager(DATABASE_CONFIG)

    # Any stock whose latest record predates this cutoff counts as stale.
    cutoff_time = datetime.now() - timedelta(hours=max_hours)

    sql = """
    SELECT sm.stock_code, si.exchange
    FROM new_stock_minute_data sm
    JOIN new_stock_info si ON sm.stock_code = si.stock_code
    WHERE si.status = 'active'
    GROUP BY sm.stock_code, si.exchange
    HAVING MAX(sm.trade_time) < %s
    ORDER BY MAX(sm.trade_time)
    LIMIT %s
    """

    results = db.execute_query(sql, (cutoff_time, limit))
    # Map exchange identifiers to the short market codes the sync client
    # expects; unknown exchanges are filtered out (same as the old if/elif
    # chain with its `continue`).
    exchange_to_market = {'SZSE': 'sz', 'SHSE': 'sh'}
    return [
        (row['stock_code'], exchange_to_market[row['exchange']])
        for row in results
        if row['exchange'] in exchange_to_market
    ]

def batch_sync(batch_size=5, max_hours=24, delay=5, update_outdated=True):
    """Sync A-share minute-level data in small batches until nothing is left.

    Loops indefinitely; each iteration selects one batch of stocks (stale data
    first when ``update_outdated`` is True, otherwise never-synced stocks) and
    syncs them one by one, stopping when no candidates remain.

    Args:
        batch_size: number of stocks to process per batch.
        max_hours: sync window (hours) forwarded to sync_single_stock_minute.
        delay: seconds to sleep after each data-fetching or failed request
            and between batches; 0 disables all sleeps.
        update_outdated: when True, refresh stocks whose newest record is
            older than 2 hours before moving on to never-synced stocks.
    """
    setup_logging()
    logger = logging.getLogger(__name__)
    
    sync = StockMinuteSync()
    
    logger.info("开始分批同步A股分钟级数据...")
    logger.info(f"批次大小: {batch_size}, 最大小时数: {max_hours}, 延迟: {delay}秒")
    
    total_start_time = datetime.now()
    total_synced = 0
    total_failed = 0
    
    while True:
        # Report the current overall sync progress.
        progress = get_sync_progress()
        
        logger.info(f"同步进度: {progress['synced_stocks']}/{progress['total_stocks']} "
                   f"({progress['progress_pct']:.1f}%), 剩余: {progress['remaining_stocks']} 只")
        logger.info(f"总记录数: {progress['total_records']}, 最新时间: {progress['latest_time']}")
        
        # Prefer refreshing stale data over syncing brand-new stocks.
        if update_outdated:
            # NOTE(review): the staleness threshold here is a hard-coded
            # 2 hours, independent of this function's max_hours parameter.
            outdated_stocks = get_outdated_stocks(batch_size, max_hours=2)
            if outdated_stocks:
                logger.info(f"发现 {len(outdated_stocks)} 只股票数据需要更新")
                target_stocks = outdated_stocks
            else:
                # No stale data; fall back to never-synced stocks.
                unsynced_stocks = get_unsynced_stocks(batch_size)
                if not unsynced_stocks:
                    logger.info("所有股票数据都是最新的")
                    break
                target_stocks = unsynced_stocks
        else:
            # Only sync stocks that have no minute data at all.
            target_stocks = get_unsynced_stocks(batch_size)
            if not target_stocks:
                logger.info("所有股票已同步完成！")
                break
        
        logger.info(f"开始同步批次: {[f'{code}.{market.upper()}' for code, market in target_stocks]}")
        
        batch_start_time = datetime.now()
        batch_synced = 0
        batch_failed = 0
        
        # Sync each stock in the current batch.
        for stock_code, market in target_stocks:
            try:
                logger.info(f"正在同步股票: {stock_code}.{market.upper()}")
                result = sync.sync_single_stock_minute(stock_code, market, max_hours)
                
                if result['success']:
                    batch_synced += result['records_count']
                    if result['records_count'] > 0:
                        logger.info(f"✓ {stock_code}.{market.upper()}: 新增 {result['records_count']} 条记录")
                        # Throttle only when data was actually fetched.
                        if delay > 0:
                            time.sleep(delay)
                    else:
                        logger.info(f"✓ {stock_code}.{market.upper()}: 数据已是最新")
                        # Already up to date: skip the delay.
                else:
                    batch_failed += 1
                    logger.error(f"✗ {stock_code}.{market.upper()}: 同步失败 - {result['error']}")
                    # Also throttle on failure to avoid rapid-fire retries.
                    if delay > 0:
                        time.sleep(delay)
                    
            except SystemExit as e:
                # Presumably the sync layer raises SystemExit when the remote
                # refuses connections — abort everything in that case.
                # TODO(review): confirm against StockMinuteSync.
                logger.error(f"程序被终止: {e}")
                logger.error("检测到连接被拒绝，停止所有同步操作")
                return
            except Exception as e:
                batch_failed += 1
                logger.error(f"✗ {stock_code}.{market.upper()}: 处理异常 - {e}")
                continue
        
        batch_duration = (datetime.now() - batch_start_time).total_seconds()
        total_synced += batch_synced
        total_failed += batch_failed
        
        logger.info(f"批次完成: 新增 {batch_synced} 条记录, 失败 {batch_failed} 只, 耗时 {batch_duration:.1f}秒")
        
        # Report cumulative progress after the batch.
        total_duration = (datetime.now() - total_start_time).total_seconds()
        progress = get_sync_progress()
        
        logger.info(f"总体进度: {progress['synced_stocks']}/{progress['total_stocks']} "
                   f"({progress['progress_pct']:.1f}%), 总记录: {progress['total_records']}, "
                   f"总耗时: {total_duration/60:.1f}分钟")
        
        # Pause between batches.
        if delay > 0:
            logger.info(f"等待 {delay} 秒后继续下一批次...")
            time.sleep(delay)
    
    total_duration = (datetime.now() - total_start_time).total_seconds()
    final_progress = get_sync_progress()
    
    logger.info("=" * 60)
    logger.info("分批同步完成！")
    logger.info(f"总股票数: {final_progress['total_stocks']}")
    logger.info(f"已同步: {final_progress['synced_stocks']}")
    logger.info(f"总记录数: {final_progress['total_records']}")
    logger.info(f"最新时间: {final_progress['latest_time']}")
    logger.info(f"总耗时: {total_duration/60:.1f}分钟")
    logger.info("=" * 60)

def main():
    """CLI entry point: show sync progress, or run the batch sync."""
    import argparse

    parser = argparse.ArgumentParser(description='分批同步A股分钟级数据')
    parser.add_argument('--batch-size', type=int, default=5, help='每批处理的股票数量')
    parser.add_argument('--max-hours', type=int, default=24, help='最大同步小时数')
    parser.add_argument('--delay', type=float, default=5.0, help='请求间隔（秒）')
    parser.add_argument('--progress', action='store_true', help='仅显示当前进度')
    parser.add_argument('--no-update', action='store_true', help='不更新过时数据，只同步新股票')

    args = parser.parse_args()

    if not args.progress:
        # Run the batch sync; --no-update disables refreshing stale data.
        batch_sync(args.batch_size, args.max_hours, args.delay, not args.no_update)
        return

    # Progress-only mode: print a summary and exit.
    info = get_sync_progress()
    divider = "=" * 60
    summary = [
        divider,
        "A股分钟级数据同步进度",
        divider,
        f"总股票数: {info['total_stocks']}",
        f"已同步: {info['synced_stocks']}",
        f"剩余: {info['remaining_stocks']}",
        f"进度: {info['progress_pct']:.1f}%",
        f"总记录数: {info['total_records']}",
        f"最新时间: {info['latest_time']}",
        divider,
    ]
    print("\n".join(summary))

# Script entry point: only run when executed directly, not when imported.
if __name__ == "__main__":
    main()
