import asyncio
import logging
import sys
import os
import warnings
import time
from datetime import datetime, timedelta
import argparse

# Make the project root importable regardless of the current working directory.
sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))

# Work around asyncio shutdown noise on Windows.
if sys.platform.startswith('win'):
    # Use WindowsSelectorEventLoopPolicy instead of the default ProactorEventLoop.
    asyncio.set_event_loop_policy(asyncio.WindowsSelectorEventLoopPolicy())
    # Silence the warning emitted when a proactor pipe transport is garbage-collected.
    warnings.filterwarnings("ignore", message="Exception ignored in.*ProactorBasePipeTransport.__del__")

from services.exchange import ExchangeType, KLineInterval
from services.exchange.service import exchange_service, register_gate
from models.kline_5min import Kline5Min
from models.base import SessionLocal
from utils.db_utils import batch_update_or_insert

# Log to stdout with timestamps so progress is visible when run from a shell.
logging.basicConfig(
    level=logging.INFO,
    format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
    handlers=[
        logging.StreamHandler(sys.stdout)
    ]
)
logger = logging.getLogger(__name__)

async def fetch_and_save_klines(symbol: str, days: int, throttle: float = 0.5):
    """Fetch 5-minute klines for one symbol and persist them to the database.

    Pages backwards in time through the Gate.io API until enough candles
    have been collected to cover the requested number of days (or the API
    runs dry), de-duplicating on the UTC timestamp, then upserts all rows
    via ``batch_update_or_insert``.

    Args:
        symbol: Trading pair, e.g. ``BTC-USDT``.
        days: Number of days of history to fetch.
        throttle: Delay in seconds between successive API calls.

    Returns:
        tuple: (klines fetched, rows inserted, rows updated).
        (0, 0, 0) when nothing was fetched or an error occurred.
    """
    try:
        logger.info(f"开始处理交易对: {symbol}, 计划获取最近 {days} 天的数据")

        # 5-minute candles -> 288 per day; that product is the fetch target.
        target_count = days * (24 * (60 // 5))
        logger.info(f"目标K线数量: {target_count}")

        collected = []
        seen_ts = set()
        window_end = datetime.now()

        # Page backwards until the target candle count is reached.
        while len(collected) < target_count:
            logger.info(f"[{symbol}] 正在获取新一批K线数据，当前结束时间: {window_end}, "
                        f"已获取 {len(collected)} / {target_count} 条")

            batch = await exchange_service.get_klines(
                exchange_type=ExchangeType.GATE,
                symbol=symbol,
                interval=KLineInterval.MINUTE_5,
                end_time=window_end,
                limit=1000  # API maximum per request
            )

            if not batch:
                logger.warning(f"[{symbol}] API未返回新的K线数据，停止获取。")
                break

            # De-duplicate across batches on the UTC timestamp.
            fresh = 0
            for candle in batch:
                ts = candle['datetime_utc'].timestamp()
                if ts in seen_ts:
                    continue
                collected.append(candle)
                seen_ts.add(ts)
                fresh += 1

            # All duplicates means we have reached data already seen: stop.
            if fresh == 0:
                logger.warning(f"[{symbol}] API返回的均为重复数据，停止获取。")
                break

            # The next request ends at the earliest candle of this batch.
            window_end = min(candle['datetime_utc'] for candle in batch)

            # Be polite to the API rate limiter.
            await asyncio.sleep(throttle)

        logger.info(f"[{symbol}] 总共获取到 {len(collected)} 条不重复的K线数据")

        if not collected:
            logger.info(f"[{symbol}] 没有新的K线数据需要保存。")
            return 0, 0, 0

        db = SessionLocal()
        try:
            # Project each candle onto the columns the Kline5Min model expects.
            columns = ('symbol', 'datetime', 'datetime_utc', 'open', 'high',
                       'low', 'close', 'volume', 'amount')
            rows = [{name: candle[name] for name in columns} for candle in collected]

            # Upsert keyed on (symbol, datetime), committing in chunks.
            inserted, updated = batch_update_or_insert(
                db=db,
                model=Kline5Min,
                data_list=rows,
                unique_fields=['symbol', 'datetime'],
                batch_size=500  # process 500 records per batch
            )

            logger.info(f"[{symbol}] 数据库操作完成: 新增 {inserted} 条记录，更新 {updated} 条记录")
            return len(collected), inserted, updated

        except Exception as e:
            logger.error(f"[{symbol}] 保存数据失败: {e}", exc_info=True)
            raise
        finally:
            db.close()

    except Exception as e:
        logger.error(f"[{symbol}] 获取K线数据失败: {e}", exc_info=True)
        return 0, 0, 0

async def fetch_multiple_symbols(symbols, days, concurrency=2):
    """Fetch and persist klines for several symbols concurrently.

    Args:
        symbols: Sequence of trading-pair strings, e.g. ["BTC-USDT", "ETH-USDT"].
        days: Number of days of history to fetch per symbol.
        concurrency: Maximum number of symbols processed at the same time.
    """
    # Bound concurrency so we do not hammer the exchange API.
    semaphore = asyncio.Semaphore(concurrency)

    async def fetch_with_limit(symbol):
        async with semaphore:
            return symbol, await fetch_and_save_klines(symbol, days)

    # Register the Gate.io exchange; public kline endpoints need no credentials.
    register_gate(api_key="", api_secret="")

    try:
        # Launch all fetch tasks and wait for completion.
        tasks = [fetch_with_limit(symbol) for symbol in symbols]
        results = await asyncio.gather(*tasks, return_exceptions=True)

        # Aggregate per-symbol results.
        success_count = 0
        total_klines = 0
        total_inserted = 0
        total_updated = 0
        failed_symbols = []

        # FIX: asyncio.gather preserves input order, so pairing results with
        # `symbols` lets us report WHICH symbol a raised task belonged to
        # (previously a raised exception lost the symbol and logged "未知").
        for symbol, result in zip(symbols, results):
            if isinstance(result, Exception):
                logger.error(f"任务执行失败: {result}")
                failed_symbols.append(symbol)
                continue

            _, (klines_count, inserted, updated) = result
            if klines_count > 0:
                success_count += 1
                total_klines += klines_count
                total_inserted += inserted
                total_updated += updated
            else:
                failed_symbols.append(symbol)

        # Print the summary.
        logger.info(f"数据获取任务完成，总计:")
        logger.info(f"- 成功处理交易对: {success_count}/{len(symbols)}")
        logger.info(f"- 获取K线数据: {total_klines}条")
        logger.info(f"- 数据库新增: {total_inserted}条")
        logger.info(f"- 数据库更新: {total_updated}条")

        if failed_symbols:
            logger.warning(f"以下交易对处理失败: {', '.join(failed_symbols)}")

    finally:
        # Always release the exchange client sessions.
        await exchange_service.close()

def parse_symbols(symbols_arg):
    """Normalize the --symbols CLI argument into a flat list of pairs.

    Accepts multiple space-separated values and/or comma-separated strings;
    falls back to ["ETH-USDT"] when nothing was supplied.
    """
    if not symbols_arg:
        return ["ETH-USDT"]

    parsed = []
    for raw in symbols_arg:
        if ',' in raw:
            # Comma-separated list: split, trim, and drop empty pieces.
            parsed.extend(part.strip() for part in raw.split(',') if part.strip())
        else:
            parsed.append(raw.strip())

    return parsed

if __name__ == "__main__":
    # CLI entry point: parse arguments, run the async fetcher, report timing.
    parser = argparse.ArgumentParser(description="从Gate.io获取并保存多个交易对的合约K线数据")
    parser.add_argument(
        "--symbols", type=str, nargs='+',
        help="要获取的交易对，可以是多个参数或逗号分隔的列表，例如: BTC-USDT ETH-USDT 或 'BTC-USDT,ETH-USDT'",
    )
    parser.add_argument("--days", type=int, default=30, help="要获取的最近天数")
    parser.add_argument("--concurrency", type=int, default=2, help="并发获取的交易对数量")
    cli = parser.parse_args()

    targets = parse_symbols(cli.symbols)
    print(f"准备获取以下交易对的K线数据: {', '.join(targets)}")

    started = time.time()
    asyncio.run(fetch_multiple_symbols(symbols=targets, days=cli.days, concurrency=cli.concurrency))
    logger.info(f"任务完成，总耗时: {time.time() - started:.2f}秒")