#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
道AI训练脚本
训练宇宙全息分形太极认知模型
"""

import paddle
import paddle.nn as nn
from cosmic_model.cosmic_daoai import PureCosmicDaoAI  # corrected import path
from data_processor import CosmicDataProcessor
import argparse
import logging

class DaoAITrainer:
    """Trainer for the DaoAI cosmic holographic fractal taiji cognition model.

    Lifecycle: construct with a config object, call ``setup_training()`` once
    to build model/optimizer/loss, then ``train()`` to run the full loop.
    """

    def __init__(self, config):
        # ``config`` is an argparse.Namespace-like object; expected attributes:
        # cosmic_dim, wisdom_dim, learning_rate, epochs, save_interval.
        self.config = config
        self.daoai = None      # model instance, created in setup_training()
        self.optimizer = None  # Adam optimizer, created in setup_training()
        self.criterion = None  # MSE loss, created in setup_training()

    def setup_training(self):
        """Build the model, optimizer and loss function from ``self.config``."""
        logging.info("初始化道AI训练环境...")

        # Instantiate the DaoAI model with the configured dimensions.
        self.daoai = PureCosmicDaoAI(
            cosmic_dim=self.config.cosmic_dim,
            wisdom_dim=self.config.wisdom_dim
        )

        # Adam over all model parameters.
        self.optimizer = paddle.optimizer.Adam(
            parameters=self.daoai.parameters(),
            learning_rate=self.config.learning_rate
        )

        # Regression-style objective over model outputs vs. targets.
        self.criterion = nn.MSELoss()

        logging.info("道AI训练环境初始化完成")

    def train_epoch(self, dataloader):
        """Run one pass over ``dataloader`` and return the mean batch loss.

        Each batch is expected to be a dict with 'questions' (model input)
        and 'targets' (regression target) keys.
        """
        total_loss = 0
        self.daoai.train()  # enable training mode (dropout, etc.)

        for batch_idx, batch in enumerate(dataloader):
            # Forward pass.
            outputs = self.daoai(batch['questions'])
            loss = self.criterion(outputs, batch['targets'])

            # Backward pass and parameter update (Paddle convention:
            # backward -> step -> clear_grad).
            loss.backward()
            self.optimizer.step()
            self.optimizer.clear_grad()

            total_loss += loss.item()

            if batch_idx % 100 == 0:
                logging.info(f"批次 {batch_idx}, 损失: {loss.item():.4f}")

        return total_loss / len(dataloader)

    def train(self):
        """Main training loop: iterate epochs, log progress, save checkpoints."""
        logging.info("开始道AI训练...")

        # Prepare training data.
        data_processor = CosmicDataProcessor(self.config)
        train_loader = data_processor.get_train_dataloader()

        for epoch in range(self.config.epochs):
            avg_loss = self.train_epoch(train_loader)
            logging.info(f"周期 {epoch+1}/{self.config.epochs}, 平均损失: {avg_loss:.4f}")

            # Periodic checkpoint.
            if (epoch + 1) % self.config.save_interval == 0:
                self.save_checkpoint(epoch)

        logging.info("道AI训练完成")

    def save_checkpoint(self, epoch):
        """Persist model and optimizer state for ``epoch``.

        Fix: this method was called from train() but never defined, which
        raised AttributeError on every ``save_interval``-th epoch.
        """
        prefix = f"daoai_checkpoint_epoch_{epoch + 1}"
        paddle.save(self.daoai.state_dict(), prefix + ".pdparams")
        paddle.save(self.optimizer.state_dict(), prefix + ".pdopt")
        logging.info(f"检查点已保存: {prefix}")

def main():
    """Parse CLI options, configure logging, and launch DaoAI training."""
    parser = argparse.ArgumentParser(description='道AI训练脚本')

    # Table-driven registration: (flag, type, default, help text).
    cli_options = [
        ('--cosmic_dim', int, 1024, '宇宙维度'),
        ('--wisdom_dim', int, 512, '智慧维度'),
        ('--epochs', int, 100, '训练周期'),
        ('--learning_rate', float, 1e-4, '学习率'),
        ('--save_interval', int, 10, '保存间隔'),
    ]
    for flag, value_type, default, help_text in cli_options:
        parser.add_argument(flag, type=value_type, default=default, help=help_text)

    args = parser.parse_args()

    # Emit INFO-level logs to the console.
    logging.basicConfig(level=logging.INFO)

    # Build the trainer and run the full training pipeline.
    trainer = DaoAITrainer(args)
    trainer.setup_training()
    trainer.train()

# Script entry point: run training only when executed directly, not on import.
if __name__ == "__main__":
    main()