from fastapi import APIRouter, HTTPException, BackgroundTasks
from app.schemas.dataset import TrainingConfig, TrainingResponse
from app.core.config import settings
from app.models.ml.bert_model import BertSpamClassifier
from transformers import BertTokenizer
import torch
from torch.utils.data import Dataset, DataLoader
import pandas as pd
from cot_utils import generate_cot
import asyncio
from datetime import datetime

# Router collecting the model-training endpoints defined in this module.
router = APIRouter()

class EmailDataset(Dataset):
    """Map-style dataset of spam/ham emails with appended chain-of-thought text.

    Each sample is the email body joined to its CoT rationale by a
    ' [SEP] ' marker; tokenization to a fixed-length encoding happens
    lazily in ``__getitem__``.
    """

    def __init__(self, df, tokenizer, max_len):
        # Fuse the email body with its chain-of-thought rationale into a
        # single input string per row (vectorized pandas string concat).
        combined = df['text'] + ' [SEP] ' + df['cot']
        self.texts = combined.tolist()
        # Binary encoding: ham -> 0, spam -> 1.
        self.labels = df['label'].map({'ham': 0, 'spam': 1}).tolist()
        self.tokenizer = tokenizer
        self.max_len = max_len

    def __len__(self):
        return len(self.texts)

    def __getitem__(self, idx):
        """Tokenize sample *idx* into fixed-length tensors for a BERT forward pass."""
        enc = self.tokenizer(
            self.texts[idx],
            truncation=True,
            padding='max_length',
            max_length=self.max_len,
            return_tensors='pt',
        )
        label_tensor = torch.tensor(self.labels[idx], dtype=torch.long)
        # Drop the batch dimension the tokenizer adds; DataLoader re-batches.
        return {
            'input_ids': enc['input_ids'].squeeze(0),
            'attention_mask': enc['attention_mask'].squeeze(0),
            'label': label_tensor,
        }

async def train_model_task(config: TrainingConfig):
    """Background task: fine-tune the BERT spam classifier on a CoT-augmented dataset.

    Loads ``<dataset_name>.csv`` from ``settings.DATA_DIR``, generates
    chain-of-thought rationales when the ``cot`` column is absent, trains
    for ``config.epochs`` epochs, and saves the model's state dict under
    ``settings.MODEL_DIR`` with a timestamped filename.

    Returns a status dict ({"status", "message", optional "metrics"});
    errors are reported in the dict rather than raised, since this runs
    detached via BackgroundTasks and an exception would be lost.
    """
    try:
        # Load the dataset
        df = pd.read_csv(settings.DATA_DIR / f"{config.dataset_name}.csv")

        # Generate chain-of-thought rationales if missing
        if 'cot' not in df.columns:
            df['cot'] = df.apply(lambda row: generate_cot(row['text'], row['label']), axis=1)

        # Initialize tokenizer and model
        tokenizer = BertTokenizer.from_pretrained(settings.PRETRAINED_MODEL_NAME)
        model = BertSpamClassifier(settings.PRETRAINED_MODEL_NAME)

        # Pick the best available device ONCE and reuse it below.
        # (nn.Module has no `.device` attribute, so the previous per-batch
        # `model.device` lookups would raise AttributeError unless
        # BertSpamClassifier happens to define one.)
        if torch.cuda.is_available():
            device = torch.device('cuda')
        elif torch.backends.mps.is_available():
            device = torch.device('mps')
        else:
            device = torch.device('cpu')
        model.to(device)

        # Prepare dataset / dataloader
        dataset = EmailDataset(df, tokenizer, settings.MAX_LENGTH)
        dataloader = DataLoader(dataset, batch_size=config.batch_size, shuffle=True)
        if len(dataloader) == 0:
            # Guard: an empty dataset would otherwise divide by zero below.
            return {"status": "error", "message": "数据集为空，无法训练"}

        # Training loop
        optimizer = torch.optim.AdamW(model.parameters(), lr=config.learning_rate)
        loss_fn = torch.nn.CrossEntropyLoss()

        total_loss = 0.0  # defined before the loop so epochs == 0 is safe
        for epoch in range(config.epochs):
            model.train()
            total_loss = 0.0  # report the final epoch's mean loss
            for batch in dataloader:
                optimizer.zero_grad()
                input_ids = batch['input_ids'].to(device)
                attention_mask = batch['attention_mask'].to(device)
                labels = batch['label'].to(device)

                outputs = model(input_ids, attention_mask)
                loss = loss_fn(outputs, labels)
                loss.backward()
                optimizer.step()
                total_loss += loss.item()

        # Save checkpoint with a timestamped filename
        timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
        model_path = settings.MODEL_DIR / f"spam_bert_cot_{timestamp}.pth"
        torch.save(model.state_dict(), model_path)

        return {
            "status": "success",
            "message": f"模型训练完成，已保存为 {model_path.name}",
            "metrics": {"final_loss": total_loss / len(dataloader)}
        }
    except Exception as e:
        # Report the failure in the result instead of raising: this task runs
        # detached, so a raised exception would vanish without a trace.
        return {
            "status": "error",
            "message": str(e)
        }

@router.post("/train", response_model=TrainingResponse)
async def train_model(
    config: TrainingConfig,
    background_tasks: BackgroundTasks
):
    """Validate the dataset exists, then start model training in the background.

    Returns 404 if the named dataset CSV is missing, 500 on any other
    scheduling failure, otherwise a "started" TrainingResponse.
    """
    # Validate OUTSIDE the try block: previously the 404 HTTPException was
    # caught by `except Exception` below and re-raised as a generic 500,
    # so clients never saw the intended 404.
    dataset_path = settings.DATA_DIR / f"{config.dataset_name}.csv"
    if not dataset_path.exists():
        raise HTTPException(
            status_code=404,
            detail=f"数据集 {config.dataset_name} 不存在"
        )

    try:
        # Schedule training after the response is sent.
        background_tasks.add_task(train_model_task, config)

        return TrainingResponse(
            status="started",
            message="模型训练已在后台开始"
        )
    except HTTPException:
        # Never remap deliberate HTTP errors to 500.
        raise
    except Exception as e:
        raise HTTPException(status_code=500, detail=str(e))