import paddle
from paddle.vision.models import resnet50
from paddle.io import Dataset, DataLoader
from paddle.vision.transforms import Compose, Resize, ToTensor, Normalize
from PIL import Image
import numpy as np
import os
import json

class InsectDataset(Dataset):
    """Dataset reading ``<image_path> <label_name>`` pairs from a list file.

    Each line of *list_file* contains an image path (relative to *root_dir*)
    followed by a single space and a class name.  The line is split from the
    right, so image paths containing spaces are handled correctly.

    Args:
        list_file: Text file listing one ``path label`` pair per line.
        root_dir: Directory that the image paths are relative to.
        transform: Optional callable applied to the decoded RGB image array.
        class_to_idx: Optional pre-built ``{label_name: index}`` mapping.
            Pass the training set's mapping when constructing a
            validation/test split so both splits agree on label indices.
            When ``None`` (default, preserves previous behavior) the mapping
            is derived from the labels found in *list_file*, sorted
            alphabetically.

    Raises:
        ValueError: If a line of *list_file* does not split into exactly
            a path and a label.
    """

    def __init__(self, list_file, root_dir, transform=None, class_to_idx=None):
        super().__init__()
        self.transform = transform
        self.root_dir = root_dir
        self.samples = []

        with open(list_file, 'r') as f:
            for line in f:
                # rsplit from the right: the label is the last token, the
                # rest of the line (spaces included) is the image path.
                parts = line.strip().rsplit(' ', 1)
                if len(parts) != 2:
                    raise ValueError(f"Line format error: {line.strip()}")
                img_path, label_name = parts
                self.samples.append((img_path, label_name))

        if class_to_idx is not None:
            # Reuse an externally supplied mapping (e.g. the training set's)
            # so label indices are consistent across dataset splits.
            self.class_to_idx = class_to_idx
        else:
            # Auto-generate class_to_idx, sorted by class name for stability.
            label_names = sorted(set(label for _, label in self.samples))
            self.class_to_idx = {label: idx for idx, label in enumerate(label_names)}

    def __getitem__(self, idx):
        """Return ``(image, label_index)`` for the sample at *idx*."""
        img_path, label_name = self.samples[idx]
        img_full_path = os.path.join(self.root_dir, img_path)
        img = Image.open(img_full_path).convert('RGB')
        img = np.array(img)
        if self.transform:
            img = self.transform(img)
        label = self.class_to_idx[label_name]
        return img, label

    def __len__(self):
        """Number of samples listed in the list file."""
        return len(self.samples)

def train(config):
    """Train a ResNet-50 insect classifier described by *config*.

    Expected config keys: ``train_file``, ``test_file``, ``dataset_dir``,
    ``train_batch_size``, ``test_batch_size``, ``learning_rate``,
    ``num_epochs``, ``save_dir``, and optionally ``use_gpu``.

    Side effects: prints training loss / validation accuracy, and writes
    the trained weights plus the class-name -> index mapping into
    ``config['save_dir']``.
    """
    device = 'gpu' if config.get('use_gpu', False) else 'cpu'
    paddle.set_device(device)

    # Resize to ResNet's expected 224x224 input; normalize with the
    # standard ImageNet channel statistics.
    transform = Compose([
        Resize((224, 224)),
        ToTensor(),
        Normalize(mean=[0.485, 0.456, 0.406],
                  std=[0.229, 0.224, 0.225])
    ])

    train_dataset = InsectDataset(config['train_file'], config['dataset_dir'], transform)
    train_loader = DataLoader(train_dataset, batch_size=config['train_batch_size'], shuffle=True)

    val_dataset = InsectDataset(config['test_file'], config['dataset_dir'], transform)
    # Bug fix: the validation list may be missing classes (or list them in a
    # different order), in which case the dataset would derive a different
    # label->index mapping and validation accuracy would be computed against
    # wrong labels.  Force the validation set to reuse the training mapping.
    val_dataset.class_to_idx = train_dataset.class_to_idx
    val_loader = DataLoader(val_dataset, batch_size=config['test_batch_size'], shuffle=False)

    model = resnet50(pretrained=False, num_classes=len(train_dataset.class_to_idx))
    model.train()
    criterion = paddle.nn.CrossEntropyLoss()
    optimizer = paddle.optimizer.Adam(parameters=model.parameters(), learning_rate=config['learning_rate'])

    for epoch in range(config['num_epochs']):
        for batch_id, (images, labels) in enumerate(train_loader):
            logits = model(images)
            loss = criterion(logits, labels)
            loss.backward()
            optimizer.step()
            optimizer.clear_grad()

            if batch_id % 10 == 0:
                print(f"Epoch {epoch}, batch {batch_id}, loss: {loss.numpy().item():.4f}")

        # Validation pass: eval mode disables dropout and freezes BN stats.
        model.eval()
        total_correct = 0
        total_samples = 0
        with paddle.no_grad():
            for images, labels in val_loader:
                logits = model(images)
                preds = logits.argmax(axis=1)
                total_correct += (preds == labels).numpy().sum()
                total_samples += labels.shape[0]
        acc = total_correct / total_samples
        print(f"Epoch {epoch} validation accuracy: {acc:.4f}")
        model.train()  # back to training mode for the next epoch

    # Persist weights plus the label mapping needed to decode predictions.
    save_dir = config['save_dir']
    # Bug fix: ensure the output directory exists -- the plain open() below
    # raises FileNotFoundError if it does not.
    os.makedirs(save_dir, exist_ok=True)
    paddle.save(model.state_dict(), os.path.join(save_dir, 'insect_resnet50.pdparams'))
    with open(os.path.join(save_dir, 'class_to_idx.json'), 'w') as f:
        json.dump(train_dataset.class_to_idx, f, ensure_ascii=False, indent=2)

if __name__ == '__main__':
    import yaml
    import sys

    # Bug fix: the config path is the FIRST command-line argument
    # (sys.argv[1]); the original read sys.argv[2], so the natural
    # invocation `python script.py config.yaml` silently fell back to
    # the default file.
    config_path = sys.argv[1] if len(sys.argv) > 1 else 'insect_resnet50.yaml'
    with open(config_path, 'r') as f:
        # safe_load: never execute arbitrary YAML tags from a config file.
        config = yaml.safe_load(f)
    train(config)
