import colossalai
import torch
from transformers import LlamaForCausalLM

from colossal_monitor import ColossalMonitor, MonitorConfig

# Initialize the distributed environment: 2-way data parallelism, a
# 2-stage pipeline, 2.5-D tensor parallelism of size 4, and ZeRO stage 3.
colossalai.launch_from_torch(config={
    "parallel": {
        "data": 2,
        "pipeline": 2,
        "tensor": {"mode": "2.5d", "size": 4}
    },
    "zero": {"stage": 3}
})

# Load the model and build an optimizer for it.  The original script
# called `colossalai.initialize(model)` with no optimizer, discarded the
# returned engine, and later referenced the nonexistent `model.optimizer`.
model = LlamaForCausalLM.from_pretrained("meta-llama/Llama-2-7b-hf")
optimizer = torch.optim.AdamW(model.parameters(), lr=1e-5)

# colossalai.initialize wraps model + optimizer into an engine that owns
# forward, backward() and step() under the configured parallelism.
engine, *_ = colossalai.initialize(model, optimizer)

# Monitoring configuration: watch per-layer parameters, record their norm
# and max after the monitored phase, and log every 20 monitor steps.
monitor_config = MonitorConfig(
    # Dots escaped so the pattern matches literal "model.layers.<idx>."
    # prefixes; the original `r"model.layers\.\d+\.*"` treated the first
    # two dots as wildcards and `\.*` as "zero or more literal dots".
    include_patterns=(r"model\.layers\.\d+\..*",),
    phase="post",
    metrics=["norm", "max"],
    log_interval=20
)

# Attach the monitor to the model.
monitor = ColossalMonitor(model, monitor_config)

# Training hyper-parameters and a stand-in batch.  The original script
# referenced `input_ids`, `labels` and `gradient_accum_steps` without
# defining them; replace this dummy batch with a real dataloader.
gradient_accum_steps = 4
batch_size, seq_len = 2, 128
input_ids = torch.randint(0, 32000, (batch_size, seq_len))
labels = input_ids.clone()

# Training loop with gradient accumulation.
monitor.start()
for step in range(1000):
    # Forward pass through the engine-wrapped model.
    outputs = engine(input_ids, labels=labels)
    loss = outputs.loss

    # Scale the loss so accumulated gradients average over micro-batches,
    # and run backward through the engine (handles ZeRO/pipeline details).
    engine.backward(loss / gradient_accum_steps)

    # Parameter update only on accumulation boundaries.
    if (step + 1) % gradient_accum_steps == 0:
        engine.step()
        engine.zero_grad()
        # monitor.step() is tied to optimizer steps, so log_interval
        # counts parameter updates rather than micro-batches.
        monitor.step()

monitor.stop()
