import json
import threading
from pathlib import Path
import torch

class ColossalMonitor:
    """Collects per-parameter gradient statistics via hooks and writes them
    to per-rank JSON files every ``config.log_interval`` steps.

    ``config`` attributes read by this class: ``log_interval``, ``phase``,
    ``metrics``, ``output_dir``, ``async_io``.
    """

    def __init__(self, model, config):
        self.model = model
        self.config = config
        self.hook = ColossalGradientHook(model, config)
        # At most one background writer is in flight at any time.
        self.writer_thread = None
        self.step_counter = 0

    def start(self):
        """Install gradient hooks; call once before training begins."""
        self.hook.install_hooks()

    def step(self):
        """Advance the step counter; on every ``log_interval``-th step,
        snapshot per-parameter gradient statistics and write them out."""
        self.step_counter += 1
        if self.step_counter % self.config.log_interval != 0:
            return

        # Fall back to rank 0 so the monitor also works in single-process
        # runs where torch.distributed was never initialized (get_rank()
        # would raise in that case).
        if torch.distributed.is_available() and torch.distributed.is_initialized():
            rank = torch.distributed.get_rank()
        else:
            rank = 0

        # Collect statistics for the configured phase.
        data = {
            "step": self.step_counter,
            "rank": rank,
            "params": {},
        }

        for param_key, phase_grads in self.hook.grad_store.items():
            grad = phase_grads[self.config.phase]
            if grad is None:
                # No gradient captured for this phase yet — skip.
                continue
            data["params"][param_key] = GradientCalculator.compute(
                grad, self.config.metrics
            )

        # Hand the snapshot off for (possibly asynchronous) writing.
        self._async_write(data)

    def _async_write(self, data):
        """Serialize ``data`` to ``<output_dir>/rank{r}_step{s}.json``.

        When ``config.async_io`` is set, the write runs on a background
        thread; a previous in-flight write is joined first so at most one
        writer runs at a time (also prevents concurrent writes to the
        same directory).
        """
        def write_task():
            output_dir = Path(self.config.output_dir)
            # parents=True: don't fail when intermediate directories
            # of output_dir are missing.
            output_dir.mkdir(parents=True, exist_ok=True)

            filename = f"rank{data['rank']}_step{data['step']}.json"
            with open(output_dir / filename, 'w') as f:
                json.dump(data, f, indent=2)

        if self.config.async_io:
            if self.writer_thread and self.writer_thread.is_alive():
                self.writer_thread.join()
            self.writer_thread = threading.Thread(target=write_task)
            self.writer_thread.start()
        else:
            write_task()

    def stop(self):
        """Remove hooks and flush any pending asynchronous write."""
        self.hook.remove_hooks()
        # Join the in-flight writer so the final snapshot isn't lost
        # when the process exits right after stop().
        if self.writer_thread and self.writer_thread.is_alive():
            self.writer_thread.join()
