import torch
import torch.nn as nn
from transformers import AutoModelForCausalLM, AutoTokenizer


class ModelRunner:
    """Loads a causal LM and manages a paged KV cache for batched inference.

    Args:
        model_config: Object exposing at least ``model`` (HF model name or
            path) and ``trust_remote_code`` attributes.
        device: Torch device string the model and KV caches live on.
        cache_config: Object exposing ``num_gpu_blocks`` and ``block_size``,
            used to size the paged KV cache. Must be provided for the cache
            to be built.
    """

    def __init__(self, model_config, device="cuda", cache_config=None):
        self.model_config = model_config
        self.device = device
        # BUGFIX: the original read ``self.cache_config`` inside
        # _init_kv_cache() without ever assigning it, so every construction
        # raised AttributeError. Accept it as a (backward-compatible)
        # keyword parameter and store it here.
        self.cache_config = cache_config

        # Load the model weights in fp16 and move them to the target device.
        self.model = AutoModelForCausalLM.from_pretrained(
            model_config.model,
            trust_remote_code=model_config.trust_remote_code,
            torch_dtype=torch.float16,
        ).to(device)

        # Inference only: disable dropout / training-mode layers.
        self.model.eval()

        # Pre-allocate the per-layer paged KV cache.
        self.kv_cache = self._init_kv_cache()

    def _init_kv_cache(self):
        """Allocate one zeroed (key, value) cache pair per transformer layer.

        Every tensor has shape
        ``(num_gpu_blocks, block_size, num_heads, head_dim)`` in fp16 on
        ``self.device``.

        Returns:
            list[tuple[torch.Tensor, torch.Tensor]]: per-layer (K, V) caches.

        Raises:
            ValueError: if no ``cache_config`` was supplied at construction.
        """
        if self.cache_config is None:
            raise ValueError(
                "cache_config with num_gpu_blocks and block_size is required "
                "to initialize the KV cache"
            )

        config = self.model.config
        num_heads = config.num_attention_heads
        head_dim = config.hidden_size // num_heads
        num_layers = config.num_hidden_layers

        # Shared shape for both the key and the value cache of each layer.
        cache_shape = (
            self.cache_config.num_gpu_blocks,
            self.cache_config.block_size,
            num_heads,
            head_dim,
        )
        return [
            (
                torch.zeros(cache_shape, device=self.device, dtype=torch.float16),
                torch.zeros(cache_shape, device=self.device, dtype=torch.float16),
            )
            for _ in range(num_layers)
        ]

    def execute_model(self, batch):
        """Run one forward pass over a prepared batch.

        Args:
            batch: Mapping with ``input_ids``, ``block_tables`` and
                ``seq_lens`` tensors; each is moved to ``self.device`` here.

        Returns:
            The model's forward output object.

        NOTE(review): ``block_tables`` / ``seq_lens`` are not standard HF
        ``forward`` kwargs — this assumes a custom paged-attention model
        loaded via ``trust_remote_code``; confirm against the model code.
        """
        input_ids = batch['input_ids'].to(self.device)
        block_tables = batch['block_tables'].to(self.device)
        seq_lens = batch['seq_lens'].to(self.device)

        # No gradients needed at inference time.
        with torch.no_grad():
            outputs = self.model(
                input_ids=input_ids,
                past_key_values=self.kv_cache,
                block_tables=block_tables,
                seq_lens=seq_lens,
                use_cache=True,
            )

        return outputs