import os
import torch
from transformers import AutoTokenizer, LlamaForCausalLM
from read_word_dataset import WordsDataset
from utils.model_path_getter import load_yaml

class OnlineModel:
    """Wrap a causal LLaMA model with forward hooks that capture the inputs
    to selected MLP down-projection layers, iterating over a word dataset
    in fixed-size steps (wrapping around when the end index is reached).
    """

    def __init__(self, start, end):
        """Load tokenizer/model from the configured path and set up dataset
        iteration state.

        Args:
            start: first dataset index (inclusive); iteration wraps back here.
            end: sentinel dataset index (exclusive) at which iteration wraps.
        """
        # Read the YAML config once instead of re-parsing it per key.
        config = load_yaml()
        self.model_dir = config['model_path']
        self.offload_path = config['offload_path']
        # Project root (two levels up from this file). Unused inside this
        # class but kept as a public attribute for external callers.
        self.current_dir = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
        self.tokenizer = AutoTokenizer.from_pretrained(self.model_dir)
        if self.tokenizer.pad_token is None:
            # LLaMA tokenizers typically ship without a pad token; reuse EOS
            # so batched padding works.
            self.tokenizer.pad_token = self.tokenizer.eos_token
        self.model = LlamaForCausalLM.from_pretrained(
            self.model_dir,
            torch_dtype=torch.float16,
            device_map="auto",
            offload_folder=self.offload_path
        )
        self.second_line_input = []  # tensors captured by the forward hook
        self.layer_nums = { 0 }      # indices of decoder layers to hook
        self.start = start
        self.end = end
        self.current_dataset_num = start
        self.step = 1                # dataset items consumed per get_one()
        self.dataset = WordsDataset()
        self._hook_handles = []      # RemovableHandles, so hooks can be undone

    def add_hook(self):
        """Register a forward hook on ``mlp.down_proj`` of each layer whose
        index is in ``self.layer_nums``.

        Each forward pass, the hook appends the layer input (detached and
        cast to float32) to ``self.second_line_input``. Handles are stored
        so ``remove_hooks()`` can unregister them later.
        """
        def second_line_input_hook(module, input, output):
            self.second_line_input.append(input[0].detach().to(torch.float32))

        for i, layer in enumerate(self.model.model.layers):
            if i in self.layer_nums:
                handle = layer.mlp.down_proj.register_forward_hook(second_line_input_hook)
                self._hook_handles.append(handle)
        print("Hook added.")

    def remove_hooks(self):
        """Unregister every hook added by ``add_hook()``."""
        for handle in self._hook_handles:
            handle.remove()
        self._hook_handles.clear()

    def get_one(self):
        """Run one forward pass over the next dataset slice and return the
        tensors captured by the hook.

        Returns:
            list[torch.Tensor]: the float32 inputs captured at the hooked
            layers during this forward pass.

        Raises:
            ValueError: if no hook fired (e.g. ``add_hook()`` was never
            called before this method).
        """
        # Wrap around to the start of the configured dataset window.
        if self.current_dataset_num >= self.end:
            self.current_dataset_num = self.start

        self.second_line_input.clear()
        prompts = self.dataset.load_texts(self.current_dataset_num, self.current_dataset_num + self.step)
        inputs = self.tokenizer(prompts, return_tensors="pt", padding=True)
        inputs = {key: value.to(self.model.device) for key, value in inputs.items()}

        with torch.no_grad():
            # Logits are discarded; only the hook-side captures matter.
            self.model(**inputs)

        if not self.second_line_input:
            raise ValueError("Hook did not capture any data.")

        self.current_dataset_num += self.step
        return self.second_line_input

if __name__ == '__main__':
    # Smoke-test: hook the first layer, run one batch, inspect the capture.
    model = OnlineModel(start=0, end=2)
    model.add_hook()
    captured = model.get_one()
    print(captured[0].shape)
    print(len(captured))
    print(captured)
