
from typing import Dict, Optional, Union, List

from llm_server.llm import Qwen2, Llama3
from llm_server.server.settings import ModelSettings


class LLM(object):
    """Registry/loader for the configured LLM backends.

    Keeps the ``ModelSettings`` for every configured model; the first
    entry of *models* is the default. Exactly one model is resident at a
    time and is swapped lazily when a different alias is requested.
    """

    def __init__(self, models: "List[ModelSettings]") -> None:
        """Load the default (first) model and index the rest by alias.

        Args:
            models: Non-empty list of model configurations; ``models[0]``
                becomes the default.

        Raises:
            ValueError: If *models* is empty.
        """
        # Explicit raise instead of `assert`: asserts are stripped under -O.
        if not models:
            raise ValueError("No models provided!")

        # Keep every configuration so __call__ can switch models on demand.
        self._model_settings: "Dict[str, ModelSettings]" = {
            m.model_alias: m for m in models
        }
        self._default_model_settings: "ModelSettings" = models[0]
        self._default_model_alias: str = self._default_model_settings.model_alias

        # Eagerly load the default model and track which alias is resident.
        self._current_model_alias: str = self._default_model_alias
        self._current_model = self.load_llm_from_model_settings(
            self._default_model_settings
        )

    def __call__(self, model: Optional[str] = None):
        """Return the model instance for alias *model*.

        Previously the *model* argument was ignored and the resident model
        was always returned; now a differing alias triggers a lazy load
        and swap.

        Args:
            model: Model alias to serve; ``None`` selects the default.

        Raises:
            ValueError: If *model* is not a configured alias.
        """
        if model is None:
            model = self._default_model_alias

        if model != self._current_model_alias:
            try:
                settings = self._model_settings[model]
            except KeyError:
                raise ValueError(f"Unknown model alias {model}") from None
            # Swap the resident model only after a successful load.
            self._current_model = self.load_llm_from_model_settings(settings)
            self._current_model_alias = model

        return self._current_model

    @staticmethod
    def load_llm_from_model_settings(settings: "ModelSettings"):
        """Instantiate the backend selected by ``settings.model``.

        Args:
            settings: Configuration whose ``model`` field (case-insensitive)
                names the backend: ``"qwen2"`` or ``"llama3"``.

        Returns:
            A constructed ``Qwen2`` or ``Llama3`` instance.

        Raises:
            ValueError: For an unrecognized ``settings.model``.
        """
        model_type = settings.model.lower()

        # NOTE(review): paths and dimensions are deployment-specific
        # constants — confirm against the model store before changing.
        if model_type == "qwen2":
            create_fn = Qwen2
            kwargs = {
                "model_url": "/opt/llm/model/qwen2-7B-bf16-hf",
                "emb_dim": 3584,
                "vocab_size": 152064,
            }
        elif model_type == "llama3":
            create_fn = Llama3
            kwargs = {
                "model_url": "/opt/llm/model/llama-8B-bf16",
                "emb_dim": 4096,
                "vocab_size": 128256,
            }
        else:
            raise ValueError(f"Unknown model type {settings.model}")

        return create_fn(**kwargs)


        
        