from dataclasses import dataclass, field
from os import environ
from typing import Any, Dict, Tuple

from llmtcg.chat.models.openai import OpenaiCompatibleChatModel


@dataclass
class InfiniAiChatModel(OpenaiCompatibleChatModel):

    """ Models that are deployed at Infini-AI. You need to set the environment
    variable INFINIAI_API_KEY or manually provide the api key when initialize
    the model instance. """

    # Read the env var at *instance creation* time via default_factory.
    # A plain class-level default (environ.get(...)) is evaluated once at
    # import time, so setting INFINIAI_API_KEY after import would be ignored.
    api_key: str = field(default_factory=lambda: environ.get("INFINIAI_API_KEY", ""))
    # Model identifier used both for routing (api_address) and reporting
    # (model_name).
    model: str = "llama-3-70b-instruct"

    @property
    def api_address(self) -> str:
        # Embed the configured model in the endpoint so non-default models
        # are routed correctly; the previous hard-coded URL ignored
        # ``self.model``.  Default behavior is unchanged (same URL for the
        # default model).
        return f"https://cloud.infini-ai.com/maas/{self.model}/nvidia"

    @property
    def headers(self) -> Dict[str, str]:
        # Standard bearer-token auth header expected by the Infini-AI MaaS API.
        return {"Authorization": f"Bearer {self.api_key}"}

    @property
    def model_name(self) -> str:
        return self.model

    def detect_error(self, response: Any) -> Tuple[bool, str]:
        """Return ``(is_error, message)`` for a decoded API *response*.

        Infini-AI signals an authentication failure with ``code == 10003``.
        The API key itself is deliberately NOT echoed into the message to
        avoid leaking the secret into logs and tracebacks.
        """
        # Guard against non-mapping payloads: ``'code' in response`` raises
        # TypeError on ints/None and matches substrings on str.
        if isinstance(response, dict) and response.get("code") == 10003:
            return True, ("authentication error, please check the "
                          "INFINIAI_API_KEY environment variable or the "
                          "api_key passed to the model")
        return False, ""
