import os

from google import genai

from rag.rag_base import LLMService


class GeminiService(LLMService):
    """
    Gemini-backed implementation of LLMService.

    Requires: ``pip install google-genai``
    Auth: reads the ``GOOGLE_API_KEY`` environment variable.
    """

    def __init__(
            self,
            model: str = "gemini-2.1-flash-preview",
            embedding_model: str = "gemini-embedding-exp-03-07",
            db_collection: str = "gemini",
    ):
        super().__init__(model=model, embedding_model=embedding_model, db_collection=db_collection)
        self.client = genai.Client(api_key=os.getenv('GOOGLE_API_KEY'))

    def embed(self, text: str, store: bool) -> list[float]:
        """
        Embed *text* with the Gemini embeddings API.

        https://ai.google.dev/gemini-api/docs/embeddings
        Google's embedding models take a ``task_type`` parameter that marks the
        text either for storage or for querying — a knob other embedding
        providers don't expose.

        :param text: the text to embed
        :param store: True -> ``RETRIEVAL_DOCUMENT`` (indexing time);
                      False -> ``RETRIEVAL_QUERY`` (search time)
        :return: the embedding vector
        :raises ValueError: if the API response carries no embedding values
        """
        result = self.client.models.embed_content(
            model=self.embedding_model,
            contents=text,
            config={"task_type": "RETRIEVAL_DOCUMENT" if store else "RETRIEVAL_QUERY"}
        )
        # Explicit check instead of assert: asserts vanish under `python -O`,
        # which would let a None slip through and fail later, far from here.
        if not result.embeddings or not result.embeddings[0].values:
            raise ValueError("Gemini API returned no embedding values")
        return result.embeddings[0].values

    def chat(self, messages):
        """
        Send an OpenAI-style message list and return the reply text.

        The google-genai SDK does not accept OpenAI ``{'role', 'content'}``
        dicts directly: system messages go into the ``system_instruction``
        config field, the 'assistant' role is named 'model', and message
        text lives under 'parts'. We translate here so callers can keep
        using the familiar OpenAI message shape.

        :param messages: list of ``{'role': ..., 'content': ...}`` dicts
            (roles: 'system', 'user', 'assistant')
        :return: the model's reply text
        """
        system_parts = [m['content'] for m in messages if m['role'] == 'system']
        contents = [
            {
                'role': 'model' if m['role'] == 'assistant' else 'user',
                'parts': [{'text': m['content']}],
            }
            for m in messages
            if m['role'] != 'system'
        ]
        config = {'system_instruction': "\n".join(system_parts)} if system_parts else None
        response = self.client.models.generate_content(
            model=self.model,
            contents=contents,
            config=config,
        )
        # Bug fix: genai responses expose `.text`; `.choices[0].message.content`
        # is the OpenAI SDK shape and raised AttributeError here.
        return response.text


if __name__ == '__main__':
    service = GeminiService()
    # service.create_db()  # initialize the vector database first

    question = ""

    system_prompt = service.prompt(question)
    print(f"prompt: \n{system_prompt}")
    reply = service.chat([
        {'role': 'system', 'content': system_prompt},
        {'role': 'user', 'content': question},
    ])
    print(reply)
