from langchain import PromptTemplate
from langchain.chat_models import ChatOpenAI
from langchain.text_splitter import LatexTextSplitter
from os import environ as env

from langchain.schema import (
    AIMessage,
    HumanMessage,
    SystemMessage
)
from langchain.prompts.chat import (
    ChatPromptTemplate,
    HumanMessagePromptTemplate,
    SystemMessagePromptTemplate,
)


class AIProxy:
    """Thin wrapper around a LangChain ``ChatOpenAI`` model.

    Provides helpers to build a chat prompt from a single-placeholder
    template and to send a message list to the model.
    """

    def __init__(self) -> None:
        # NOTE(review): env.get returns None when OPENAI_API_KEY is unset;
        # ChatOpenAI then falls back to its own env lookup or fails at call
        # time — confirm the desired behavior for missing keys.
        self.chat = ChatOpenAI(openai_api_key=env.get("OPENAI_API_KEY"))

    def build_message(self, template: str, the_input_block: str):
        """Build a message list from *template*.

        The template must contain exactly one placeholder named
        ``{input_block}``, which is filled with *the_input_block*.

        Returns the formatted prompt as a list of chat messages suitable
        for passing to :meth:`chat_with_ai`.
        """
        human_message_prompt = HumanMessagePromptTemplate.from_template(template)
        chat_prompt = ChatPromptTemplate.from_messages([human_message_prompt])
        return chat_prompt.format_prompt(input_block=the_input_block).to_messages()

    def chat_with_ai(self, message):
        """Send *message* (a list of chat messages) to the model and return its reply."""
        return self.chat(message)

    # Backward-compatible alias: keep the original camelCase name working
    # for existing callers while new code uses snake_case.
    chatWithAI = chat_with_ai
