from src.module.Model import Model

from langchain.globals import set_llm_cache
from langchain_community.cache import SQLiteCache
from langchain_core.output_parsers import StrOutputParser

# Register a global SQLite-backed cache: repeated identical LLM calls are
# served from .langchain.db instead of hitting the model again.
set_llm_cache(SQLiteCache(database_path=".langchain.db"))

class CacheMessage(Model):
    """Demo of LLM response caching: identical prompts reuse cached replies."""

    def with_invoke(self, question):
        """Pipe the model through a string parser, invoke it, and print the text."""
        pipeline = self.model | StrOutputParser()
        answer = pipeline.invoke(question)
        print(answer)

    def start(self):
        """Send the same prompt twice; the second call should hit the cache."""
        prompt = "Tell me a joke"
        self.model.invoke(prompt)
        print('----->')
        self.model.invoke(prompt)
