from langchain.prompts import PromptTemplate
import gradio as gr
from LLMs import myllm
# Module-level pipeline setup: wrap the project-local LLM in a pass-through
# prompt so it can be driven as a LangChain (LCEL) chain.
hf = myllm()  # project-local LLM wrapper from the LLMs module — opaque here
# The template forwards the user's text to the model unchanged.
template = """{question}"""
prompt = PromptTemplate.from_template(template)
# LCEL composition: format the prompt, then pipe the result into the model.
chain = prompt | hf

def greet2(name):
    """Run *name* through the module-level LLM chain as the question.

    Args:
        name: The raw user text to send to the model.

    Returns:
        Whatever ``chain.invoke`` produces for this input.
    """
    return chain.invoke({"question": name})

def alternatingly_agree(message, history):
    """Gradio ChatInterface callback: answer *message* via the LLM chain.

    Args:
        message: The latest user message.
        history: Prior chat turns; required by the ChatInterface callback
            signature but intentionally unused — each reply is stateless.

    Returns:
        The model response produced by ``greet2``.
    """
    # NOTE(review): the name is a leftover from the gradio docs example and
    # does not describe the behavior; kept unchanged because the launch call
    # below references it. (Fix applied: body was indented with 3 spaces,
    # violating PEP 8 and the file's own 4-space convention.)
    return greet2(message)

# Start the chat web UI. server_name="0.0.0.0" binds on all network
# interfaces, exposing the app beyond localhost — presumably intentional for
# container/remote deployment; NOTE(review): confirm this exposure is wanted.
gr.ChatInterface(alternatingly_agree).launch(server_name="0.0.0.0")


