# Gradio + Ollama chat application (Hugging Face Spaces).
# (Original file began with page-scrape residue: "Spaces: / Sleeping / Sleeping".)
import gradio as gr
from langchain.schema import AIMessage, HumanMessage
from langchain_community.llms import Ollama
# Shared LLM client used by every chat request; generous timeout (seconds)
# because a local llama3:8b can be slow to produce long completions.
llm = Ollama(model="llama3:8b", timeout=1000)
def predict(message, history):
    """Generate a chat reply from the Ollama model for a Gradio ChatInterface.

    Args:
        message: The user's latest message (str).
        history: Prior turns as a list of (human, ai) string pairs — the
            tuple format that gr.ChatInterface passes by default.

    Returns:
        The model's reply string, or an "Error: ..." description if the
        model invocation fails.
    """
    # Rebuild the full conversation in LangChain message objects so the
    # model sees the whole dialogue, not just the latest message.
    history_langchain_format = []
    for human, ai in history:
        history_langchain_format.append(HumanMessage(content=human))
        history_langchain_format.append(AIMessage(content=ai))
    history_langchain_format.append(HumanMessage(content=message))
    try:
        chat_response = llm.invoke(history_langchain_format)
    except Exception as e:
        # Best-effort boundary: surface the failure in the chat UI instead
        # of crashing the Gradio worker (e.g. Ollama unreachable / timeout).
        chat_response = "Error: " + str(e)
    return chat_response
def run():
    """Build the Gradio chat UI and serve it on port 7860.

    Binds to 0.0.0.0 so the app is reachable from outside the container
    (required when hosted on Spaces / Docker).
    """
    demo = gr.ChatInterface(predict)
    demo.launch(server_name="0.0.0.0", server_port=7860)
# Script entry point: only launch the server when executed directly,
# not when imported as a module.
if __name__ == "__main__":
    run()