# Gradio chat app backed by a locally served Ollama Mistral model.
import gradio as gr
from langchain.schema import AIMessage, HumanMessage
from langchain_community.llms import Ollama
llm = Ollama(model="mistral:7b", timeout=1000) | |
def predict(message, history):
    """Generate the assistant's next reply for a Gradio ChatInterface.

    Args:
        message: The latest user message (str).
        history: Prior turns as (user_text, assistant_text) pairs — Gradio's
            default tuple-style chat history.

    Returns:
        The model's reply as a string.
    """
    # Replay the whole conversation as LangChain messages so the model
    # has the full context, then append the new user message last.
    history_langchain_format = []
    for human, ai in history:
        history_langchain_format.append(HumanMessage(content=human))
        history_langchain_format.append(AIMessage(content=ai))
    history_langchain_format.append(HumanMessage(content=message))
    return llm.invoke(history_langchain_format)
gr.ChatInterface(predict).launch() | |