# Test_NF / test_gradio.py
from langchain_community.llms import HuggingFaceEndpoint
from langchain.prompts import PromptTemplate
from langchain.schema import AIMessage, HumanMessage
from langchain.chains import LLMChain
import gradio as gr
import os
from dotenv import load_dotenv
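# Load environment variables (e.g. HUGGINGFACEHUB_API_TOKEN) from a local .env file.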
load_dotenv()
repo_id = "mistralai/Mistral-7B-Instruct-v0.2"
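# Remote text-generation endpoint served through the Hugging Face Inference API.
# The API token is picked up from the environment loaded above, so the explicit
# argument below stays commented out.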
llm = HuggingFaceEndpoint(
    repo_id=repo_id,
    # huggingfacehub_api_token=HUGGINGFACEHUB_API_TOKEN,
)
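
# Prompt template: {question} is filled with the user's message and the model is
# nudged toward step-by-step reasoning.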
template = """You're a good chatbot. Answer this request: {question}
Answer: Let's think step by step."""
prompt = PromptTemplate.from_template(template=template)
llm_chain = LLMChain(llm=llm, prompt=prompt)
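
# Callback for gr.ChatInterface: it receives the latest message and the chat
# history. The commented lines show how the history could be converted into
# LangChain messages, but here each message is answered independently.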
def predict(message, history):
    history_langchain_format = []
    # for human, ai in history:
    #     history_langchain_format.append(HumanMessage(content=human))
    #     history_langchain_format.append(AIMessage(content=ai))
    # history_langchain_format.append(HumanMessage(content=message))
    # gpt_response = llm(history_langchain_format)
    response = llm_chain.invoke(message)['text']
    return response
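
# Launch a Gradio chat UI that routes every user message through predict().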
gr.ChatInterface(predict).launch()