|
from langchain import PromptTemplate, HuggingFaceHub, LLMChain |
|
import streamlit as st |
|
import os |
|
|
|
# SECURITY: hard-coded API token committed to source — this key is exposed
# and should be rotated, then loaded from the environment or a secrets
# manager rather than embedded in code.
os.environ["HUGGINGFACEHUB_API_TOKEN"] = "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"

# Prompt template with a single {question} placeholder.
template = """Question: {question}. Answer:"""

# Hugging Face Hub repo id of the fine-tuned model to query.
model = "ThangDinh/qthang-finetuned"

# FIX: variable was misspelled "promt", which made the LLMChain
# construction below raise NameError on "prompt".
prompt = PromptTemplate(template=template, input_variables=["question"])

# Near-zero temperature -> (almost) deterministic generations.
llm = HuggingFaceHub(repo_id=model, model_kwargs={"temperature": 1e-10})

# Chain that formats the prompt and sends it to the hosted model.
llm_chain = LLMChain(prompt=prompt, llm=llm)
|
|
|
# Streamlit chat input; returns None until the user submits a message,
# so the block below runs only once a question has been entered.
question = st.chat_input("Write your question")

if question:
    st.write("loading...")

    # Run the chain on the user's question and capture the model's answer.
    answer = llm_chain.run(question)

    # FIX: previously referenced the undefined name "answer_txt" (NameError
    # on every submitted question) and mislabeled the model's answer as the
    # prompt the user sent.
    st.write(f"Answer: {answer}")

    # Server-side log marker only; not shown in the Streamlit UI.
    print("done")
|
|
|
|