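# Minimal Streamlit chat app: forwards the user's question to a Hugging Face
# hosted model (ThangDinh/qthang-finetuned) through a LangChain LLMChain and
# displays the model's answer.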
from langchain import PromptTemplate, HuggingFaceHub, LLMChain
import streamlit as st
import os
os.environ["HUGGINGFACEHUB_API_TOKEN"] = "hf_..."  # replace with your own Hugging Face API token; do not commit real tokens
template = """Question: {question}. Answer:"""
model = "ThangDinh/qthang-finetuned"
prompt = PromptTemplate(template=template, input_variables=["question"])
llm = HuggingFaceHub(repo_id=model, model_kwargs={"temperature":1e-10})
llm_chain = LLMChain(prompt=prompt, llm=llm)
question = st.chat_input("Write your question")
if question:
    st.write("loading...")
    answer = llm_chain.run(question)  # query the hosted model through the chain
    st.write(f"User has sent the following prompt: {question}")
    st.write(f"Answer: {answer}")
    print("done")