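"""Streamlit Q&A demo: answers a user question with Mistral-7B-Instruct-v0.2
served through the Hugging Face Inference API via LangChain."""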
import os

import streamlit as st
from dotenv import load_dotenv
# from langchain import HuggingFaceHub  # older import, superseded by HuggingFaceEndpoint
from langchain_community.llms import HuggingFaceEndpoint
from langchain.chains import LLMChain
from langchain.prompts import PromptTemplate

# Load HUGGINGFACEHUB_API_TOKEN (and any other secrets) from a local .env file.
load_dotenv()
# Confirm the token was loaded without echoing the secret value.
print("HUGGINGFACEHUB_API_TOKEN set:", "HUGGINGFACEHUB_API_TOKEN" in os.environ)
def get_llm_response(question):
    """Send the question to Mistral-7B-Instruct via the Hugging Face Inference API and return the answer."""
    huggingfacehub_api_token = os.environ["HUGGINGFACEHUB_API_TOKEN"]
    repo_id = "mistralai/Mistral-7B-Instruct-v0.2"

    # Minimal question/answer prompt.
    template = """Question: {question}
Answer: """
    prompt = PromptTemplate.from_template(template)

    # Hosted inference endpoint for the instruct model.
    llm = HuggingFaceEndpoint(
        repo_id=repo_id,
        max_length=128,
        temperature=0.5,
        huggingfacehub_api_token=huggingfacehub_api_token,
    )
    llm_chain = LLMChain(prompt=prompt, llm=llm)
    answer = llm_chain.run(question)
    print(answer)  # log the raw model output to the server console
    return answer
# Initialize the Streamlit page.
st.set_page_config(page_title="Q&A Demo")
st.header("LangChain Application")

question = st.text_input("Input: ", key="input")
submit = st.button("Ask the question")

# Query the model only after the button is clicked and a question was entered.
if submit and question:
    response = get_llm_response(question)
    st.subheader("The response is")
    st.write(response)
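# Run locally with: streamlit run app.py
# (requires HUGGINGFACEHUB_API_TOKEN in a .env file or the environment)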