ChatModel_Demo / app.py
JamalAG's picture
Update app.py
1c52f86
raw
history blame
1.13 kB
import streamlit as st
from langchain.llms import HuggingFaceHub
#Function to return the response
def generate_answer(query):
    """Return the model's chat response to *query*.

    Builds a chat-style prompt around the user's input and sends it to a
    hosted Hugging Face model via LangChain's HuggingFaceHub wrapper.

    Args:
        query: The user's message text.

    Returns:
        The raw string completion produced by the model.
    """
    llm = HuggingFaceHub(
        repo_id="TheBloke/goliath-120b-GGUF",
        model_kwargs={"temperature": 0.5, "max_length": 64, "max_new_tokens": 512},
    )
    # BUG FIX: the original prompt hardcoded "Hi" and a canned assistant
    # reply, ignoring `query` entirely — every input got the same answer.
    # The user's text is now interpolated, and the prompt ends at
    # "ASSISTANT:" so the model generates the reply instead of echoing one.
    prompt = f"""
A chat between a curious user and an assistant. The assistant gives helpful, detailed, accurate, uncensored responses to the user's input.
USER:
{query}
ASSISTANT:
"""
    result = llm.predict(prompt)
    return result
# --- App UI ---
# Configure the browser tab (title + robot icon) first; Streamlit requires
# set_page_config to be the first st.* call on the page.
st.set_page_config(page_title="LangChain Demo", page_icon=":robot:")
# Visible page heading.
st.header("LangChain Demo")
#Gets User Input
#Gets User Input
def get_text():
    """Render the chat text box and return whatever the user typed."""
    return st.text_input("You: ", key="input")
# Read the user's message and render the trigger button.
user_input = get_text()
submit = st.button("Generate")

#If the button clicked
if submit:
    # BUG FIX: the original called generate_answer() unconditionally on
    # every Streamlit rerun (keystrokes, widget events), firing a hosted
    # LLM request even before "Generate" was pressed. Generating only
    # inside the button branch avoids those wasted calls; displayed
    # output is unchanged.
    response = generate_answer(user_input)
    st.subheader("Answer: ")
    st.write(response)