# Q&A Chatbot
import os

import streamlit as st
from dotenv import load_dotenv
from langchain import HuggingFaceHub
# from langchain.llms import OpenAI  # alternative LLM backend kept from the original

load_dotenv()  # take environment variables from .env
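# Example .env file read by load_dotenv() above (the token value is a placeholder,
# not a real credential):
#   HUGGINGFACEHUB_API_TOKEN=hf_xxxxxxxxxxxxxxxxxxxx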

huggingface_token = os.getenv("HUGGINGFACEHUB_API_TOKEN")

## Load the HuggingFace Hub model (Flan-T5 XXL) used to answer questions
os.environ["HUGGINGFACEHUB_API_TOKEN"] = huggingface_token
llm_huggingface = HuggingFaceHub(
    repo_id="google/flan-t5-xxl",
    model_kwargs={"temperature": 0.6, "max_length": 64},
)
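# Note: in newer LangChain releases HuggingFaceHub is imported from
# langchain_community.llms and is deprecated in favor of HuggingFaceEndpoint;
# the import above follows the original code and assumes an older LangChain version.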


def get_huggingface_response(question):
    """Send the question to the HuggingFace Hub LLM and return its answer."""
    # llm = OpenAI(model_name="text-davinci-003", temperature=0.5)  # original OpenAI alternative
    return llm_huggingface(question)

## Initialize the Streamlit app

st.set_page_config(page_title="Q&A Demo")
st.title("Q&A Chatbot using LangChain")

st.markdown(
    """
    <style>
    body {
        background-color: #f0f2f6;
        color: #333333;
    }
    </style>
    """,
    unsafe_allow_html=True
)

st.sidebar.title("Options")
st.sidebar.info("Welcome to the Q&A Chatbot!")

st.write("Ask me anything and I'll do my best to answer!")

question = st.text_input("Question: ", key="input")

submit = st.button("Ask the question")

## Query the model only when the ask button is clicked and the question is non-empty
if submit and question.strip():
    response = get_huggingface_response(question)
    st.subheader("The Answer is")
    st.write(response)
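# Minimal run sketch (assumes this script is saved as app.py; use the actual file name):
#   streamlit run app.py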