# Streamlit app: answer user questions against a local FAISS index using a
# HuggingFace-hosted LLM via a LangChain RetrievalQA chain.
import os

import qdrant_client
import streamlit as st
from langchain.chains import RetrievalQA
from langchain.llms import HuggingFaceHub
from langchain_community.vectorstores import FAISS
from langchain_openai import OpenAIEmbeddings

# Remote Qdrant connection, configured via environment variables.
# NOTE(review): this client is created at import time but never used below —
# retrieval goes through the local FAISS index instead. Confirm whether the
# Qdrant connection is still needed or is leftover from an earlier version.
client = qdrant_client.QdrantClient(
    os.getenv("qdrant_host"), api_key=os.getenv("qdrant_key")
)
def main():
    """Render a Streamlit page that answers questions from a local FAISS index.

    Loads the "faiss_index" directory with OpenAI embeddings, wires a
    HuggingFace-hosted BLOOM model into a "stuff" RetrievalQA chain, and
    displays the query/answer pair for whatever question the user types.
    """
    st.set_page_config(page_title="Ask Qdrant", page_icon=":books:")
    st.header("Ask your remote database 💬")

    embeddings = OpenAIEmbeddings()
    # NOTE(review): recent langchain-community releases require
    # allow_dangerous_deserialization=True here — confirm the pinned version
    # before upgrading.
    db = FAISS.load_local("faiss_index", embeddings)

    llm = HuggingFaceHub(
        repo_id="bigscience/bloom",
        model_kwargs={"temperature": 0.2, "max_length": 512, "max_new_tokens": 100},
    )
    qa = RetrievalQA.from_chain_type(
        llm=llm, chain_type="stuff", retriever=db.as_retriever()
    )

    # Show user input and echo the chain's answer back to the page.
    user_question = st.text_input("Ask a question about Mastercard's available APIs:")
    if user_question:
        answer = qa.invoke(user_question)
        st.write(f"Question: {answer['query']}")
        st.write(f"Answer: {answer['result']}")

    # Footer: center the logo by padding it with two empty side columns.
    col1, col2, col3 = st.columns([1, 6, 1])
    with col1:
        st.write("")
    with col2:
        st.write("")
    with col3:
        st.image("mc_symbol_opt_73_3x.png")
# Script entry point: run the Streamlit page when executed directly.
if __name__ == "__main__":
    main()