karthick965938 committed
Commit 3a4c713
1 Parent(s): 0675d7b

Upload 5 files

app.py ADDED
@@ -0,0 +1,28 @@
+ import streamlit as st
+ from index import ask
+
+ st.subheader(':red[AI Chatbot] :robot_face:')
+ st.caption('This bot is here to assist you in discovering more about https://www.cardinality.ai/ :sunglasses:')
+
+ # Initialize chat history
+ if "messages" not in st.session_state:
+     st.session_state.messages = []
+
+ # Display chat messages from history on app rerun
+ for message in st.session_state.messages:
+     with st.chat_message(message["role"]):
+         st.markdown(message["content"])
+
+ # React to user input
+ if prompt := st.chat_input("What is up?"):
+     # Display user message in chat message container
+     st.chat_message("human").markdown(prompt)
+     # Add user message to chat history
+     st.session_state.messages.append({"role": "user", "content": prompt})
+
+     response = ask(prompt)
+     # Display assistant response in chat message container
+     with st.chat_message("assistant"):
+         st.markdown(response)
+     # Add assistant response to chat history
+     st.session_state.messages.append({"role": "assistant", "content": response})
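A minimal smoke test of the same flow outside the Streamlit UI, assuming OPENAI_API_KEY is exported and the faiss_index/ folder sits next to the scripts (the question string below is only an illustrative placeholder):

from index import ask

if __name__ == "__main__":
    # Calls the RetrievalQA chain built in index.py and prints the answer text
    print(ask("What does Cardinality.ai offer?"))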
faiss_index/faiss_index_index.faiss ADDED
Binary file (129 kB)
 
faiss_index/index.pkl ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:b448cfb51dbccfdb2adac81c992ec471461e3cc302c583e4729465a6db3904b2
+ size 31034
index.py ADDED
@@ -0,0 +1,29 @@
+ from langchain.document_loaders import PyPDFLoader
+ from langchain.embeddings.openai import OpenAIEmbeddings
+ from langchain.vectorstores import FAISS
+ from langchain.chains import RetrievalQA
+ from langchain.chat_models import ChatOpenAI
+ import os
+ os.environ["OPENAI_API_KEY"]  # fail fast if the key is not set; LangChain reads it from the environment
+
+ # loader = PyPDFLoader("cardinality.pdf")
+ # pages = loader.load_and_split()
+ # print(len(pages), pages)
+
+ embeddings = OpenAIEmbeddings()
+ # Create DB
+ # db = FAISS.from_documents(pages, embeddings)
+
+ # Save the DB locally
+ # db.save_local("faiss_index")
+
+ # Load the saved DB
+ new_db = FAISS.load_local("faiss_index", embeddings)
+
+ # Init LLM and retrieval chain
+ llm = ChatOpenAI()
+ qa_chain = RetrievalQA.from_chain_type(llm, retriever=new_db.as_retriever())
+
+ def ask(user_query):
+     res = qa_chain({"query": user_query})
+     return res["result"]
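The commented-out lines in index.py describe the one-time index build; assembled into a runnable sketch (assuming cardinality.pdf sits next to the script and OPENAI_API_KEY is set), it would look like this:

from langchain.document_loaders import PyPDFLoader
from langchain.embeddings.openai import OpenAIEmbeddings
from langchain.vectorstores import FAISS

loader = PyPDFLoader("cardinality.pdf")
pages = loader.load_and_split()               # split the PDF into per-page documents

embeddings = OpenAIEmbeddings()
db = FAISS.from_documents(pages, embeddings)  # embed the pages and build the FAISS index
db.save_local("faiss_index")                  # write the index files under faiss_index/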
requirements.txt ADDED
@@ -0,0 +1,5 @@
+ langchain
+ pypdf
+ openai
+ faiss-cpu
+ tiktoken