kartavya17 committed on
Commit
0a77cdf
1 Parent(s): 8f4a3a7

Upload 2 files

Files changed (3)
  1. .gitattributes +1 -0
  2. app.py +57 -0
  3. us_census.pdf +3 -0
.gitattributes CHANGED
@@ -33,3 +33,4 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
  *.zip filter=lfs diff=lfs merge=lfs -text
  *.zst filter=lfs diff=lfs merge=lfs -text
  *tfevents* filter=lfs diff=lfs merge=lfs -text
+ us_census.pdf filter=lfs diff=lfs merge=lfs -text
app.py ADDED
@@ -0,0 +1,57 @@
+ import streamlit as st
+ import os
+ from langchain_groq import ChatGroq
+ from langchain.embeddings import HuggingFaceEmbeddings
+ from langchain.text_splitter import RecursiveCharacterTextSplitter
+ from langchain.chains.combine_documents import create_stuff_documents_chain
+ from langchain_core.prompts import ChatPromptTemplate
+ from langchain.chains import create_retrieval_chain
+ from langchain.vectorstores import FAISS
+ from langchain_community.document_loaders import PyPDFLoader
+
+ from dotenv import load_dotenv
+ load_dotenv()
+
+ # Load the Groq API key from the environment (.env file).
+ groq_api_key = os.getenv("groq_api_key")
+
+ st.title("ChatGroq with LLAMA3 Demo :sparkles:")
+
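+ # Chat model: Llama 3 8B served through the Groq API.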
+ llm = ChatGroq(groq_api_key=groq_api_key, model_name="llama3-8b-8192")
+
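+ # Prompt template: retrieved document chunks fill {context}; the user's question fills {input}.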
+ prompt = ChatPromptTemplate.from_template(
+     """
+     Answer the questions based on the provided context only.
+     Please provide an accurate response to the question and explain it thoroughly.
+     <context>
+     {context}
+     </context>
+     Question: {input}
+     """
+ )
+
+
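+ # Build the FAISS vector store once per session: load the PDF, split it into chunks, and embed them.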
+ def vector_embeddings():
+     if "vectors" not in st.session_state:
+         st.session_state.embeddings = HuggingFaceEmbeddings()
+         st.session_state.loader = PyPDFLoader("us_census.pdf")  # Data ingestion
+         st.session_state.docs = st.session_state.loader.load()  # Document loading
+         st.session_state.text_splitter = RecursiveCharacterTextSplitter(chunk_size=1000, chunk_overlap=200)  # Chunk creation
+         st.session_state.final_documents = st.session_state.text_splitter.split_documents(st.session_state.docs[:20])  # Split the first 20 pages into chunks
+         st.session_state.vectors = FAISS.from_documents(st.session_state.final_documents, st.session_state.embeddings)  # FAISS index over HuggingFace embeddings
+
+ prompt1 = st.text_input("Enter your question: ")
+
+
+ if st.button("Document Embeddings"):
+     vector_embeddings()
+     st.write("Vector store DB is ready.")
+
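+ # Answer flow: retrieve relevant chunks from the vector store, stuff them into the prompt, and query the LLM.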
+ if prompt1:
+     if "vectors" not in st.session_state:
+         vector_embeddings()  # Build the vector store if the button has not been clicked yet.
+     document_chain = create_stuff_documents_chain(llm, prompt)
+     retriever = st.session_state.vectors.as_retriever()
+     retrieval_chain = create_retrieval_chain(retriever, document_chain)
+
+     response = retrieval_chain.invoke({'input': prompt1})
+     st.write(response['answer'])
us_census.pdf ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:6e4343c41dc08f071d5b92522b8bb49a588a1d943886e8f1b6b0da151d0884d9
+ size 1154739