Deepak7376 committed • Commit 111afc4 • 1 parent: a63eb02

Update app.py

app.py CHANGED
@@ -44,14 +44,9 @@ def data_ingestion():
     #create embeddings here
     embeddings = SentenceTransformerEmbeddings(model_name="all-MiniLM-L6-v2")
     vectordb = FAISS.from_documents(splits, embeddings)
-
-
-    # #create vector store here
-    # db = Chroma.from_documents(texts, embeddings, persist_directory=persist_directory, client_settings=CHROMA_SETTINGS)
-    # db.persist()
-    # db=None
-
+    vectordb.save_local("faiss_index")

+
 @st.cache_resource
 def qa_llm():
     pipe = pipeline(
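The first hunk replaces the commented-out Chroma persistence with FAISS's own on-disk format: the index built in data_ingestion() is written with save_local so that qa_llm() can reload it later instead of re-embedding everything. A minimal sketch of that round trip, assuming the LangChain wrappers used in the diff; the docs list below is made up for illustration:

```python
# Sketch of the save/load round trip introduced by this commit
# (assumed setup, not the app's full ingestion code).
from langchain.docstore.document import Document
from langchain.embeddings import SentenceTransformerEmbeddings
from langchain.vectorstores import FAISS

docs = [Document(page_content="LangChain wraps FAISS for similarity search.")]
embeddings = SentenceTransformerEmbeddings(model_name="all-MiniLM-L6-v2")

# Build the index during ingestion and persist it to disk ...
vectordb = FAISS.from_documents(docs, embeddings)
vectordb.save_local("faiss_index")

# ... then reload it elsewhere without re-embedding the documents.
# (Recent LangChain releases also require allow_dangerous_deserialization=True.)
vectordb = FAISS.load_local("faiss_index", embeddings)
print(vectordb.similarity_search("FAISS", k=1)[0].page_content)
```

The same embedding model name has to be passed to both from_documents and load_local, since the stored vectors are only meaningful with the encoder that produced them.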
@@ -68,23 +63,24 @@ def qa_llm():
     llm = HuggingFacePipeline(pipeline=pipe)
     embeddings = SentenceTransformerEmbeddings(model_name="all-MiniLM-L6-v2")

-
+    vectordb = FAISS.load_local("faiss_index", embeddings)
     retriever = db.as_retriever()
-
-
-
-
-
+
+    # Build a QA chain
+    qa_chain = RetrievalQA.from_chain_type(
+        llm=llm,
+        chain_type="stuff",
+        retriever=vectordb.as_retriever(),
     )
-    return
+    return qa_chain

 def process_answer(instruction):
     response = ''
     instruction = instruction
-
-
-
-    return
+    qa_chain = qa_llm()
+
+    generated_text = qa_chain.run(instruction)
+    return generated_text

 def get_file_size(file):
     file.seek(0, os.SEEK_END)
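The second hunk fills in qa_llm() and process_answer(): the cached function now reloads the FAISS index, wraps the transformers pipeline in HuggingFacePipeline, and returns a RetrievalQA chain, while process_answer() simply runs that chain on the user's question. Note that the leftover retriever = db.as_retriever() line still references an undefined db and is never used; the chain is built from vectordb.as_retriever(). A sketch of the same wiring outside Streamlit, with google/flan-t5-small standing in for the model checkpoint, which is not visible in this hunk:

```python
# Assumed reconstruction of the qa_llm()/process_answer() flow after this
# commit; the checkpoint name and generation settings are placeholders.
from langchain.chains import RetrievalQA
from langchain.embeddings import SentenceTransformerEmbeddings
from langchain.llms import HuggingFacePipeline
from langchain.vectorstores import FAISS
from transformers import pipeline

def qa_llm():
    pipe = pipeline(
        "text2text-generation",
        model="google/flan-t5-small",  # placeholder for the app's actual model
        max_length=256,
    )
    llm = HuggingFacePipeline(pipeline=pipe)

    embeddings = SentenceTransformerEmbeddings(model_name="all-MiniLM-L6-v2")
    vectordb = FAISS.load_local("faiss_index", embeddings)

    # "stuff" concatenates the retrieved chunks directly into the prompt.
    return RetrievalQA.from_chain_type(
        llm=llm,
        chain_type="stuff",
        retriever=vectordb.as_retriever(),
    )

def process_answer(instruction):
    qa_chain = qa_llm()
    return qa_chain.run(instruction)  # .run() returns just the answer string
```

In the app itself, @st.cache_resource keeps the pipeline, embeddings and chain in memory across Streamlit reruns, so the chain is constructed once rather than on every interaction.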
@@ -162,11 +158,5 @@ def main():
     display_conversation(st.session_state)


-
-
-
-
-
 if __name__ == "__main__":
     main()
-
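One limitation worth noting, not addressed by this commit: process_answer() returns only the generated text, so the UI cannot show which chunks the answer was grounded in. RetrievalQA supports that with return_source_documents=True, at the cost of calling the chain as a callable instead of through .run(). A self-contained sketch with a stubbed LLM, purely to illustrate the shape of the result dict:

```python
# Hypothetical variant, not part of the commit: surface the retrieved chunks
# alongside the answer. FakeListLLM stands in for the real HuggingFacePipeline.
from langchain.chains import RetrievalQA
from langchain.embeddings import SentenceTransformerEmbeddings
from langchain.llms.fake import FakeListLLM
from langchain.vectorstores import FAISS

embeddings = SentenceTransformerEmbeddings(model_name="all-MiniLM-L6-v2")
vectordb = FAISS.from_texts(
    ["FAISS stores dense vectors for fast similarity search."], embeddings
)
llm = FakeListLLM(responses=["It stores dense vectors."])

qa_chain = RetrievalQA.from_chain_type(
    llm=llm,
    chain_type="stuff",
    retriever=vectordb.as_retriever(),
    return_source_documents=True,  # extra field compared with the commit's chain
)

result = qa_chain({"query": "What does FAISS store?"})
print(result["result"])                 # the answer string
for doc in result["source_documents"]:  # the chunks used as context
    print(doc.page_content)
```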