Update functions.py
functions.py (+5 -4)
@@ -140,7 +140,7 @@ def process_corpus(corpus, _tok, title, _embeddings, chunk_size=200, overlap=50)
         texts,
         _embeddings,
         index_name = "earnings-embeddings",
-        namespace = f'{title
+        namespace = f'{title}-earnings',
         metadatas = [
             {'source':i} for i in range(len(texts))]
     )
@@ -169,15 +169,14 @@ def embed_text(query,corpus,title,embedding_model,_emb_tok,chain_type='stuff'):
 
     '''Embed text and generate semantic search scores'''
 
+    title = title.split()[0].lower()
+
     embeddings = gen_embeddings(embedding_model)
 
-    title = title[0]
     docsearch = process_corpus(corpus,_emb_tok,title, embeddings)
 
     docs = docsearch.similarity_search_with_score(query, k=3, namespace = f'{title}-earnings')
 
-    docs = [d[0] for d in docs]
-
     if chain_type == 'Normal':
 
         PROMPT = PromptTemplate(template=template,
@@ -195,6 +194,8 @@ def embed_text(query,corpus,title,embedding_model,_emb_tok,chain_type='stuff'):
 
     elif chain_type == 'Refined':
 
+        docs = [d[0] for d in docs]
+
         initial_qa_prompt = PromptTemplate(
             input_variables=["context_str", "question"], template=initial_qa_template
         )
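Net effect: the namespace written in process_corpus and the namespace queried in embed_text are now built from the same normalised title, so the similarity search reads from the same Pinecone namespace the texts were indexed into. A minimal sketch of the convention, using a hypothetical title string (the real titles come from whatever the app passes into embed_text):

# Sketch only: "Apple Q2 2023 Earnings Call" is a hypothetical example title.
title = "Apple Q2 2023 Earnings Call"

# Normalisation added in embed_text: keep the first word, lower-cased.
title = title.split()[0].lower()    # -> "apple"

# The same namespace string is now used on both sides of the pipeline:
namespace = f"{title}-earnings"     # -> "apple-earnings"
# write: process_corpus(...) indexes the chunks under namespace="apple-earnings"
# read:  docsearch.similarity_search_with_score(query, k=3, namespace="apple-earnings")

Moving docs = [d[0] for d in docs] into the 'Refined' branch fits the return type of similarity_search_with_score, which yields (Document, score) tuples: the refine chain only needs the bare Documents, and stripping the scores in that branch alone presumably leaves the scores available to the other chain types.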