Update app.py
app.py CHANGED
@@ -13,11 +13,11 @@ RAG_CHROMA = "Chroma"
 RAG_MONGODB = "MongoDB"
 
 config = {
-    "chunk_overlap": 150,
-    "chunk_size": 1500,
-    "k": 3,
-    "model_name": "gpt-4-0613",
-    "temperature": 0,
+    "chunk_overlap": 150, # document splitting
+    "chunk_size": 1500, # document splitting
+    "k": 3, # document retrieval
+    "model_name": "gpt-4-0613", # llm
+    "temperature": 0, # llm
 }
 
 def invoke(openai_api_key, rag_option, prompt):
@@ -38,26 +38,20 @@ def invoke(openai_api_key, rag_option, prompt):
     try:
         start_time_ms = round(time.time() * 1000)
 
-        if (rag_option == RAG_CHROMA):
-            #splits = document_loading_splitting()
-            #document_storage_chroma(splits)
-
-            completion, chain = rag_chain(openai_api_key, prompt)
-            result = completion["result"]
-        elif (rag_option == RAG_MONGODB):
-            #splits = document_loading_splitting()
-            #document_storage_mongodb(splits)
-
-            completion, chain = rag_chain(openai_api_key, prompt)
-            result = completion["result"]
-        else:
-            completion, chain = llm_chain(openai_api_key, prompt)
+        if (rag_option == RAG_OFF):
+            completion, chain = llm_chain(config, openai_api_key, prompt)
 
             if (completion.generations[0] != None and completion.generations[0][0] != None):
                 result = completion.generations[0][0].text
                 generation_info = completion.generations[0][0].generation_info
 
             llm_output = completion.llm_output
+        else:
+            #splits = document_loading_splitting()
+            #document_storage_mongodb(splits)
+
+            completion, chain = rag_chain(config, openai_api_key, rag_option, prompt)
+            result = completion["result"]
     except Exception as e:
         err_msg = e
 
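For context on how a shared config like this is typically consumed, below is a minimal sketch of chain helpers with the call shapes used in the new code, llm_chain(config, openai_api_key, prompt) and rag_chain(config, openai_api_key, rag_option, prompt). It assumes the legacy LangChain API (ChatOpenAI, RetrievalQA, Chroma), which matches the completion.generations / completion["result"] access patterns in invoke(); the actual helpers in this Space may differ, and the Chroma store below merely stands in for the Chroma-or-MongoDB selection that rag_option drives.

# Illustrative sketch only -- the real llm_chain / rag_chain / document_loading_splitting
# in this repo may look different.
from langchain.chains import RetrievalQA
from langchain.chat_models import ChatOpenAI
from langchain.embeddings import OpenAIEmbeddings
from langchain.schema import HumanMessage
from langchain.text_splitter import RecursiveCharacterTextSplitter
from langchain.vectorstores import Chroma

def document_loading_splitting(config, docs):
    # "chunk_size" / "chunk_overlap" are the "document splitting" entries of config
    # (the commented-out call in the diff takes no arguments; this signature is illustrative)
    splitter = RecursiveCharacterTextSplitter(chunk_size=config["chunk_size"],
                                              chunk_overlap=config["chunk_overlap"])
    return splitter.split_documents(docs)

def llm_chain(config, openai_api_key, prompt):
    # "model_name" / "temperature" are the "llm" entries of config
    llm = ChatOpenAI(model_name=config["model_name"],
                     temperature=config["temperature"],
                     openai_api_key=openai_api_key)
    # Returns an LLMResult, so invoke() can read .generations[0][0].text,
    # .generation_info and .llm_output in the RAG_OFF branch
    completion = llm.generate([[HumanMessage(content=prompt)]])
    return completion, llm

def rag_chain(config, openai_api_key, rag_option, prompt):
    llm = ChatOpenAI(model_name=config["model_name"],
                     temperature=config["temperature"],
                     openai_api_key=openai_api_key)
    # Placeholder store: the real app selects Chroma or MongoDB Atlas based on rag_option
    db = Chroma(persist_directory="./chroma_db",
                embedding_function=OpenAIEmbeddings(openai_api_key=openai_api_key))
    # "k" is the "document retrieval" entry of config
    chain = RetrievalQA.from_chain_type(llm,
                                        chain_type="stuff",
                                        retriever=db.as_retriever(search_kwargs={"k": config["k"]}))
    # Returns a dict, so invoke() reads completion["result"] in the RAG branch
    completion = chain({"query": prompt})
    return completion, chain

Passing config into both helpers, as the new calls do, keeps the splitting, retrieval, and model settings in one annotated place instead of hard-coding them inside each chain.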