bstraehle committed on
Commit
3ddc880
1 Parent(s): c8a9d42

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +8 -8
app.py CHANGED
@@ -8,12 +8,12 @@ from trace import wandb_trace
8
 
9
  _ = load_dotenv(find_dotenv())
10
 
11
- RAG_BATCH = False # document loading, splitting, storage
12
 
13
  config = {
14
- "chunk_overlap": 150, # document splitting
15
- "chunk_size": 1500, # document splitting
16
- "k": 3, # document retrieval
17
  "model_name": "gpt-4-0314", # llm
18
  "temperature": 0, # llm
19
  }
@@ -30,8 +30,8 @@ def invoke(openai_api_key, rag_option, prompt):
30
  if (prompt == ""):
31
  raise gr.Error("Prompt is required.")
32
 
33
- if (RAG_BATCH):
34
- rag_batch(config)
35
 
36
  chain = None
37
  completion = ""
@@ -43,12 +43,12 @@ def invoke(openai_api_key, rag_option, prompt):
43
  start_time_ms = round(time.time() * 1000)
44
 
45
  if (rag_option == RAG_OFF):
46
- completion, chain, cb = llm_chain(config, openai_api_key, prompt)
47
 
48
  if (completion.generations[0] != None and completion.generations[0][0] != None):
49
  result = completion.generations[0][0].text
50
  else:
51
- completion, chain, cb = rag_chain(config, openai_api_key, rag_option, prompt)
52
 
53
  result = completion["result"]
54
  except Exception as e:
 
8
 
9
  _ = load_dotenv(find_dotenv())
10
 
11
+ RUN_RAG_BATCH = False # document loading, splitting, storage
12
 
13
  config = {
14
+ "chunk_overlap": 150, # split documents
15
+ "chunk_size": 1500, # split documents
16
+ "k": 3, # retrieve documents
17
  "model_name": "gpt-4-0314", # llm
18
  "temperature": 0, # llm
19
  }
 
30
  if (prompt == ""):
31
  raise gr.Error("Prompt is required.")
32
 
33
+ if (RUN_RAG_BATCH):
34
+ run_rag_batch(config)
35
 
36
  chain = None
37
  completion = ""
 
43
  start_time_ms = round(time.time() * 1000)
44
 
45
  if (rag_option == RAG_OFF):
46
+ completion, chain, cb = run_llm_chain(config, openai_api_key, prompt)
47
 
48
  if (completion.generations[0] != None and completion.generations[0][0] != None):
49
  result = completion.generations[0][0].text
50
  else:
51
+ completion, chain, cb = run_rag_chain(config, openai_api_key, rag_option, prompt)
52
 
53
  result = completion["result"]
54
  except Exception as e: