chansung committed on
Commit
c4a2855
1 Parent(s): d82a572

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +5 -4
app.py CHANGED
@@ -14,6 +14,7 @@ from constants import DEFAULT_GLOBAL_CTX
14
 
15
  from pingpong import PingPong
16
  from pingpong.context import CtxLastWindowStrategy
 
17
 
18
  TOKEN = os.getenv('HF_TOKEN')
19
  MODEL_ID = 'meta-llama/Llama-2-70b-chat-hf'
@@ -109,9 +110,8 @@ def reset_chat(idx, ld, state):
109
 
110
  def internet_search(ppmanager, serper_api_key, global_context, ctx_num_lconv, device="cpu"):
111
  internet_search_ppm = copy.deepcopy(ppm)
112
- internet_search_prompt = f"My question is '{user_msg}'. Based on the conversation history, "
113
- f"give me an appropriate query to answer my question for google search. "
114
- f"You should not say more than query. You should not say any words except the query."
115
 
116
  internet_search_ppm.pingpongs[-1].ping = internet_search_prompt
117
  internet_search_prompt = build_prompts(internet_search_ppm, "", win_size=ctx_num_lconv)
@@ -161,7 +161,8 @@ async def chat_stream(
161
  yield "", uis, prompt, str(res)
162
 
163
  async for result in gen_text(
164
- prompt, hf_model=MODEL_ID, hf_token=TOKEN,
 
165
  parameters={
166
  'max_new_tokens': res_mnts,
167
  'do_sample': res_sample,
 
14
 
15
  from pingpong import PingPong
16
  from pingpong.context import CtxLastWindowStrategy
17
+ from pingpong.context import InternetSearchStrategy, SimilaritySearcher
18
 
19
  TOKEN = os.getenv('HF_TOKEN')
20
  MODEL_ID = 'meta-llama/Llama-2-70b-chat-hf'
 
110
 
111
  def internet_search(ppmanager, serper_api_key, global_context, ctx_num_lconv, device="cpu"):
112
  internet_search_ppm = copy.deepcopy(ppm)
113
+ user_msg = internet_search_ppm.pingpongs[-1].ping
114
+ internet_search_prompt = f"My question is '{user_msg}'. Based on the conversation history, give me an appropriate query to answer my question for google search. You should not say more than query. You should not say any words except the query."
 
115
 
116
  internet_search_ppm.pingpongs[-1].ping = internet_search_prompt
117
  internet_search_prompt = build_prompts(internet_search_ppm, "", win_size=ctx_num_lconv)
 
161
  yield "", uis, prompt, str(res)
162
 
163
  async for result in gen_text(
164
+ search_prompt if internet_option else prompt,
165
+ hf_model=MODEL_ID, hf_token=TOKEN,
166
  parameters={
167
  'max_new_tokens': res_mnts,
168
  'do_sample': res_sample,