kaikaidai and bergr7f committed
Commit 183a44f · verified · Parent: 89d760f

fix/update-flow-judge-model (#9)

- fix: new flow judge url + set default hps (2c19fc2075cbd9af183eae497dd0970474ef75e8)


Co-authored-by: Bernardo Garcia <bergr7f@users.noreply.huggingface.co>

Files changed (1)
  1. gen_api_answer.py +5 -4
gen_api_answer.py CHANGED
@@ -149,11 +149,11 @@ def get_atla_response(model_name, prompt, system_prompt=None, max_tokens=500, te
     except Exception as e:
         return f"Error with Atla model {model_name}: {str(e)}"
 
-def get_flow_judge_response(model_name, prompt, max_tokens=500, temperature=0.1, top_p=0.95) -> str:
+def get_flow_judge_response(model_name, prompt, max_tokens=2048, temperature=0.1, top_p=0.95) -> str:
     """Get response from Flow Judge"""
     try:
         response = requests.post(
-            "https://tsukuyomi.tailfa581.ts.net/v1/chat/completions",
+            "https://arena.flow-ai.io/v1/chat/completions",
             headers={
                 "Content-Type": "application/json",
                 "Authorization": f"Bearer {flow_judge_api_key}"
@@ -165,7 +165,8 @@ def get_flow_judge_response(model_name, prompt, max_tokens=500, temperature=0.1,
                 ],
                 "max_tokens": max_tokens,
                 "temperature": temperature,
-                "top_p": top_p
+                "top_p": top_p,
+                "stop": None
             }
         )
         response.raise_for_status()
@@ -299,7 +300,7 @@ def get_model_response(
         )
     elif organization == "Flow AI":
         return get_flow_judge_response(
-            api_model, final_prompt, max_tokens, temperature
+            api_model, final_prompt, # Keep default hps
         )
     else:
         # All other organizations use Together API
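
For context, here is a minimal sketch of how the patched helper and its call site fit together after this commit. It is not the verbatim file: the requests import, how flow_judge_api_key is obtained, the "model"/"messages" payload keys, and the response-parsing line are assumptions filled in around the lines the diff shows (which follow an OpenAI-compatible chat-completions shape).

import requests

flow_judge_api_key = "YOUR_FLOW_JUDGE_API_KEY"  # assumed: loaded elsewhere in the real module


def get_flow_judge_response(model_name, prompt, max_tokens=2048, temperature=0.1, top_p=0.95) -> str:
    """Get response from Flow Judge via the new arena.flow-ai.io endpoint."""
    try:
        response = requests.post(
            "https://arena.flow-ai.io/v1/chat/completions",
            headers={
                "Content-Type": "application/json",
                "Authorization": f"Bearer {flow_judge_api_key}",
            },
            json={
                "model": model_name,  # assumed payload key, not shown in the diff
                "messages": [
                    {"role": "user", "content": prompt}  # assumed message shape
                ],
                "max_tokens": max_tokens,
                "temperature": temperature,
                "top_p": top_p,
                "stop": None,
            },
        )
        response.raise_for_status()
        # assumed: OpenAI-compatible chat-completions response structure
        return response.json()["choices"][0]["message"]["content"]
    except Exception as e:
        return f"Error with Flow Judge model {model_name}: {str(e)}"

With this change, the Flow AI branch of get_model_response passes only api_model and final_prompt ("# Keep default hps" in the diff), so the function's own defaults (max_tokens=2048, temperature=0.1, top_p=0.95) always apply for Flow Judge calls.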