shank committed on
Commit
cd968e7
·
1 Parent(s): 2a482a5

Fix: precaution to prevent infinite loop

Browse files
Files changed (1) hide show
  1. inference.py +7 -2
inference.py CHANGED
@@ -179,8 +179,13 @@ def run_episode(task_id: str) -> dict:
179
  last_result = {"reward": {"grader_score": 0.0, "cumulative_reward": 0.0}, "observation": obs}
180
  action = {}
181
 
 
 
182
  while not done:
183
- # Get LLM action using the robust helper
 
 
 
184
  try:
185
  raw = get_completion(messages)
186
  if not raw:
@@ -241,7 +246,7 @@ def main():
241
  print(f"Env: {ENV_BASE_URL}")
242
 
243
  if not has_token and "openai.com" in API_BASE_URL:
244
- print("WARNING: HF_TOKEN is missing but using default OpenAI endpoint. This may fail.")
245
 
246
  print("=" * 55)
247
 
 
179
  last_result = {"reward": {"grader_score": 0.0, "cumulative_reward": 0.0}, "observation": obs}
180
  action = {}
181
 
182
+ max_steps = obs.get("max_steps",25)
183
+ step_count = 0
184
  while not done:
185
+ step_count+=1
186
+ if(step_count>max_steps+5):
187
+ print(f"[!] Safety limit reached, breaking loop")
188
+ break
189
  try:
190
  raw = get_completion(messages)
191
  if not raw:
 
246
  print(f"Env: {ENV_BASE_URL}")
247
 
248
  if not has_token and "openai.com" in API_BASE_URL:
249
+ print("WARNING: HF_TOKEN is missing. API calls will likely fail.")
250
 
251
  print("=" * 55)
252