pingnie committed
Commit 07fa1fd
1 Parent(s): 3655a9e

add debug info

Files changed (1):
  1. backend-cli.py: +7 -10
backend-cli.py CHANGED
@@ -420,9 +420,9 @@ def get_args():
     parser = argparse.ArgumentParser(description="Run the backend")
     parser.add_argument("--debug", action="store_true", help="Run in debug mode")
     # debug parameters
-    parser.add_argument("--task", type=str, default="selfcheckgpt", help="Task to debug")
-    parser.add_argument("--model", type=str, default="facebook/opt-1.3b", help="Model to debug")
-    parser.add_argument("--precision", type=str, default="float16", help="Precision to debug")
+    parser.add_argument("--task", type=str, default="selfcheckgpt,mmlu", help="Task to debug")
+    parser.add_argument("--model", type=str, default="mistralai/Mixtral-8x7B-Instruct-v0.1,mistralai/Mixtral-8x7B-v0.1", help="Model to debug")
+    parser.add_argument("--precision", type=str, default="float32,float16,8bit,4bit", help="Precision to debug")
     parser.add_argument("--inference-framework", type=str, default="hf-chat", help="Inference framework to debug")
     parser.add_argument("--limit", type=int, default=None, help="Limit for the number of samples")
     return parser.parse_args()
@@ -435,13 +435,10 @@ if __name__ == "__main__":
     if local_debug:
         # debug_model_names = [args.model] # Use model from arguments
         # debug_task_name = [args.task] # Use task from arguments
-        debug_model_names = ["mistralai/Mixtral-8x7B-Instruct-v0.1", "mistralai/Mixtral-8x7B-v0.1",
-                             "databricks/dbrx-instruct", "databricks/dbrx-base",
-                             "mistralai/Mixtral-8x22B-v0.1", "mistralai/Mixtral-8x22B-Instruct-v0.1", "alpindale/WizardLM-2-8x22B",
-                             "CohereForAI/c4ai-command-r-plus"] # Use model from arguments
-        debug_task_name = ['mmlu', 'selfcheckgpt'] # Use task from arguments
-        # precisions = ['4bit', '8bit']
-        precisions = ['float32', 'float16']
+        debug_model_names = args.model.split(",")
+        debug_task_name = args.task.split(",")
+        precisions = args.precision.split(",")
+        print(f"debug_model_names: {debug_model_names}, debug_task_name: {debug_task_name}, precisions: {precisions}")
         task_lst = TASKS_HARNESS.copy()
         for precision in precisions:
             for debug_model_name in debug_model_names:
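
To check the behavior of the new comma-separated flags in isolation, here is a minimal, self-contained sketch: the argparse defaults and the split(",")/print lines are copied from the diff above, while the inner print stands in for the actual evaluation call (TASKS_HARNESS and the rest of the backend are omitted), so the loop body is purely illustrative rather than the real implementation.

import argparse

# Trimmed copy of get_args(): only the flags touched by this commit.
def get_args():
    parser = argparse.ArgumentParser(description="Run the backend")
    parser.add_argument("--debug", action="store_true", help="Run in debug mode")
    parser.add_argument("--task", type=str, default="selfcheckgpt,mmlu", help="Task to debug")
    parser.add_argument("--model", type=str,
                        default="mistralai/Mixtral-8x7B-Instruct-v0.1,mistralai/Mixtral-8x7B-v0.1",
                        help="Model to debug")
    parser.add_argument("--precision", type=str,
                        default="float32,float16,8bit,4bit",
                        help="Precision to debug")
    return parser.parse_args()

if __name__ == "__main__":
    args = get_args()
    # split(",") does no stripping, so values should be passed without spaces
    # around the commas (as in the defaults above).
    debug_model_names = args.model.split(",")
    debug_task_name = args.task.split(",")
    precisions = args.precision.split(",")
    print(f"debug_model_names: {debug_model_names}, debug_task_name: {debug_task_name}, precisions: {precisions}")

    # Illustrative stand-in for the real evaluation: the debug run sweeps the
    # full precision x model cross-product for the requested tasks.
    for precision in precisions:
        for debug_model_name in debug_model_names:
            print(f"would evaluate {debug_model_name} at {precision} on tasks {debug_task_name}")

As a usage example, an invocation such as python backend-cli.py --debug --model mistralai/Mixtral-8x7B-v0.1 --precision float16,4bit would restrict the sweep to one model at two precisions without editing the source, which is the point of replacing the hardcoded lists; this command line is hypothetical in that it assumes the script is launched directly and that --debug enables the local_debug path shown above.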