pminervini committed on
Commit
ad7bcbf
1 Parent(s): 277f064
Files changed (1) hide show
  1. src/backend/run_eval_suite.py +1 -1
src/backend/run_eval_suite.py CHANGED
@@ -17,7 +17,7 @@ def run_evaluation(eval_request: EvalRequest, task_names, num_fewshot, batch_siz
17
  results = evaluator.simple_evaluate(model="hf-causal-experimental", # "hf-causal"
18
  model_args=eval_request.get_model_args(),
19
  tasks=task_names, num_fewshot=num_fewshot,
20
- batch_size=batch_size, device=device, ise_cache=use_cache,
21
  limit=limit, write_out=True, output_base_path="logs")
22
 
23
  results["config"]["model_dtype"] = eval_request.precision
 
17
  results = evaluator.simple_evaluate(model="hf-causal-experimental", # "hf-causal"
18
  model_args=eval_request.get_model_args(),
19
  tasks=task_names, num_fewshot=num_fewshot,
20
+ batch_size=batch_size, device=device, use_cache=use_cache,
21
  limit=limit, write_out=True, output_base_path="logs")
22
 
23
  results["config"]["model_dtype"] = eval_request.precision