pminervini committed on
Commit
652d88f
1 Parent(s): 6411ad7
Files changed (1) hide show
  1. src/backend/run_eval_suite.py +4 -1
src/backend/run_eval_suite.py CHANGED
@@ -1,4 +1,6 @@
1
  from lm_eval import tasks, evaluator, utils
 
 
2
  from src.backend.manage_requests import EvalRequest
3
 
4
  import logging
@@ -10,7 +12,8 @@ def run_evaluation(eval_request: EvalRequest, task_names, num_fewshot, batch_siz
10
  if limit:
11
  print("WARNING: --limit SHOULD ONLY BE USED FOR TESTING. REAL METRICS SHOULD NOT BE COMPUTED USING LIMIT.")
12
 
13
- # task_names = utils.pattern_match(task_names, tasks.ALL_TASKS)
 
14
 
15
  print(f"Selected Tasks: {task_names}")
16
 
 
1
  from lm_eval import tasks, evaluator, utils
2
+ from lm_eval.tasks import initialize_tasks
3
+
4
  from src.backend.manage_requests import EvalRequest
5
 
6
  import logging
 
12
  if limit:
13
  print("WARNING: --limit SHOULD ONLY BE USED FOR TESTING. REAL METRICS SHOULD NOT BE COMPUTED USING LIMIT.")
14
 
15
+ initialize_tasks('INFO')
16
+ task_names = utils.pattern_match(task_names, tasks.ALL_TASKS)
17
 
18
  print(f"Selected Tasks: {task_names}")
19