from sklearn.metrics import accuracy_score
from os.path import join, abspath
import os
import json

# Directory containing this script; all data and scheme dirs live beside it.
work_dir = abspath(join(__file__, '..'))

# Load the gold labels: dev.json is JSON-lines, one record per line with an
# integer-like 'label' field.
with open(join(work_dir, 'dev.json'), 'r', encoding='utf8') as dev_file:
    ground_truth = [int(json.loads(line)['label']) for line in dev_file]


def evaluate(scheme_path: str, result_file_name: str) -> None:
    """Score one scheme's prediction file against the module-level ground truth.

    Reads ``<work_dir>/<scheme_path>/output/<result_file_name>`` (JSON-lines,
    each record carrying an integer prediction at ``result.index``) and prints
    the accuracy against ``ground_truth``.

    Args:
        scheme_path: Directory name of the scheme, relative to ``work_dir``.
        result_file_name: Prediction file name inside the scheme's ``output/``.

    Raises:
        ValueError: If the number of predictions does not match the number
            of ground-truth labels.
    """
    result_path = join(work_dir, scheme_path, 'output/', result_file_name)
    with open(result_path, encoding='utf8') as f:
        # Iterate the file directly instead of readlines(): same order,
        # no intermediate list of raw lines.
        predictions = [int(json.loads(line)['result']['index']) for line in f]
    # Validate with an explicit exception rather than `assert`, which is
    # silently stripped when Python runs with -O.
    if len(ground_truth) != len(predictions):
        raise ValueError(
            f'prediction count ({len(predictions)}) does not match '
            f'ground-truth count ({len(ground_truth)}) for {result_path}'
        )
    print(f'{scheme_path}, {result_file_name}, accuracy_score:'
          f' {accuracy_score(ground_truth, predictions)}')


# Shot settings to evaluate for every scheme directory.
# (Previously this list was assigned to `shot_mode` and then immediately
# shadowed by the inner loop variable, which re-declared the same literal.)
SHOT_MODES = ['few_shot', 'one_shot', 'zero_shot']

# Walk the working tree and score each scheme directory's predictions.
# NOTE(review): os.walk recurses into every level; scheme* dirs are
# presumably only at the top level — confirm nested matches are intended.
for _, dirs, _ in os.walk(work_dir):
    for dir_path in dirs:
        # Check the directory name once, not once per shot mode.
        if not dir_path.startswith('scheme'):
            continue
        for shot_mode in SHOT_MODES:
            evaluate(dir_path, f'dev_{shot_mode}.json')
