import sys
import os


def run_shots_prompt_expr(schemes):
    """Run the few-shot prompt experiments for every implement in *schemes*.

    Phase 1 pre-computes and caches the task files on disk for every
    (scheme, prompt-mode) pair; phase 2 replays each pair through a shared
    Executor and writes one result JSON per pair under
    ``<impl.work_dir()>/result/``.

    Relies on module-level globals bound in the ``__main__`` block:
    ``tokenizer``, ``test_dev``, ``seed``, ``insert_start_token``,
    ``only_calculate_last``, ``execute_threads`` and ``shots_prompt_modes``.

    Args:
        schemes: iterable of implement classes; no-op when empty.
    """
    if not schemes:
        return

    # Imported lazily so the model/vocab is only loaded when actually needed.
    from backend.experiment.model import model_name, vocab_name
    from backend.experiment.framework.executor import Executor

    # Maps prompt-mode name -> cache/result file names. The names depend only
    # on vocab/model/mode/seed/flags (not on the scheme), so keying by mode
    # name alone still works with several schemes.
    task_config = {}

    for impl in schemes:
        for mode, name in shots_prompt_modes():
            cache_filename = f'{vocab_name}_{name}_{seed}'
            if insert_start_token:
                cache_filename += '_start_token'
            if only_calculate_last:
                cache_filename += '_only_last_shot'
            result_file_name = f'result_{model_name}_{name}_{seed}'
            if test_dev:
                result_file_name += '_dev'
            if insert_start_token:
                result_file_name += '_start_token'

            cache_filename += '.json'

            if not os.path.exists(
                    os.path.join(impl.cache_dir(), cache_filename)):
                # Pre-compute the tasks and cache them on disk, but do not
                # keep them resident in memory.
                impl(
                    tokenizer=tokenizer,
                    test_dev=test_dev,
                    seed=seed
                ).get_tasks(
                    mode=mode,
                    insert_start_token=insert_start_token,
                    cache_filename=cache_filename,
                    only_calculate_last=only_calculate_last
                )

            task_config[name] = {
                'cache_filename': cache_filename,
                'result_file_name': result_file_name + '.json'
            }

    # NOTE(review): the lambda closes over `impl` with late binding, so every
    # invocation of task_handler_builder uses whichever scheme the loop below
    # is currently processing -- confirm that Executor calls the builder per
    # execute() rather than caching a handler at construction time.
    executor = Executor(
        execute_threads=execute_threads,
        task_handler_builder=lambda: impl.get_task_handler()
    )

    for impl in schemes:
        for mode, name in shots_prompt_modes():
            config = task_config[name]

            result_file_dir = os.path.join(impl.work_dir(), 'result')

            # exist_ok avoids the check-then-create race of the original
            # os.path.exists() guard.
            os.makedirs(result_file_dir, exist_ok=True)

            result_file_path = os.path.join(
                result_file_dir, config['result_file_name'])
            executor.execute(
                tasks=impl(
                    tokenizer=tokenizer,
                    test_dev=test_dev,
                    seed=seed
                ).get_tasks(
                    mode=mode,
                    insert_start_token=insert_start_token,
                    cache_filename=config['cache_filename'],
                    only_calculate_last=only_calculate_last
                ),
                result_file_path=result_file_path
            )


def run_raw_text_perplexity_expr(impl_class):
    """Run the raw-text perplexity experiment for one implement class.

    Builds cache/result file names from the current vocab/model (suffixed
    with ``_start_token`` when the flag is enabled), then executes the
    implement's tasks and writes the results to
    ``<impl.work_dir()>/result/<result_file_name>.json``.

    Relies on module-level globals bound in the ``__main__`` block:
    ``tokenizer``, ``insert_start_token``, ``execute_threads`` and
    ``Executor``.

    Args:
        impl_class: implement class to instantiate and run.
    """
    # Imported lazily so the model/vocab is only loaded when actually needed.
    from backend.experiment.model import vocab_name, model_name

    impl = impl_class(tokenizer=tokenizer)

    cache_filename = f'{vocab_name}'
    if insert_start_token:
        cache_filename += '_start_token'

    result_file_name = f'{model_name}'
    if insert_start_token:
        result_file_name += '_start_token'

    result_file_dir = os.path.join(impl.work_dir(), 'result')

    # exist_ok avoids the check-then-create race of the original
    # os.path.exists() guard.
    os.makedirs(result_file_dir, exist_ok=True)

    result_file_path = os.path.join(result_file_dir, result_file_name + '.json')

    Executor(
        execute_threads=execute_threads,
        task_handler_builder=lambda: impl.get_task_handler()
    ).execute(
        tasks=impl.get_tasks(
            # BUG FIX: this was hard-coded to True, silently ignoring the
            # insert_start_token flag even though the cache/result file names
            # above are suffixed conditionally on that same flag.
            insert_start_token=insert_start_token,
            cache_filename=cache_filename + '.json'
        ),
        result_file_path=result_file_path
    )


def main():
    """Partition the configured schemes by kind and run each experiment.

    Subclasses of ShotsPromptImplement are batched through
    run_shots_prompt_expr; subclasses of PromptPerplexityImplement each run
    individually through run_raw_text_perplexity_expr.

    Raises:
        ValueError: if a configured scheme matches neither base class.
    """
    shots_prompt_schemes = []
    raw_text_ppl_schemes = []
    for scheme in execute_schemes():
        if issubclass(scheme, ShotsPromptImplement):
            shots_prompt_schemes.append(scheme)
        elif issubclass(scheme, PromptPerplexityImplement):
            raw_text_ppl_schemes.append(scheme)
        else:
            # Error message reworded from the original broken English
            # ('not recognize implement class').
            raise ValueError(f'unrecognized implement class: {scheme}')

    run_shots_prompt_expr(shots_prompt_schemes)
    for scheme in raw_text_ppl_schemes:
        run_raw_text_perplexity_expr(scheme)


if __name__ == '__main__':
    # Make the project root importable: abspath(__file__/../../..) resolves to
    # two directories above this file's directory, which must contain the
    # `backend` package for the absolute imports below to work when this file
    # is run directly as a script.
    sys.path.append(os.path.abspath(os.path.join(__file__, '../../..')))
    # These imports must come AFTER the sys.path tweak above. The names they
    # bind (tokenizer, Executor, CLI args, implement base classes) become
    # module-level globals that the functions defined in this file read.
    from backend.experiment.tokenizer import tokenizer
    from backend.experiment.framework.executor import Executor
    from backend.experiment.args import execute_schemes, execute_threads, \
        test_dev, insert_start_token, only_calculate_last, seed, \
        shots_prompt_modes
    from backend.experiment.framework.shots_prompt.implement import \
        ShotsPromptImplement
    from backend.experiment.framework.implement import \
        PromptPerplexityImplement

    main()
