import itertools, yaml, argparse

def gen(args):
    """Generate every task combination and write them to a YAML file.

    Builds the cartesian product of verifiers x datasets x modes x
    opt-dfa settings, one task entry per combination, keyed by the
    hyphen-joined combination name.

    Args:
        args: parsed command-line namespace; uses ``verifiers``,
            ``datasets``, ``output_prefix``, ``link_latency``,
            ``fib_collection_latency`` and ``output``.
    """
    verifier_list = args.verifiers.split(',')
    dataset_list = args.datasets.split(',')
    mode_list = ['bootstrap', 'incremental']
    dfa_options = ['true', 'false']

    tasks = {}
    for verifier, dataset, mode, dfa in itertools.product(
            verifier_list, dataset_list, mode_list, dfa_options):
        # Task key doubles as the per-task output folder name.
        name = '-'.join((verifier, dataset, mode, dfa))
        tasks[name] = {
            'verifier': verifier,
            'workload': dataset,
            'mode': mode,
            'opt-dfa': dfa,
            'output-folder': f'{args.output_prefix}/{name}',
            'link-latency': args.link_latency,
            'fib-collection-latency': args.fib_collection_latency,
            'timeout': 0,
        }

    with open(args.output, 'w') as out:
        out.write(yaml.safe_dump(tasks))

if __name__ == '__main__':
    # CLI entry point: parse options, then emit the task file via gen().
    parser = argparse.ArgumentParser(description='The task generation script')
    parser.add_argument('output', help='the output task file')
    parser.add_argument('-verifiers', default='apkeep,deltaNet,libra,netplumber,ddm',
                        help='the set of verifiers')
    parser.add_argument('-datasets', default='i2,st', help='the set of datasets')
    parser.add_argument('-output-prefix', default='output',
                        help='the prefix of output folder')
    # type=int so CLI-supplied values match the integer defaults; without it
    # argparse passes strings and the YAML output would mix int and str.
    parser.add_argument('-link-latency', type=int, default=0,
                        help='the link latency, for distributed verifiers')
    parser.add_argument('-fib-collection-latency', type=int, default=0,
                        help='the fib collection latency, for centralized verifiers')
    args = parser.parse_args()
    gen(args)