# -*- encoding: utf-8 -*-

# @File    : merge_dataset_info.py
# @Time    : 19-6-20
# @Author  : zjh

r"""
    Merge multi dataset info files into one.
"""

import argparse
from collections import Counter

from .. import common


def get_args():
    """Build the CLI parser and return the parsed command-line arguments.

    Positional arguments: one or more dataset info files and one output
    path; ``-v/--verbose`` toggles printing of merge statistics.
    """
    arg_parser = argparse.ArgumentParser(description=__doc__)
    arg_parser.add_argument(
        "dataset_infos", nargs="+",
        help="The dataset info files.")
    arg_parser.add_argument(
        "output_path",
        help="The output path of merged dataset info file.")
    arg_parser.add_argument(
        "-v", "--verbose", action="store_true",
        help="Show details of merged dataset.")
    return arg_parser.parse_args()


FLAG = get_args()


def dataset_state(dataset):
    """Print summary statistics of a dataset info dict.

    Shows the example count, the number of distinct classes, and a
    per-class count table sorted by frequency (most common first).
    """
    class_counts = Counter(dataset["classes"])
    print("Example number: ", len(dataset["paths"]))
    print("Class number: ", len(class_counts))
    print("Class distribution: ")
    row_fmt = "{:<8}{:<32}\t{:>8}\t{}"
    print(row_fmt.format("INDEX", "UUID", "COUNT", "NAME"))
    # The class key is printed in both the UUID and NAME columns.
    for index, (class_key, count) in enumerate(class_counts.most_common()):
        print(row_fmt.format(index, class_key, count, class_key))


def _check_dataset(dataset):
    if set(dataset.keys()) != {"paths", "classes", "display_names"}:
        raise ValueError('Dataset info must and only has keys: "paths", "classes", "display_names".')
    if len(set([len(v) for v in dataset.values()])) != 1:
        raise ValueError('The length of "paths", "classes" and "display_names" must be same.')


def main():
    """Merge every dataset info file given on the CLI into one file.

    Each input is loaded as JSON, validated, and its "paths", "classes"
    and "display_names" lists are concatenated; the merged result is
    written to the output path. With --verbose, summary statistics of
    the merged dataset are printed.
    """
    merged = {key: [] for key in ("paths", "classes", "display_names")}

    for info_path in FLAG.dataset_infos:
        dataset = common.load_json(info_path)
        _check_dataset(dataset)
        for key, values in dataset.items():
            merged[key].extend(values)

    common.save_json(merged, FLAG.output_path)

    if FLAG.verbose:
        dataset_state(merged)


if __name__ == "__main__":
    main()
