#!/usr/bin/env python3
import json
from tqdm import tqdm
import datasets
from datasets import load_dataset
import uuid

# Root directory of the locally mirrored BAAI datasets.
dataset_root_dir = "/opt/local/datasets"
# Infinity-Instruct root and its two train splits used to build the 9M mix.
infinity_instruct_path = f"{dataset_root_dir}/BAAI/Infinity-Instruct"
infinity_instruct_gen_path = f"{infinity_instruct_path}/Gen"
infinity_instruct_7m_path = f"{infinity_instruct_path}/7M"
# Preference data; loaded in generate_infinity_instruct_9m only for a size report.
infinity_preference_path = f"{dataset_root_dir}/BAAI/Infinity-Preference"

# Combined output dataset; read back by generate_infinity_instruct_50k / sampling.
infinity_instruct_9m_path = f"{dataset_root_dir}/BAAI/Infinity-Instruct-9M"


def generate_infinity_instruct_9m(num_samples, output_path, num_shards=20, seed=10042):
    """Build the Infinity-Instruct-9M mix from the 7M and Gen train splits.

    Assigns fresh ids to the 7M split, normalizes both splits to an
    ``{id, conversations, meta}`` schema, concatenates and shuffles them,
    optionally truncates to ``num_samples``, and writes the result either as
    a single .jsonl file or as contiguous parquet shards.

    Args:
        num_samples: keep only the first N examples after shuffling; values
            <= 0 keep the full dataset.
        output_path: a path ending in ``.jsonl`` for jsonl output, otherwise
            treated as a directory for parquet shards.
        num_shards: number of parquet shards (ignored for .jsonl output).
        seed: shuffle seed, for reproducibility.
    """
    ds_7m = load_dataset(infinity_instruct_7m_path, split="train")
    ds_gen = load_dataset(infinity_instruct_gen_path, split="train")
    # NOTE(review): ds_preference is only used for the size report below and is
    # never merged into the output — confirm this is intentional.
    ds_preference = load_dataset("parquet", data_dir=infinity_preference_path, split="train")

    print(f"Loaded {len(ds_7m)} examples from {infinity_instruct_7m_path}")
    print(f"Loaded {len(ds_gen)} examples from {infinity_instruct_gen_path}")
    print(f"Loaded {len(ds_preference)} examples from {infinity_preference_path}")

    # Overwrite the 7M split's ids with fresh ones so they cannot collide with
    # ids from the Gen split. NOTE(review): uuid1 embeds host/time info; uuid4
    # would avoid that, but ids are kept as-is to match previously generated data.
    ds_7m = ds_7m.map(lambda x: {"id": str(uuid.uuid1()).replace("-", "")})
    print(ds_7m[0])

    def map_func(example):
        # Normalize to a common schema: fold bookkeeping columns into "meta".
        return {
            "id": example["id"],
            "conversations": example["conversations"],
            "meta": {
                "label": example["label"],
                "langdetect": example["langdetect"],
                "source": example["source"],
                "reward": example["reward"],
            },
        }

    ds_7m = ds_7m.map(map_func)
    ds_gen = ds_gen.map(map_func)

    ds_instruct = datasets.concatenate_datasets([ds_7m, ds_gen])
    ds_instruct = ds_instruct.shuffle(seed=seed)

    print(ds_instruct[0])
    print(f"Combined {len(ds_7m)} and {len(ds_gen)} to get {len(ds_instruct)}")

    if num_samples > 0:
        # BUG FIX: slicing a datasets.Dataset (ds[:n]) returns a plain dict of
        # columns, which broke both the jsonl loop (it iterated column names)
        # and the .shard()/.to_parquet() branch. select() keeps a Dataset.
        ds_instruct = ds_instruct.select(range(num_samples))

    if output_path.endswith(".jsonl"):
        with open(output_path, "w", encoding="utf-8") as fd:
            for d in tqdm(ds_instruct):
                fd.write(json.dumps(d, ensure_ascii=False) + "\n")
    else:
        for index in range(num_shards):
            shard = ds_instruct.shard(index=index, num_shards=num_shards, contiguous=True)
            parquet_file_path = f"{output_path}/data-{index:05d}-of-{num_shards:05d}.parquet"
            shard.to_parquet(parquet_file_path)

    print(f"Saved {len(ds_instruct)} examples to {output_path}")

def generate_infinity_instruct_50k(num_samples, output_path, num_shards=5):
    """Save the first ``num_samples`` rows of Infinity-Instruct-9M as
    ``num_shards`` contiguous parquet shards under ``output_path``."""
    full_ds = load_dataset(infinity_instruct_9m_path, split="train")
    subset = full_ds.select(range(num_samples))
    for shard_idx in range(num_shards):
        piece = subset.shard(index=shard_idx, num_shards=num_shards, contiguous=True)
        piece.to_parquet(f"{output_path}/data-{shard_idx:05d}-of-{num_shards:05d}.parquet")
    print(f"Saved {len(subset)} examples to {output_path}")

def sampling(num_samples, output_path, num_shards=1):
    """Take a prefix of ``num_samples`` examples from Infinity-Instruct-9M and
    write it to ``output_path`` as contiguous parquet shards."""
    base = load_dataset(infinity_instruct_9m_path, split="train")
    sampled = base.select(range(num_samples))
    for i in range(num_shards):
        part = sampled.shard(index=i, num_shards=num_shards, contiguous=True)
        part.to_parquet(f"{output_path}/data-{i:05d}-of-{num_shards:05d}.parquet")
    print(f"Saved {len(sampled)} examples to {output_path}")


def get_args():
    """Parse and return the command-line arguments for this script.

    Returns:
        argparse.Namespace with the flags/options defined below.
    """
    import argparse
    parser = argparse.ArgumentParser()
    parser.add_argument("--generate_infinity_instruct_9m", action="store_true",
                        help="Combine the 7M and Gen splits into the 9M dataset.")
    parser.add_argument("--do_sampling", action="store_true",
                        help="Sample a prefix of Infinity-Instruct-9M to parquet.")
    parser.add_argument("--num_samples", type=int, default=-1,
                        help="Number of examples to keep; <= 0 keeps all (9M mode).")
    parser.add_argument("--output_path", type=str,
                        help="A .jsonl file path, or a directory for parquet shards.")
    parser.add_argument("--num_shards", type=int, default=20,
                        help="Number of contiguous parquet shards to write.")
    parser.add_argument("--seed", type=int, default=10042,
                        help="Shuffle seed used when building the 9M dataset.")
    return parser.parse_args()


def main():
    """Entry point: run whichever generation steps the CLI flags request."""
    args = get_args()
    # Both tasks share the same sizing/output options.
    common = dict(
        num_samples=args.num_samples,
        output_path=args.output_path,
        num_shards=args.num_shards,
    )
    if args.generate_infinity_instruct_9m:
        generate_infinity_instruct_9m(seed=args.seed, **common)
    if args.do_sampling:
        sampling(**common)


# Run the CLI when executed as a script.
if __name__ == "__main__":
    main()

