import pickle
from .sim_registry import build_from_cfg
from mind3d.dataset.registry import DATASETS
from mind3d.utils.sim_sample_ops import (DataBasePreprocessor, DBFilterByDifficulty, DBFilterByMinNumPoint)
from mind3d.utils.sim_sample_ops import DataBaseSamplerV2

def build_db_preprocess(db_prep_config, logger=None):
    """Build a single database-sample preprocessing step from its config.

    Args:
        db_prep_config: Mapping containing exactly one recognized key,
            ``"filter_by_difficulty"`` or ``"filter_by_min_num_points"``,
            whose value parameterizes the corresponding filter.
        logger: Optional logger forwarded to the constructed filter.

    Returns:
        A ``DBFilterByDifficulty`` or ``DBFilterByMinNumPoint`` instance.

    Raises:
        ValueError: If neither recognized key is present in the config.
    """
    cfg = db_prep_config
    # "filter_by_difficulty" takes precedence if both keys are present
    # (preserves the original if/elif ordering).
    if "filter_by_difficulty" in cfg:
        return DBFilterByDifficulty(cfg["filter_by_difficulty"], logger=logger)
    if "filter_by_min_num_points" in cfg:
        return DBFilterByMinNumPoint(cfg["filter_by_min_num_points"], logger=logger)
    # Report what was actually supplied so misconfigurations are debuggable.
    raise ValueError(
        f"unknown database prep type; got config keys {sorted(cfg)}"
    )

def build_dbsampler(cfg, logger=None):
    """Construct a ``DataBaseSamplerV2`` from a sampler config dict.

    Builds the preprocessing pipeline from ``cfg['db_prep_steps']``, loads
    the pickled database infos from ``cfg['db_info_path']``, and wires both
    into a ``DataBaseSamplerV2`` together with the remaining sampler
    hyperparameters taken from ``cfg``.

    Args:
        cfg: Sampler configuration mapping (see keys accessed below).
        logger: Optional logger forwarded to the preprocessing steps and
            the sampler.

    Returns:
        A configured ``DataBaseSamplerV2`` instance.
    """
    # Assemble the preprocessing pipeline from the configured steps.
    steps = []
    for step_cfg in cfg["db_prep_steps"]:
        steps.append(build_db_preprocess(step_cfg, logger=logger))
    preprocessor = DataBasePreprocessor(steps)

    # NOTE(review): pickle.load on a config-supplied path — only feed
    # trusted, locally generated info files here.
    with open(cfg["db_info_path"], "rb") as fh:
        infos = pickle.load(fh)

    return DataBaseSamplerV2(
        infos,
        cfg["sample_groups"],
        preprocessor,
        cfg["rate"],
        cfg["gt_drop_percentage"],
        cfg["gt_drop_max_keep_points"],
        cfg["point_dim"],
        logger=logger,
    )

def build_dataset(cfg, default_args=None):
    """Instantiate a dataset from its config via the ``DATASETS`` registry.

    Args:
        cfg: Dataset config passed to ``build_from_cfg``.
        default_args: Optional default constructor arguments forwarded to
            ``build_from_cfg``.

    Returns:
        The dataset object produced by the registry.

    NOTE(review): an earlier revision also handled Concat/Repeat dataset
    wrappers here (left commented out upstream); only the plain registry
    build remains active.
    """
    return build_from_cfg(cfg, DATASETS, default_args)
