# Copyright (c) 2025 Bytedance Ltd. and/or its affiliates
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import os
from .utils import DaskRowIterator, read_parquet
from utils import get_logger

# Dispatch table mapping a supported file-type label to the reader callable
# RetrievalDataset uses to load that format.
read_file_method_map = {
    "parquet": read_parquet,
}


class RetrievalDataset:
    """A retrieval-evaluation dataset laid out on disk as query / target /
    ground-truth sub-directories, with optional dynamic-target and image
    sub-directories.

    Each split is read with the reader registered for ``file_type`` in
    ``read_file_method_map`` and wrapped in a ``DaskRowIterator``; the
    ground-truth table is materialised eagerly via ``.compute()``.
    """

    def __init__(self, dataset_name, file_type, dataset_dir, query_cfg, target_cfg, ground_truth_cfg, dynamic_target_cfg=None, image_cfg=None, **kwargs):
        """Validate paths, then load every split and the ground truth.

        Args:
            dataset_name: Human-readable name, used only for logging.
            file_type: Key into ``read_file_method_map`` (e.g. ``"parquet"``).
            dataset_dir: Root directory containing all split sub-directories.
            query_cfg / target_cfg / ground_truth_cfg: Required per-split
                config dicts; each must contain ``dir_name``.
            dynamic_target_cfg: Optional extra target split config.
            image_cfg: Optional image directory config.
            **kwargs: Ignored; accepted for config-forwarding convenience.
        """
        self.dataset_name = dataset_name
        self.file_type = file_type
        self.dataset_dir = dataset_dir
        self.query_cfg = query_cfg
        self.target_cfg = target_cfg
        self.ground_truth_cfg = ground_truth_cfg
        self.dynamic_target_cfg = dynamic_target_cfg
        self.image_cfg = image_cfg

        self.logger = get_logger("RetrievalDataset")
        self.logger.info(f"Loading Dataset: {self.dataset_name}")

        # Resolve the reader up front so an unsupported type fails fast.
        assert self.file_type in read_file_method_map, f"file_type {self.file_type} not supported"
        self.read_file_method = read_file_method_map[self.file_type]

        # Resolved directories; the optional ones stay None when unused.
        for attr in ("query_dir", "target_dir", "ground_truth_dir", "dynamic_target_dir", "image_dir"):
            setattr(self, attr, None)
        self._check_path()

        # Split name -> DaskRowIterator, populated by _load().
        self.dataset = {}
        self._load()

        # Ground-truth table plus the column names it was read with,
        # populated by _load_ground_truth().
        self.ground_truth = None
        for attr in ("gt_query_id_field_name", "gt_target_id_field_name", "gt_label_field_name"):
            setattr(self, attr, None)
        self._load_ground_truth()
        self.logger.info(f"Load Dataset: {self.dataset_name} completed")

    def _check_path(self):
        """Resolve each configured sub-directory under ``dataset_dir`` and
        assert it exists.

        Runs in three phases (config presence, path join, existence) so the
        first failure reported is always a missing ``dir_name`` rather than a
        missing directory.
        """
        cfg_specs = [
            ("query", self.query_cfg, True),
            ("target", self.target_cfg, True),
            ("ground_truth", self.ground_truth_cfg, True),
            ("dynamic_target", self.dynamic_target_cfg, False),
            ("image", self.image_cfg, False),
        ]

        # Phase 1: every provided config must name its directory.
        for label, cfg, required in cfg_specs:
            if required or cfg:
                assert "dir_name" in cfg, f"{label}_cfg must contain dir_name"

        # Phase 2: resolve paths (optional splits only when configured).
        for label, cfg, required in cfg_specs:
            if cfg:
                setattr(self, f"{label}_dir", os.path.join(self.dataset_dir, cfg["dir_name"]))

        # Phase 3: every resolved path must exist on disk.
        for label, cfg, required in cfg_specs:
            resolved = getattr(self, f"{label}_dir")
            if required or resolved:
                assert os.path.exists(resolved), f"{label}_dir {resolved} not exists"

    def _load(self):
        """Read each split lazily and wrap it in a ``DaskRowIterator``."""
        sections = [
            ("query", self.query_cfg, self.query_dir),
            ("target", self.target_cfg, self.target_dir),
        ]
        if self.dynamic_target_dir:
            sections.append(("dynamic_target", self.dynamic_target_cfg, self.dynamic_target_dir))

        for name, cfg, directory in sections:
            feature_columns = cfg.get("feature_columns", {})
            assert isinstance(feature_columns, dict) and len(
                feature_columns) > 0, "feature_columns must be a non-empty dict"
            id_column = cfg.get("id_column", "")
            assert isinstance(id_column, str) and len(id_column) > 0, "id_column must be a non-empty string"
            # Only pull the id column plus the configured feature columns.
            frame = self.read_file_method(directory, columns=[id_column, *feature_columns.values()])
            self.dataset[name] = DaskRowIterator(
                frame, id_column=id_column, export_features=feature_columns, image_dir=self.image_dir)

    def _load_ground_truth(self):
        """Eagerly load the ground-truth table and record its column names."""
        cfg = self.ground_truth_cfg
        query_col = cfg.get("query_id_column", "")
        target_col = cfg.get("target_id_column", "")
        assert isinstance(query_col, str) and len(
            query_col) > 0, "query_id_column must be a non-empty string"
        assert isinstance(target_col, str) and len(
            target_col) > 0, "target_id_column must be a non-empty string"
        self.gt_query_id_field_name = query_col
        self.gt_target_id_field_name = target_col

        columns = [query_col, target_col]
        label_col = cfg.get("label_column")
        if label_col:
            # Labels are optional; gt_label_field_name stays None without them.
            columns.append(label_col)
            self.gt_label_field_name = label_col

        # Ground truth is assumed small enough to materialise in memory.
        self.ground_truth = self.read_file_method(self.ground_truth_dir, columns=columns).compute()

    @property
    def query(self):
        """Iterator over the query split (set up in ``_load``)."""
        return self.dataset["query"]

    @property
    def target(self):
        """Iterator over the target split (set up in ``_load``)."""
        return self.dataset["target"]

    @property
    def dynamic_target(self):
        """Iterator over the dynamic-target split, or None if not configured."""
        return self.dataset.get("dynamic_target")
