# Copyright      2023  Xiaomi Corp.        (authors: Zengrui Jin)
#
# See ../../../../LICENSE for clarification regarding multiple authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.


import argparse
import logging
from functools import lru_cache
from pathlib import Path
from typing import Dict

from lhotse import CutSet, load_manifest_lazy
import os

class MultiDataset:
    """Aggregate multiple Lhotse cut manifests for training, validation and test.

    Training manifests are loaded from hard-coded absolute paths selected by
    ``train_switch``; dev/test manifests are located from the directories and
    prefixes supplied via ``args``.
    """

    def __init__(self, args: argparse.Namespace):
        """
        Args:
          args:
            Parsed command-line arguments; the following attributes are read:
              - input_test_dir: space-separated test-set names (one string)
              - valid_fbank_dir: parent directory of the validation manifests
              - test_parent_dir: parent directory of the test sets
              - valid_prefix: filename prefix of the validation manifest
              - train_switch: selects which group of training manifests to load
        """
        # Space-separated test-set names; split into a list in test_cuts().
        self.test_sets = args.input_test_dir
        # Parent directory of the validation set's fbank manifests.
        self.valid_fbank_dir = Path(args.valid_fbank_dir)
        # Parent directory of the test sets.
        self.test_parent_dir = Path(args.test_parent_dir)
        # Filename prefix of the validation manifest.
        self.valid_prefix = args.valid_prefix
        # Selects the training-manifest group in train_cuts().
        # (The original comment here wrongly said "validation prefix".)
        self.train_switch = args.train_switch

    def train_cuts(self) -> CutSet:
        """Return the shuffled, muxed training CutSet for ``train_switch``.

        Raises:
          ValueError: if ``self.train_switch`` is not a recognized value.
            (Previously an unknown value silently returned None, which only
            failed much later inside the data loader with a confusing error.)
          NotImplementedError: if the combined training set is empty
            (exception type kept unchanged for backward compatibility).
        """
        if self.train_switch != "u2":
            raise ValueError(
                f"Unknown train_switch: {self.train_switch!r}; expected 'u2'"
            )

        logging.info("About to get multidataset train cuts for second training")
        cuts01 = load_manifest_lazy(
            "/home/local_data/yhdai/train_data/zipformer/cuts01/41_cuts01_cuts_train.jsonl.gz"
        )
        logging.info("Loading cuts02 for second training")
        cuts02 = load_manifest_lazy(
            "/home/local_data/yhdai/train_data/zipformer/cuts02/41_cuts02_cuts_train.jsonl.gz"
        )
        logging.info("Loading batch_en for second training")
        batch_en = load_manifest_lazy(
            "/home/local_data/yhdai/train_data/zipformer/batch_en/41_batch_en_cuts_train.jsonl.gz"
        )
        logging.info("Loading AISHELL-2 for second training")
        aishell2 = load_manifest_lazy(
            "/home/local_data/yhdai/data_zipformer/batch01/AISHELL-2/41_worked_cuts_train.jsonl.gz"
        )
        logging.info("Loading wenetspeech_3000 for second training")
        wenetspeech_3000 = load_manifest_lazy(
            "/home/local_data/yhdai/data_zipformer/batch02/wenetspeech_3000h/41_worked_cuts_train.jsonl.gz"
        )
        logging.info("Loading wenetspeech_7000 for second training")
        wenetspeech_7000 = load_manifest_lazy(
            "/home/local_data/yhdai/data_zipformer/batch02/wenetspeech_7000h/41_worked_cuts_train.jsonl.gz"
        )
        logging.info("Loading gigaspeech for second training")
        gigaspeech = load_manifest_lazy(
            "/home/local_data/yhdai/data_zipformer/batch02/gigaspeech/41_worked_cuts_train.jsonl.gz"
        )
        logging.info("Loading 8_liukai_chuantongguoxue for stage03 training")
        _8_liukai_chuantongguoxue = load_manifest_lazy(
            "/home/local_data/yhdai/data_zipformer/batch03/8_liukai_chuantongguoxue/41_worked_cuts_train.jsonl.gz"
        )
        logging.info("Loading shangye_caijing_3 for stage03 training")
        shangye_caijing_3 = load_manifest_lazy(
            "/home/local_data/yhdai/data_zipformer/batch03/shangye_caijing_3/41_worked_cuts_train.jsonl.gz"
        )
        # NOTE(review): the crawler ("pachong") sets (asru, cuts_pachong01/02)
        # and the WER-filtered sets (wer_0, wer_0_5) were previously loaded
        # here but excluded from the mux below, so the dead loads were removed.
        # TODO: the original code loaded "wer_0.jsonl.gz" for BOTH wer_0 and
        # wer_0_5 — almost certainly a copy-paste error; fix the wer_0_5 path
        # before re-enabling those sets.

        # Interleave all training sets, sampling each proportionally to its
        # size so the mixture matches a full concatenation in expectation.
        # NOTE(review): len() on lazily-opened manifests may force a full pass
        # over each manifest just to compute the weights — confirm this cost
        # is acceptable, or precompute/hard-code the weights.
        cuts_train_all = CutSet.mux(
            cuts01,
            wenetspeech_3000,
            cuts02,
            wenetspeech_7000,
            batch_en,
            gigaspeech,
            aishell2,
            _8_liukai_chuantongguoxue,
            shangye_caijing_3,
            weights=[
                len(cuts01),
                len(wenetspeech_3000),
                len(cuts02),
                len(wenetspeech_7000),
                len(batch_en),
                len(gigaspeech),
                len(aishell2),
                len(_8_liukai_chuantongguoxue),
                len(shangye_caijing_3),
            ],
        )
        if len(cuts_train_all) == 0:
            raise NotImplementedError(
                f"""No data were input for training!"""
            )
        logging.info("Shuffing for cuts_train_all...")
        cuts_train_all = cuts_train_all.shuffle()
        logging.info("Shuffing for cuts_train_all over!")
        return cuts_train_all

    def dev_cuts(self) -> CutSet:
        """Return the validation CutSet located via valid_fbank_dir/valid_prefix."""
        logging.info("About to get multidataset dev cuts")
        dev_cuts = load_manifest_lazy(
            self.valid_fbank_dir / f"{self.valid_prefix}_cuts_dev.jsonl.gz"
        )

        return dev_cuts

    # modified by yhdai 2024.07.17
    def test_cuts(self) -> Dict[str, CutSet]:
        """Load each requested test set's manifest.

        Test-set names come from the whitespace-separated ``self.test_sets``
        string; each set ``name`` is expected at
        ``test_parent_dir/name/fbank/name_cuts_test.jsonl.gz``.
        Names whose manifest file does not exist are skipped with a warning.

        Returns:
          A dict mapping test-set name to its lazily-loaded CutSet.
        """
        logging.info("About to get multidataset test cuts")
        logging.info(f"dyh info: test sets is: {self.test_sets}")
        test_family_dict = {}
        # str.split() with no argument collapses repeated whitespace, unlike
        # the original split(' ') which yielded empty names on double spaces
        # (those empty names were then skipped by the exists() check anyway).
        for dataname in self.test_sets.split():
            cut_path = (
                self.test_parent_dir
                / dataname
                / "fbank"
                / f"{dataname}_cuts_test.jsonl.gz"
            )
            if not cut_path.exists():
                # Previously a silent skip; warn so missing sets are visible.
                logging.warning(f"Skipping missing test manifest: {cut_path}")
                continue
            test_family_dict[dataname] = load_manifest_lazy(cut_path)
        return test_family_dict

