import random
import numpy as np
import torch
import torch.nn.functional as F
import os


def setup_seed(seed, rank=0):
    """Seed every RNG (python ``random``, numpy, torch CPU and CUDA).

    Args:
        seed: base seed; anything convertible with ``int()``.
        rank: process rank, added to the seed so each distributed worker
            gets a distinct but reproducible stream.
    Returns:
        The effective integer seed that was applied (``int(seed) + rank``).
    """
    effective_seed = int(seed) + rank
    random.seed(effective_seed)
    np.random.seed(effective_seed)
    torch.manual_seed(effective_seed)
    torch.cuda.manual_seed_all(effective_seed)
    # Trade cuDNN autotuning speed for bitwise-reproducible kernels.
    torch.backends.cudnn.deterministic = True
    torch.backends.cudnn.benchmark = False
    return effective_seed


def get_source_list(file_path: str):
    """Read ``file_path`` and return its lines as a list of strings.

    Each line has its newline removed and any remaining trailing
    whitespace stripped; empty lines are kept as empty strings.
    """
    with open(file_path, "r") as fh:
        return [ln.replace("\n", "").rstrip() for ln in fh]


def list_to_files(arr: list, file_path):
    """Write every element of ``arr`` to ``file_path``, one per line.

    Elements already ending in a newline are written as-is; otherwise a
    newline is appended. Missing parent directories are created.

    Args:
        arr: list of strings to write.
        file_path: destination path; may be a bare filename with no
            directory component.
    """
    dir_name = os.path.dirname(file_path)
    # os.path.dirname returns "" for a bare filename, and
    # os.makedirs("", exist_ok=True) raises FileNotFoundError —
    # only create directories when there is actually one in the path.
    if dir_name:
        os.makedirs(dir_name, exist_ok=True)
    with open(file_path, "w") as f:
        for e in arr:
            f.write(e if e.endswith("\n") else e + "\n")


class AttrDict(dict):
    """A dict whose entries are also readable and writable as attributes.

    Attribute and item access share the same storage, and looking up a
    missing attribute yields ``None`` instead of raising AttributeError.
    """

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # Alias the attribute namespace to the dict itself so that
        # ``d.key`` and ``d["key"]`` read/write the same entries.
        self.__dict__ = self

    def __getattribute__(self, name: str):
        # Swallow AttributeError so unknown names resolve to None.
        try:
            return super().__getattribute__(name)
        except AttributeError:
            return None


def truc_tensor(*x: torch.Tensor, length=None):
    """
    Truncate (or zero-pad) a group of tensors to a common length on dim 0.

    All tensors are assumed to share the same size T in their first
    dimension; when truncation is needed a single random offset is drawn
    so every tensor is cut at the same position.

    Args:
        x: tensors of shape [T, ...] sharing the same T.
        length: target length. If None, the tensors are returned
            unchanged. Tensors longer than ``length`` are sliced at a
            random offset; shorter ones are zero-padded at the end of
            dim 0.

    Returns:
        The processed tensor when a single tensor was given, otherwise a
        list of processed tensors.
    """
    x_len = x[0].size(0)  # shared first-dimension length T
    if length is None:
        res = list(x)
        return res[0] if len(res) == 1 else res
    res = []
    if x_len > length:
        # Valid offsets are 0 .. x_len - length inclusive (the slice then
        # ends exactly at x_len). The previous bound `x_len - length - 1`
        # could never select the final window — an off-by-one.
        offset = random.randint(0, x_len - length)
        for a in x:
            res.append(a[offset : offset + length])
    else:
        for a in x:
            # F.pad's pad list runs from the LAST dimension backwards, so
            # the trailing (0, length - x_len) pair pads only dim 0.
            padding = [0] * (a.dim() - 1) * 2 + [0, length - x_len]
            res.append(F.pad(a, padding, "constant"))
    return res[0] if len(res) == 1 else res