from .mine_hard import random_positive, mine_negative
from utils.utils import format_esm
import random
import torch
import numpy as np

def collate_fn(batch):
    """Collate a batch of (id, ec, embedding) samples into padded tensors.

    Each ``b[2]`` is a per-residue embedding of shape (L_i, d); sequences are
    right-padded with zeros to a common length of ``max(L_i) + 2``.
    (The +2 reserves two extra positions — presumably BOS/EOS slots for a
    downstream model; TODO confirm against the consumer.)

    Returns:
        batch[0], batch[1]: the first two samples, passed through unchanged.
        X:      float32 tensor (B, L_max, d) of zero-padded embeddings.
        mask:   float32 tensor (B, L_max, d); 1.0 at valid positions, 0.0 at
                padding (and at any NaN already present in the input data,
                since validity is derived via ``np.isnan``).
        lengths: list of the original sequence lengths.
    """
    B = len(batch)
    lengths = [len(b[2]) for b in batch]
    L_max = max(lengths) + 2
    # Infer the feature dimension from the data instead of hard-coding 1024,
    # so the collate works for any embedding width.
    d = batch[0][2].shape[1]
    # Start from all-NaN so un-filled (padding) positions are detectable below.
    X = np.full([B, L_max, d], np.nan)
    for i, b in enumerate(batch):
        X[i, :lengths[i], :] = b[2]
    mask = np.isnan(X)
    X[mask] = 0.
    valid = np.logical_not(mask).astype(np.float32)
    return (batch[0], batch[1],
            torch.from_numpy(X).to(dtype=torch.float32),
            torch.from_numpy(valid),
            lengths)


class MultiPosNeg_dataset_with_mine_EC(torch.utils.data.Dataset):
    """Contrastive dataset over EC classes with mined hard negatives.

    Each item is built around one fully specified EC number: a randomly
    chosen anchor sequence, ``n_pos`` positives from the same EC class, and
    ``n_neg`` negatives mined via ``mine_negative``. Their precomputed ESM
    embeddings are loaded from disk and concatenated along dim 0, so
    ``__getitem__`` returns a tensor of 1 + n_pos + n_neg stacked embeddings.

    Args:
        id_ec:    mapping sequence id -> its EC number(s).
        ec_id:    mapping EC number -> list of sequence ids.
        mine_neg: precomputed negative-mining structure passed through to
                  ``mine_negative``.
        n_pos:    number of positive samples per anchor.
        n_neg:    number of mined negative samples per anchor.
        data_dir: root directory containing the ``esm_data/`` embedding files.
    """

    def __init__(self, id_ec, ec_id, mine_neg, n_pos, n_neg, data_dir='/state/partition/wzzheng/clean/data/train_valid_split/split100'):
        self.id_ec = id_ec
        self.ec_id = ec_id
        self.n_pos = n_pos
        self.n_neg = n_neg
        self.mine_neg = mine_neg
        self.data_dir = data_dir
        # Keep only fully specified EC numbers; wildcard entries such as
        # '1.2.-.-' contain '-' and are excluded from the sampling universe.
        self.full_list = [ec for ec in ec_id if '-' not in ec]

    def __len__(self):
        return len(self.full_list)

    def _load_embedding(self, seq_id):
        """Load one precomputed ESM embedding and add a leading batch axis."""
        path = self.data_dir + '/esm_data/' + seq_id + '.pt'
        return format_esm(torch.load(path)).unsqueeze(0)

    def __getitem__(self, index):
        anchor_ec = self.full_list[index]
        anchor = random.choice(self.ec_id[anchor_ec])
        data = [self._load_embedding(anchor)]
        for _ in range(self.n_pos):
            pos = random_positive(anchor, self.id_ec, self.ec_id)
            data.append(self._load_embedding(pos))
        for _ in range(self.n_neg):
            neg = mine_negative(anchor, self.id_ec, self.ec_id, self.mine_neg)
            data.append(self._load_embedding(neg))
        return torch.cat(data)
