import torch
from torch.utils.data.sampler import Sampler
import random
import numpy as np

class InfiniteSampler(Sampler):
    """Yield dataset indices forever, restarting (and optionally reshuffling)
    after every full pass over the dataset."""

    def __init__(self, size, shuffle=True):
        # Number of indices emitted per pass.
        self.size = size
        # When True, a fresh random permutation is drawn for each pass.
        self.shuffle = shuffle

    def __iter__(self):
        while True:
            if self.shuffle:
                order = torch.randperm(self.size)
            else:
                order = torch.arange(self.size)
            for index in order:
                yield index

class InfiniteInstanceBalanceSampler(Sampler):
    """Endlessly sample dataset indices with probabilities chosen so that the
    expected number of annotated instances per class is (approximately)
    balanced across draws."""

    def __init__(self, dataset_dicts, num_classes):
        # Per-entry sampling probabilities; sums to 1.
        self.weights = self.calculate_weights(dataset_dicts, num_classes)
        self.size = len(self.weights)

    def calculate_weights(self, dataset_dicts, num_classes):
        """Return a probability vector over entries of ``dataset_dicts``.

        Builds the (num_entries, num_classes) instance-count matrix, then uses
        the Moore-Penrose pseudo-inverse to obtain per-entry weights whose
        weighted class counts are as uniform as possible (least-squares sense).
        """
        counts = np.zeros((len(dataset_dicts), num_classes), dtype=np.float32)
        for i, dataset_dict in enumerate(dataset_dicts):
            for obj in dataset_dict['annotations']:
                counts[i, obj['category_id']] += 1
        weights = np.linalg.pinv(counts).sum(axis=0)
        # The pseudo-inverse can produce negative entries (e.g. count matrix
        # [[2,1],[3,1]] gives column sums [2,-1]); np.random.choice rejects
        # negative probabilities, so clip before normalizing. This is a no-op
        # whenever the weights are already a valid (non-negative) vector.
        weights = np.clip(weights, 0, None)
        weights_sum = weights.sum()
        assert weights_sum > 0, "unable to balance classes"
        return weights / weights_sum

    def __iter__(self):
        while True:
            # Draw one index at a time according to the balancing weights.
            yield np.random.choice(self.size, 1, p=self.weights).item()
    
class InfiniteCategoryBalanceSampler(Sampler):
    """Endlessly sample dataset indices with probabilities chosen so that the
    expected number of entries containing each category is (approximately)
    balanced. Unlike the instance-balanced variant, each entry contributes at
    most 1 per class (presence, not count)."""

    def __init__(self, dataset_dicts, num_classes):
        # Per-entry sampling probabilities; sums to 1.
        self.weights = self.calculate_weights(dataset_dicts, num_classes)
        self.size = len(self.weights)

    def calculate_weights(self, dataset_dicts, num_classes):
        """Return a probability vector over entries of ``dataset_dicts``.

        Builds the binary (num_entries, num_classes) presence matrix, then uses
        the Moore-Penrose pseudo-inverse to obtain per-entry weights whose
        weighted class presence is as uniform as possible (least-squares sense).
        """
        presence = np.zeros((len(dataset_dicts), num_classes), dtype=np.float32)
        for i, dataset_dict in enumerate(dataset_dicts):
            for obj in dataset_dict['annotations']:
                presence[i, obj['category_id']] = 1
        weights = np.linalg.pinv(presence).sum(axis=0)
        # The pseudo-inverse can produce negative entries; np.random.choice
        # rejects negative probabilities, so clip before normalizing. This is
        # a no-op whenever the weights are already non-negative.
        weights = np.clip(weights, 0, None)
        weights_sum = weights.sum()
        assert weights_sum > 0, "unable to balance classes"
        return weights / weights_sum

    def __iter__(self):
        while True:
            # Draw one index at a time according to the balancing weights.
            yield np.random.choice(self.size, 1, p=self.weights).item()

class InfiniteDatasetTypeBalanceSampler(Sampler):
    """Endlessly sample dataset indices with probabilities chosen so that
    every distinct ``dataset_type`` is drawn with equal expected frequency."""

    def __init__(self, dataset_dicts):
        # Per-entry sampling probabilities; sums to 1.
        self.weights = self.calculate_weights(dataset_dicts)
        self.size = len(self.weights)

    def calculate_weights(self, dataset_dicts):
        """Return a probability vector over entries of ``dataset_dicts`` that
        equalizes the expected draw rate of each dataset type."""
        types = [d['params']['dataset_type'] for d in dataset_dicts]
        type_map = {dataset_type: col for col, dataset_type in enumerate(set(types))}
        # One-hot membership matrix: row = entry, column = dataset type.
        membership = np.zeros((len(types), len(type_map)), dtype=np.float32)
        for row, dataset_type in enumerate(types):
            membership[row, type_map[dataset_type]] = 1
        weights = np.linalg.pinv(membership).sum(axis=0)
        total = weights.sum()
        assert total > 0, "unable to balance datasets"
        return weights / total

    def __iter__(self):
        while True:
            # Draw one index at a time according to the balancing weights.
            yield np.random.choice(self.size, 1, p=self.weights).item()
