import os
import torch
import numpy as np
from torch.utils.data.dataloader import DataLoader
from torch.utils.data.sampler import SubsetRandomSampler


class Logger:
    """Minimal append-only text logger: one call to write() emits one line."""

    def __init__(self, log_path):
        # Path of the log file; created on the first write() if absent.
        self.log_path = log_path

    def write(self, txt):
        """Append *txt* to the log file, terminated by a CRLF."""
        with open(self.log_path, 'a') as log_file:
            log_file.write(txt + "\r\n")


def get_learning_rate(optimizer):
    """Return the learning rate of every param group of *optimizer*.

    Args:
        optimizer: object exposing ``param_groups``, a list of dicts each
            containing an ``'lr'`` entry (torch.optim.Optimizer interface).

    Returns:
        list of learning rates, one per param group, in group order.
    """
    # Comprehension replaces the original manual `lr += [...]` append loop.
    return [param_group['lr'] for param_group in optimizer.param_groups]


def get_learning_rate(optimizer):
    """Return the learning rate of *optimizer*'s first param group."""
    first_group = optimizer.param_groups[0]
    return first_group['lr']


def format_logs(logs):
    """Render a metrics dict as comma-separated 'name - value' pairs.

    Values are formatted with 4 significant digits; insertion order of
    *logs* is preserved.
    """
    return ', '.join('{} - {:.4}'.format(name, value)
                     for name, value in logs.items())


def get_train_val_loader(train_dataset, val_dataset, train_bs, val_bs, labeled_ratio):
    """Build loaders for semi-supervised training plus a validation loader.

    The training set is shuffled once and split: the first
    ``int(labeled_ratio * len(train_dataset))`` indices form the labeled
    portion, the rest the unlabeled portion.

    Args:
        train_dataset: dataset to split into labeled/unlabeled subsets.
        val_dataset: dataset served sequentially for validation.
        train_bs: batch size for all three training loaders.
        val_bs: batch size for the validation loader.
        labeled_ratio: fraction (0..1) of training samples treated as labeled.

    Returns:
        (train_loader, train_loader_remain, train_loader_gt, val_loader) —
        labeled, unlabeled, and a second independent labeled loader, plus
        the validation loader.
    """
    workers = 0
    n_train = len(train_dataset)
    n_labeled = int(labeled_ratio * n_train)

    # One global shuffle determines the labeled/unlabeled split.
    indices = np.arange(n_train)
    np.random.shuffle(indices)
    labeled_ids = indices[:n_labeled]
    unlabeled_ids = indices[n_labeled:]

    def _make_train_loader(ids):
        # All training loaders share batch size, worker count and pinning;
        # each gets its own independent random sampler.
        return DataLoader(train_dataset,
                          batch_size=train_bs,
                          sampler=SubsetRandomSampler(ids),
                          num_workers=workers,
                          pin_memory=True)

    train_loader = _make_train_loader(labeled_ids)
    train_loader_remain = _make_train_loader(unlabeled_ids)
    train_loader_gt = _make_train_loader(labeled_ids)

    # NOTE(review): worker count becomes 1 only when val_bs == 1 — the
    # comparison looks inverted relative to intent; confirm with author.
    val_loader = DataLoader(dataset=val_dataset,
                            batch_size=val_bs,
                            num_workers=workers if val_bs > 1 else 1,
                            shuffle=False,
                            pin_memory=True)

    return train_loader, train_loader_remain, train_loader_gt, val_loader


def load_checkpoint(model, checkpoint_path):
    """Load weights stored under the 'net' key of a checkpoint into *model*.

    When *checkpoint_path* does not exist, retries a hard-coded mirror path
    (user-directory swap between two machines).
    """
    if not os.path.exists(checkpoint_path):
        # Same checkpoint tree mirrored under a different user's home dir.
        checkpoint_path = checkpoint_path.replace("peter/zze/codes/", "hjr/zze/")
    state = torch.load(checkpoint_path)
    model.load_state_dict(state['net'])
    # Drop the reference promptly; state dicts can be large.
    del state


def save_model(model, save_path, iteration, loss, metric):
    """Serialize model weights plus training progress to *save_path*.

    The saved dict has keys 'net', 'iteration', 'loss', 'metric'.
    """
    # Unwrap DataParallel-style wrappers so keys carry no 'module.' prefix.
    net = model.module if hasattr(model, 'module') else model
    checkpoint = {
        'net': net.state_dict(),
        'iteration': iteration,
        'loss': loss,
        'metric': metric,
    }
    torch.save(checkpoint, save_path)
