import os
import torch
import torch.nn as nn
from ..models.monodetr.diffusers.utils import convert_state_dict_to_diffusers
from peft.utils import get_peft_model_state_dict
from ..models.monodetr.diffusers.loaders.lora_pipeline import StableDiffusionLoraLoaderMixin


def model_state_to_cpu(model_state):
    """Copy a state dict, moving every tensor value to the CPU.

    The returned mapping has the same concrete type as the input
    (e.g. ``collections.OrderedDict``), so key order is preserved.
    """
    return type(model_state)(
        (name, tensor.cpu()) for name, tensor in model_state.items()
    )

def remove_backbone(origin_dict):
    """Strip frozen diffusion-backbone weights from a state dict, in place.

    Drops every 'unet' entry except the 'time_embedding' ones (which are
    trainable), plus all 'vae', 'text_encoder' and 'tokenizer' entries, so
    checkpoints only keep the detector-specific weights.

    Args:
        origin_dict: state dict to prune; it is mutated in place.

    Returns:
        The same (mutated) dict, for call-chaining.
    """
    # Snapshot the keys because we delete from the dict while iterating.
    # (The original called list(...).copy() — the .copy() was redundant,
    # list() already returns a fresh list.)
    for key in list(origin_dict):
        is_frozen_unet = 'unet' in key and 'time_embedding' not in key
        is_other_backbone = any(
            part in key for part in ('vae', 'text_encoder', 'tokenizer')
        )
        if is_frozen_unet or is_other_backbone:
            del origin_dict[key]

    return origin_dict

def get_checkpoint_state(model=None, optimizer=None, epoch=None, best_result=None, best_epoch=None):
    """Assemble a serializable checkpoint dict.

    Collects the pruned model state (backbone weights removed), the unet LoRA
    adapter weights, the optimizer state and the training bookkeeping values.

    Args:
        model: model to snapshot, or None to skip model/LoRA state.
        optimizer: optimizer to snapshot, or None to skip its state.
        epoch: current epoch number (stored verbatim).
        best_result: best evaluation result so far (stored verbatim).
        best_epoch: epoch at which best_result was reached (stored verbatim).

    Returns:
        Dict with keys 'epoch', 'model_state', 'optimizer_state',
        'best_result', 'best_epoch' and 'lora_state_dict'.
    """
    optim_state = optimizer.state_dict() if optimizer is not None else None
    # BUGFIX: previously unet_lora_state_dict was only assigned inside the
    # `model is not None` branch, so calling with model=None raised
    # NameError at the return statement.
    model_state = None
    unet_lora_state_dict = None
    if model is not None:
        # NOTE(review): model.backbone[0].unet is accessed directly even for
        # DataParallel (not via .module) — preserved as-is, confirm upstream.
        unet_lora_state_dict = convert_state_dict_to_diffusers(
            get_peft_model_state_dict(model.backbone[0].unet))
        model_state = remove_backbone(model.state_dict())
        if isinstance(model, torch.nn.DataParallel):
            # DataParallel states live on GPU; move them to CPU for saving.
            model_state = model_state_to_cpu(model_state)

    return {'epoch': epoch,
            'model_state': model_state,
            'optimizer_state': optim_state,
            'best_result': best_result,
            'best_epoch': best_epoch,
            'lora_state_dict': unet_lora_state_dict}


def save_checkpoint(state, filename):
    """Write a checkpoint dict (see get_checkpoint_state) to '<filename>.pth'.

    The LoRA adapter weights are additionally exported into the checkpoint's
    directory through the diffusers LoRA saving helper.
    """
    path = '{}.pth'.format(filename)
    StableDiffusionLoraLoaderMixin.save_lora_weights(
        os.path.dirname(path), state['lora_state_dict'])
    torch.save(state, path)


def load_checkpoint(model, optimizer, filename, map_location, logger=None):
    """Load a checkpoint produced by save_checkpoint/get_checkpoint_state.

    Restores model weights (including the unet LoRA adapter) and optimizer
    state when present, and returns the stored bookkeeping values.

    Args:
        model: module to load 'model_state' into, or None to skip.
        optimizer: optimizer to load 'optimizer_state' into, or None to skip.
        filename: path of the .pth checkpoint file.
        map_location: forwarded to torch.load.
        logger: optional logger; progress messages are skipped when None.

    Returns:
        Tuple of (epoch, best_result, best_epoch).

    Raises:
        FileNotFoundError: if `filename` does not exist.
    """
    if not os.path.isfile(filename):
        # Include the missing path in the error (was a bare FileNotFoundError).
        raise FileNotFoundError(filename)

    # BUGFIX: logger defaults to None but was used unconditionally,
    # crashing with AttributeError whenever no logger was supplied.
    if logger is not None:
        logger.info("==> Loading from checkpoint '{}'".format(filename))
    checkpoint = torch.load(filename, map_location)
    epoch = checkpoint.get('epoch', -1)
    best_result = checkpoint.get('best_result', 0.0)
    best_epoch = checkpoint.get('best_epoch', 0.0)
    if model is not None and checkpoint['model_state'] is not None:
        model.load_state_dict(checkpoint['model_state'], strict=False)
        lora_state_dict = checkpoint['lora_state_dict']
        # NOTE(review): alphas are not stored in the checkpoint; passing None
        # presumably falls back to the adapter defaults — confirm.
        network_alphas = None
        StableDiffusionLoraLoaderMixin.load_lora_into_unet(
            lora_state_dict, network_alphas, model.backbone[0].unet)
    if optimizer is not None and checkpoint['optimizer_state'] is not None:
        # BUGFIX: torch.optim.Optimizer.load_state_dict() accepts no 'strict'
        # keyword — passing one raised TypeError on this path.
        optimizer.load_state_dict(checkpoint['optimizer_state'])
    if logger is not None:
        logger.info("==> Done")

    return epoch, best_result, best_epoch
