import folder_paths
import torch
import safetensors.torch
import os
import sys


import comfy.diffusers_load
import comfy.samplers
import comfy.sample
import comfy.sd
import comfy.utils
import comfy.controlnet
    
class LoadLoraFromFile:
    """ComfyUI node that applies a single LoRA to a model/CLIP pair.

    Unlike the stock LoraLoader, ``lora_name`` is a free-form string: it may
    be a direct filesystem path to a LoRA file, or a name resolved through
    the configured ``loras`` folders via ``folder_paths``.  The most recently
    loaded LoRA is cached so repeated executions with the same file skip
    re-reading it from disk.
    """

    def __init__(self):
        # (path, state_dict) tuple of the most recently loaded LoRA, or None.
        self.loaded_lora = None

    @classmethod
    def INPUT_TYPES(s):
        return {"required": {
                              "model": ("MODEL",),
                              "clip": ("CLIP", ),
                              "lora_name": ("STRING", {"default": "lora file path"}),
                              "strength_model": ("FLOAT", {"default": 1.0, "min": -20.0, "max": 20.0, "step": 0.01}),
                              "strength_clip": ("FLOAT", {"default": 1.0, "min": -20.0, "max": 20.0, "step": 0.01}),
                              }}

    RETURN_TYPES = ("MODEL", "CLIP")
    FUNCTION = "load_lora"
    CATEGORY = "loaders"

    def load_lora(self, model, clip, lora_name, strength_model, strength_clip):
        """Apply ``lora_name`` to ``model``/``clip`` at the given strengths.

        Returns the patched ``(model, clip)``.  If both strengths are zero,
        or the LoRA file cannot be resolved, the inputs are returned
        unchanged instead of raising.
        """
        if strength_model == 0 and strength_clip == 0:
            return (model, clip)

        # Accept either a direct filesystem path or a name inside the
        # configured "loras" folders.  (isfile implies exists, so a single
        # check suffices.)
        if os.path.isfile(lora_name):
            lora_path = lora_name
        else:
            lora_path = folder_paths.get_full_path("loras", lora_name)

        print('Lora File:', lora_path)
        # get_full_path returns None when the name cannot be resolved; the
        # original code then crashed (TypeError on exists(None), or a load
        # failure after merely printing a warning).  Skip the LoRA instead,
        # matching LoadLorasFromFile's behavior for missing files.
        if lora_path is None or not os.path.exists(lora_path):
            print("Lora file not found:", lora_name)
            return (model, clip)

        lora = None
        if self.loaded_lora is not None:
            if self.loaded_lora[0] == lora_path:
                lora = self.loaded_lora[1]
            else:
                # A different file is requested: drop the stale cache entry
                # before loading the new one.
                self.loaded_lora = None

        if lora is None:
            lora = comfy.utils.load_torch_file(lora_path, safe_load=True)
            self.loaded_lora = (lora_path, lora)

        model_lora, clip_lora = comfy.sd.load_lora_for_models(model, clip, lora, strength_model, strength_clip)
        return (model_lora, clip_lora)
class LoadLorasFromFile:
    """ComfyUI node that stacks up to ``LORA_COUNT`` LoRAs onto a model/CLIP pair.

    Slot 0 uses the required ``lora_name``/``strength_model``/``strength_clip``
    inputs; slots 1..LORA_COUNT-1 arrive through the generated
    ``lora_name_{i}`` / ``strength_model_{i}`` / ``strength_clip_{i}`` inputs.
    Loaded state dicts are cached per file basename so re-runs don't re-read
    from disk.
    """

    # Total number of LoRA slots (slot 0 plus LORA_COUNT-1 extra slots).
    LORA_COUNT = 5

    def __init__(self):
        # basename -> {"path": full_path, "data": state_dict} cache.
        self.loaded_lora = {}

    @classmethod
    def INPUT_TYPES(s):
        loras = ["None"] + folder_paths.get_filename_list("loras")
        fields = {
                   "model": ("MODEL",),
                   "clip": ("CLIP", ),
                   "lora_name": (loras,),
                   "strength_model": ("FLOAT", {"default": 1.0, "min": -20.0, "max": 20.0, "step": 0.01}),
                   "strength_clip": ("FLOAT", {"default": 1.0, "min": -20.0, "max": 20.0, "step": 0.01}),
                   }
        # Generate the extra LoRA slots (default strength 0.9, as before).
        for i in range(1, s.LORA_COUNT):
            fields[f"lora_name_{i}"] = (loras,)
            fields[f"strength_model_{i}"] = ("FLOAT", {"default": 0.9, "min": -20.0, "max": 20.0, "step": 0.01})
            fields[f"strength_clip_{i}"] = ("FLOAT", {"default": 0.9, "min": -20.0, "max": 20.0, "step": 0.01})
        return {"required": fields}

    RETURN_TYPES = ("MODEL", "CLIP")
    FUNCTION = "load_lora"
    CATEGORY = "loaders"

    def get_lora_data(self, lora_name):
        """Return the LoRA state dict for ``lora_name`` (path or folder name).

        Returns ``None`` for empty/placeholder names and for files that
        cannot be resolved, so callers can simply skip that slot.
        """
        # "None" is the placeholder entry of the dropdown and "" is the
        # kwargs default for an absent slot: neither is an error, so bail
        # out quietly without a spurious "not found" warning.
        if not lora_name or lora_name == "None":
            return None

        if os.path.isfile(lora_name):
            lora_path = lora_name
        else:
            lora_path = folder_paths.get_full_path("loras", lora_name)
        if not lora_path or not os.path.exists(lora_path):
            print("Lora file not found:", lora_name)
            return None

        lora = None
        iscache = False
        cache_name = os.path.basename(lora_path)
        cache_data = self.loaded_lora.get(cache_name)
        if cache_data is not None:
            if cache_data['path'] == lora_path:
                lora = cache_data['data']
                iscache = True
            else:
                # Same basename now resolves to a different file: invalidate.
                self.loaded_lora.pop(cache_name)

        if lora is None:
            lora = comfy.utils.load_torch_file(lora_path, safe_load=True)
            self.loaded_lora[cache_name] = dict(path=lora_path, data=lora)
        print(f"lora load:{lora_name}, cache:{iscache} path:{lora_path} ")
        return lora

    def load_lora(self, model, clip, lora_name, strength_model, strength_clip, **kwargs):
        """Apply every configured LoRA slot in order; return ``(model, clip)``.

        Each slot is independent: a zero-strength or unresolvable slot is
        skipped without disabling the others.  (The original early-returned
        when slot 0 had zero strength, silently dropping slots 1..N too.)
        """
        model_lora = model
        clip_lora = clip

        # Slot 0: the required inputs.
        if strength_model != 0 or strength_clip != 0:
            lora = self.get_lora_data(lora_name)
            if lora is not None:
                model_lora, clip_lora = comfy.sd.load_lora_for_models(model_lora, clip_lora, lora, strength_model, strength_clip)

        # Slots 1..LORA_COUNT-1 from the generated inputs.
        for i in range(1, self.LORA_COUNT):
            name_i = kwargs.get(f"lora_name_{i}", "")
            sm = kwargs.get(f"strength_model_{i}", 0.9)
            sc = kwargs.get(f"strength_clip_{i}", 0.9)
            if sm == 0 and sc == 0:
                continue  # zero-strength slot: applying it would be a no-op
            lora = self.get_lora_data(lora_name=name_i)
            if lora is not None:
                print(f"lora merge<{i}>:", name_i, sm, sc)
                model_lora, clip_lora = comfy.sd.load_lora_for_models(model_lora, clip_lora, lora, sm, sc)
        return (model_lora, clip_lora)
# Node registry picked up by ComfyUI.  "LoraLoaderFromFile" is kept as a
# legacy alias: it maps to the same class as "X_LoadLoraFromFile".
NODE_CLASS_MAPPINGS = dict(
    LoraLoaderFromFile=LoadLoraFromFile,
    X_LoadLoraFromFile=LoadLoraFromFile,
    X_LoadLorasFromFile=LoadLorasFromFile,
)