import os
import json
import torch
import safetensors.torch
import folder_paths

class QwenLoraConverterNode:
    """ComfyUI output node that converts a Qwen-Image LoRA file from
    PEFT-style key naming (``<k>.lora_A.default.weight`` / ``<k>.lora_B.default.weight``)
    to the ``diffusion_model.<k>.lora.down.weight`` / ``.lora.up.weight`` naming
    ComfyUI expects, then saves the result next to the input file as
    ``<name>_converted.safetensors``.
    """

    @classmethod
    def INPUT_TYPES(cls):
        """Declare the node's inputs: a dropdown listing files in the ``loras`` folder."""
        return {
            "required": {
                "lora_file": (folder_paths.get_filename_list("loras"),),
            },
        }

    RETURN_TYPES = ()
    FUNCTION = "convert_lora"
    OUTPUT_NODE = True
    CATEGORY = "Qwen-Image Lora Converter"

    def convert_lora(self, lora_file):
        """Load *lora_file*, convert its keys, and save the converted state dict.

        Always returns an empty tuple (this is an OUTPUT_NODE). Errors are
        reported via console prints instead of exceptions so the surrounding
        workflow keeps running.
        """
        # Resolve the full path of the selected LoRA file.
        lora_dir = folder_paths.get_folder_paths("loras")[0]
        lora_path = os.path.join(lora_dir, lora_file)

        print(f"正在处理LoRA文件: {lora_path}")

        # Load the LoRA state dict from disk.
        try:
            if lora_path.endswith('.safetensors'):
                lora_data = safetensors.torch.load_file(lora_path, device="cpu")
            else:
                # torch.load uses pickle under the hood; weights_only=True
                # blocks arbitrary code execution from an untrusted checkpoint
                # and is sufficient for a plain tensor state dict.
                lora_data = torch.load(lora_path, map_location="cpu", weights_only=True)
        except Exception as e:
            print(f"读取LoRA文件失败: {e}")
            return ()

        # Convert key naming, then normalize any remaining prefixes.
        converted_dict = self._convert_keys(lora_data)
        converted_dict = self._fix_prefix(converted_dict)

        # Save next to the input file with a "_converted" suffix.
        # NOTE(review): running the node on an already-converted file yields
        # "<name>_converted_converted.safetensors" — harmless but noisy.
        base_name = os.path.splitext(os.path.basename(lora_file))[0]
        output_filename = f"{base_name}_converted.safetensors"
        output_path = os.path.join(os.path.dirname(lora_path), output_filename)

        try:
            safetensors.torch.save_file(converted_dict, output_path)
            print(f"转换后的LoRA文件已保存到: {output_path}")
        except Exception as e:
            print(f"保存转换后的LoRA文件失败: {e}")
            return ()

        return ()

    def _convert_keys(self, lora_data):
        """Rename PEFT-style LoRA keys to ComfyUI's expected format.

        ``<k>.lora_A.default.weight`` -> ``diffusion_model.<k>.lora.down.weight``
        ``<k>.lora_B.default.weight`` -> ``diffusion_model.<k>.lora.up.weight``

        Keys without ``lora_`` are copied through unchanged. Keys containing
        ``lora_`` that end in neither ``lora_A`` nor ``lora_B`` are dropped —
        presumably auxiliary entries such as alpha scalars; confirm against
        real checkpoints.
        """
        converted_dict = {}
        for key, value in lora_data.items():
            if 'lora_' not in key:
                converted_dict[key] = value
                continue

            fixed_key = key.replace(".default.weight", "")

            if fixed_key.endswith(".lora_A"):
                fixed_key = fixed_key.replace(".lora_A", ".lora.down.weight")
            elif fixed_key.endswith(".lora_B"):
                fixed_key = fixed_key.replace(".lora_B", ".lora.up.weight")
            else:
                # Not a lora_A/lora_B weight: intentionally skipped.
                continue

            # NOTE(review): keys already carrying a "transformer." prefix end
            # up as "diffusion_model.transformer.transformer_blocks..." here,
            # which _fix_prefix will not rewrite — verify against real inputs.
            converted_dict["diffusion_model." + fixed_key] = value

        return converted_dict

    def _fix_prefix(self, lora_data):
        """Ensure transformer-block keys carry the ``diffusion_model.`` prefix."""
        converted_dict = {}
        for key, value in lora_data.items():
            if key.startswith("transformer_blocks."):
                # Bare transformer_blocks keys just need the prefix prepended.
                converted_dict["diffusion_model." + key] = value
            elif key.startswith("transformer.transformer_blocks"):
                # diffusers-style "transformer." prefix is swapped for ours.
                fixed_key = key.replace(
                    "transformer.transformer_blocks",
                    "diffusion_model.transformer_blocks", 1)
                converted_dict[fixed_key] = value
            else:
                converted_dict[key] = value

        return converted_dict


# Register the node with ComfyUI.
NODE_CLASS_MAPPINGS = {"QwenLoraConverterNode": QwenLoraConverterNode}

NODE_DISPLAY_NAME_MAPPINGS = {"QwenLoraConverterNode": "Qwen-Image Lora Converter"}
