import os
import shutil

import torch
from safetensors.torch import load_file, save_file

# Source checkpoint and float16 output directory.
source_dir = "./pretrained/Qwen2.5-VL-3B-Instruct"
target_dir = "./pretrained/Qwen2.5-VL-3B-Instruct-fp16"
os.makedirs(target_dir, exist_ok=True)

# Convert every .safetensors shard in the source directory to float16.
for file_name in os.listdir(source_dir):
    if file_name.endswith(".safetensors"):
        file_path = os.path.join(source_dir, file_name)
        weights = load_file(file_path)

        # Convert dtype to float16 — but only for floating-point tensors.
        # Casting integer/bool tensors (if any shard contains them) to
        # fp16 would silently corrupt them, so those keep their dtype.
        new_weights = {
            k: v.to(torch.float16) if v.is_floating_point() else v
            for k, v in weights.items()
        }

        # Save to the new directory. transformers expects the
        # {"format": "pt"} metadata header in .safetensors files;
        # load_file() drops it, so restore it explicitly on save.
        target_path = os.path.join(target_dir, file_name)
        save_file(new_weights, target_path, metadata={"format": "pt"})

        print(f"Converted {file_name} to float16")

# Copy the other required files (config.json, tokenizer.json, etc.)
# so the target directory is a loadable checkpoint.
# NOTE(review): model.safetensors.index.json is copied verbatim; its
# "total_size" / offsets reflect the original dtype sizes and will be
# stale after the fp16 conversion — confirm downstream loaders only
# rely on the weight_map, which is unchanged.
for file_name in [
    "config.json",
    "tokenizer.json",
    "tokenizer_config.json",
    "generation_config.json",
    "preprocessor_config.json",
    "model.safetensors.index.json",
]:
    src_path = os.path.join(source_dir, file_name)
    dst_path = os.path.join(target_dir, file_name)
    # Skip files the source checkpoint does not ship (best-effort copy).
    if os.path.exists(src_path):
        shutil.copy(src_path, dst_path)
