import os
import hashlib,shutil
from PIL import Image
import imagehash

def find_duplicate_images(folder_path):
    """Scan *folder_path* (non-recursively) for duplicate images.

    Duplicates are detected by perceptual average-hash (imagehash.average_hash),
    so visually identical images compare equal even across minor re-encodes.
    The first file seen with a given hash is kept; every later file with the
    same hash is reported as a duplicate.

    Args:
        folder_path: Directory containing the image files to scan.

    Returns:
        list[str]: Full paths of the duplicate files (keepers excluded).
    """
    # Maps hash string -> path of the first (kept) file with that hash.
    hash_dict = {}
    duplicates = []

    for filename in os.listdir(folder_path):
        if filename.lower().endswith(('.png', '.jpg', '.jpeg', '.bmp', '.gif')):
            file_path = os.path.join(folder_path, filename)
            try:
                # Perceptual hash (average hash) — robust to small pixel-level
                # differences between visually similar images.
                # `with` closes the underlying file handle; without it, a large
                # folder can exhaust open file descriptors.
                with Image.open(file_path) as img:
                    img_hash = str(imagehash.average_hash(img))

                # A hash seen before means this file duplicates an earlier one.
                if img_hash in hash_dict:
                    duplicates.append(file_path)
                else:
                    hash_dict[img_hash] = file_path
            except Exception as e:
                # Report the actual failing file (the original printed a
                # literal "(unknown)" placeholder here).
                print(f"Error processing {file_path}: {e}")

    return duplicates

def delete_duplicates(duplicates, dst_dir):
    """Move each duplicate file into *dst_dir* (a recoverable "soft delete").

    Despite the name (kept for backward compatibility), files are MOVED to
    *dst_dir* rather than removed, so mistakes can be undone by moving them
    back. Failures on individual files are reported and skipped; the rest of
    the list is still processed.

    Args:
        duplicates: Iterable of file paths to move.
        dst_dir: Existing destination directory.
    """
    for file_path in duplicates:
        try:
            shutil.move(file_path, dst_dir)
            # The original printed "Deleted:" here, which misrepresented the
            # shutil.move above — the file still exists in dst_dir.
            print(f"Moved: {file_path}")
        except Exception as e:
            print(f"Failed to move {file_path}: {e}")

if __name__ == "__main__":
    folder_path = "/mnt/nas/datasets/diction/ZipArchive0739_clo"  # 替换为你的文件夹路径
    dst_dir = '/mnt/nas/datasets/diction/ZipArchive0739_clo_sifted'
    os.makedirs(dst_dir)
    duplicates = find_duplicate_images(folder_path)
    
    if duplicates:
        print(f"Found {len(duplicates)} duplicate images.")
        delete_duplicates(duplicates,dst_dir)
    else:
        print("No duplicates found.")