import os
import cv2
import numpy as np
from PIL import Image
import argparse
from collections import defaultdict
import threading
from concurrent.futures import ThreadPoolExecutor
import time
import logging
from datetime import datetime

def setup_logging(directory, log_to_file=True):
    """Configure and return the 'SimilarityDetector' logger.

    Args:
        directory: Directory in which the optional log file is created.
        log_to_file: When True, also write records to a timestamped
            UTF-8 log file inside ``directory``.

    Returns:
        logging.Logger: Logger with a console handler and, optionally,
        a file handler.
    """
    logger = logging.getLogger('SimilarityDetector')
    logger.setLevel(logging.INFO)
    
    # Close previous handlers before clearing them: clearing alone leaks
    # the open file descriptor of any earlier FileHandler on repeated calls.
    for handler in logger.handlers:
        handler.close()
    logger.handlers.clear()
    
    # Console handler - always enabled.
    console_handler = logging.StreamHandler()
    console_handler.setLevel(logging.INFO)
    console_handler.setFormatter(logging.Formatter('%(message)s'))
    logger.addHandler(console_handler)
    
    # File handler - optional.
    if log_to_file:
        log_file = os.path.join(directory, f'similarity_detection_{datetime.now().strftime("%Y%m%d_%H%M%S")}.log')
        file_handler = logging.FileHandler(log_file, encoding='utf-8')
        file_handler.setLevel(logging.INFO)
        file_handler.setFormatter(logging.Formatter('%(asctime)s - %(message)s'))
        logger.addHandler(file_handler)
        
        logger.info(f"日志文件: {log_file}")
    
    return logger

def get_image_files(directory):
    """Recursively collect the paths of all image files under ``directory``.

    A file counts as an image when its extension (case-insensitive) is one
    of: png, jpg, jpeg, bmp, tiff, webp.
    """
    allowed_exts = {'.png', '.jpg', '.jpeg', '.bmp', '.tiff', '.webp'}
    found = []
    
    for root, _dirs, filenames in os.walk(directory):
        found.extend(
            os.path.join(root, name)
            for name in filenames
            if os.path.splitext(name)[1].lower() in allowed_exts
        )
    
    return found

def calculate_hash(image_path, logger):
    """Compute the 64-bit difference hash (dHash) of an image.

    Args:
        image_path: Path to the image file.
        logger: Logger used to report failures as warnings.

    Returns:
        int | None: 64-bit dHash value, or None when the image cannot be
        read or processed.
    """
    try:
        # Context manager closes the underlying file handle promptly
        # instead of leaking it until garbage collection.
        with Image.open(image_path) as img:
            gray = img.convert('L').resize((9, 8), Image.LANCZOS)
            pixels = list(gray.getdata())
        
        # Each bit records whether a pixel is brighter than its right
        # neighbor in the 9x8 grayscale thumbnail (8 comparisons per row).
        diff = []
        for i in range(8):
            for j in range(8):
                if pixels[i*9 + j] > pixels[i*9 + j+1]:
                    diff.append(1)
                else:
                    diff.append(0)
        
        # Pack the 64 comparison bits into a single integer.
        hash_val = 0
        for i, bit in enumerate(diff):
            if bit:
                hash_val |= 1 << i
                
        return hash_val
    except Exception as e:
        logger.warning(f"计算图像哈希时出错 {image_path}: {e}")
        return None

def hamming_distance(hash1, hash2):
    """Return the Hamming distance between two 64-bit hash values.

    A missing hash (None) is treated as maximally distant: 64 bits.
    """
    if hash1 is None or hash2 is None:
        return 64
    differing = hash1 ^ hash2
    count = 0
    while differing:
        differing &= differing - 1  # clear the lowest set bit
        count += 1
    return count

def similarity_percentage(hash1, hash2):
    """Return the similarity of two 64-bit hashes as a percentage (0-100)."""
    if hash1 is None or hash2 is None:
        # An unknown hash counts as completely different (distance 64).
        distance = 64
    else:
        distance = bin(hash1 ^ hash2).count('1')
    return (1 - distance / 64) * 100

def compute_hashes_batch(image_paths, logger):
    """Hash a batch of images, pairing each path with its (possibly None) hash."""
    return [(path, calculate_hash(path, logger)) for path in image_paths]

def find_similar_images(directory, similarity_threshold=90, max_workers=10, logger=None):
    """Find groups of similar images in a directory using multiple threads.

    Args:
        directory: Directory scanned recursively for image files.
        similarity_threshold: Minimum similarity percentage (0-100) for two
            images to be considered similar.
        max_workers: Thread pool size used for both hashing and comparing.
        logger: Optional logger; the module's 'SimilarityDetector' logger is
            used when omitted.

    Returns:
        dict: Mapping of 1-based group id to a sorted list of similar image
        paths; empty when fewer than two usable images are found.
    """
    # Fall back to a named logger so the documented default of None
    # does not crash on the first logger.info call.
    if logger is None:
        logger = logging.getLogger('SimilarityDetector')

    logger.info(f"开始扫描目录: {directory}")
    image_files = get_image_files(directory)
    logger.info(f"找到 {len(image_files)} 个图片文件")
    
    if len(image_files) < 2:
        logger.error("需要至少2个图片文件进行比较")
        return {}
    
    # Phase 1: compute all image hashes concurrently.
    logger.info("开始计算图片哈希值...")
    hashes_dict = {}
    processed_count = 0
    total_count = len(image_files)
    
    batch_size = max(1, total_count // max_workers)
    batches = [image_files[i:i+batch_size] for i in range(0, len(image_files), batch_size)]
    
    with ThreadPoolExecutor(max_workers=max_workers) as executor:
        futures = [executor.submit(compute_hashes_batch, batch, logger) for batch in batches]
        
        for future in futures:
            batch_results = future.result()
            for path, hash_val in batch_results:
                if hash_val is not None:
                    hashes_dict[path] = hash_val
                processed_count += 1
            
            # Report progress after each completed batch.
            progress = (processed_count / total_count) * 100
            logger.info(f"哈希计算进度: {processed_count}/{total_count} ({progress:.1f}%)")
    
    logger.info(f"成功计算 {len(hashes_dict)} 个图片的哈希值")
    
    if len(hashes_dict) < 2:
        logger.error("可用的图片数量不足，无法进行比较")
        return {}
    
    # Phase 2: pairwise similarity comparison across threads.
    logger.info("开始比较图片相似度...")
    similar_pairs = []
    lock = threading.Lock()
    
    hash_items = list(hashes_dict.items())
    total_comparisons = len(hash_items) * (len(hash_items) - 1) // 2
    completed_comparisons = 0
    
    def compare_chunk(start_idx, end_idx):
        """Compare rows [start_idx, end_idx) against all later items."""
        nonlocal completed_comparisons
        chunk_results = []
        local_done = 0  # comparisons performed since the last flush
        for i in range(start_idx, end_idx):
            path1, hash1 = hash_items[i]
            for j in range(i + 1, len(hash_items)):
                path2, hash2 = hash_items[j]
                similarity = similarity_percentage(hash1, hash2)
                if similarity >= similarity_threshold:
                    chunk_results.append((path1, path2, similarity))
                
                # Flush the local counter under the lock every 1000
                # comparisons: `+=` on the shared nonlocal is not atomic
                # across threads, so it must only be mutated while locked.
                local_done += 1
                if local_done >= 1000:
                    with lock:
                        completed_comparisons += local_done
                        done = completed_comparisons
                    local_done = 0
                    progress = (done / total_comparisons) * 100
                    logger.info(f"比较进度: {done}/{total_comparisons} ({progress:.1f}%)")
        
        with lock:
            completed_comparisons += local_done
            similar_pairs.extend(chunk_results)
    
    # Split the comparison work into chunks of starting rows.
    total_images = len(hash_items)
    chunk_size = max(1, total_images // (max_workers * 2))
    
    with ThreadPoolExecutor(max_workers=max_workers) as executor:
        futures = []
        for start_idx in range(0, total_images, chunk_size):
            end_idx = min(start_idx + chunk_size, total_images)
            futures.append(executor.submit(compare_chunk, start_idx, end_idx))
        
        for future in futures:
            future.result()
    
    logger.info(f"完成比较，找到 {len(similar_pairs)} 个相似图片对")
    
    # Merge the similar pairs into connected groups.
    similar_groups = group_similar_images(similar_pairs)
    logger.info(f"分组完成，共 {len(similar_groups)} 组相似图片")
    
    return similar_groups

def group_similar_images(similar_pairs):
    """Group similar image pairs into connected components via union-find.

    Args:
        similar_pairs: Iterable of (path1, path2, similarity) tuples.

    Returns:
        dict: Mapping of 1-based group id to a sorted list of member paths;
        every returned group has at least two members.
    """
    parent = {}
    
    def find(x):
        # Iterative find with path compression: avoids hitting Python's
        # recursion limit on very long parent chains.
        if x not in parent:
            parent[x] = x
        root = x
        while parent[root] != root:
            root = parent[root]
        while parent[x] != root:
            parent[x], x = root, parent[x]
        return root
    
    def union(x, y):
        root_x, root_y = find(x), find(y)
        if root_x != root_y:
            parent[root_y] = root_x
    
    for path1, path2, _ in similar_pairs:
        union(path1, path2)
    
    # Bucket every path under its component root.
    groups = defaultdict(list)
    for path in parent:
        groups[find(path)].append(path)
    
    # Number the multi-member groups 1..k; members sorted for stable output.
    result_groups = {}
    for i, (root, paths) in enumerate(groups.items()):
        if len(paths) > 1:
            result_groups[i + 1] = sorted(paths)
    
    return result_groups

def save_results_to_file(similar_groups, directory, logger):
    """Write a report of the similar-image groups to a timestamped text file.

    Returns the report file path, or None when there is nothing to save or
    the file cannot be written.
    """
    if not similar_groups:
        logger.info("未找到相似度超过阈值的图片")
        return None
    
    timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
    result_file = os.path.join(directory, f'similarity_results_{timestamp}.txt')
    
    try:
        # Assemble the whole report first, then write it in one pass.
        lines = [
            "相似图片检测报告\n",
            "=" * 50 + "\n",
            f"生成时间: {datetime.now().strftime('%Y-%m-%d %H:%M:%S')}\n",
            f"扫描目录: {directory}\n",
            f"找到 {len(similar_groups)} 组相似图片\n\n",
        ]
        for group_id, image_paths in similar_groups.items():
            lines.append(f"第 {group_id} 组相似图片 ({len(image_paths)} 张):\n")
            for path in image_paths:
                rel_path = os.path.relpath(path, directory)
                size_kb = os.path.getsize(path) / 1024
                lines.append(f"  - {rel_path} ({size_kb:.1f} KB)\n")
            lines.append("\n")
        
        with open(result_file, 'w', encoding='utf-8') as f:
            f.writelines(lines)
        
        logger.info(f"结果已保存到文件: {result_file}")
        return result_file
    except Exception as e:
        logger.error(f"保存结果文件时出错: {e}")
        return None

def display_results(similar_groups, directory, logger):
    """Log a human-readable summary of the similar-image groups."""
    if not similar_groups:
        logger.info("未找到相似度超过阈值的图片")
        return
    
    logger.info(f"\n找到 {len(similar_groups)} 组相似图片:")
    logger.info("=" * 80)
    
    for group_id, image_paths in similar_groups.items():
        logger.info(f"第 {group_id} 组相似图片 ({len(image_paths)} 张):")
        
        # Resolve display path and size (KB) for each member, then log.
        entries = [
            (os.path.relpath(path, directory), os.path.getsize(path) / 1024)
            for path in image_paths
        ]
        for rel_path, size_kb in entries:
            logger.info(f"  - {rel_path} ({size_kb:.1f} KB)")
        
        logger.info("")

def get_run_directory():
    """Return the current working directory, used as the default scan root.

    NOTE(review): the original comment claimed this returns the script's
    own directory via ``os.path.realpath(__file__)``, but the code returns
    the process working directory; documented here as actually implemented.
    """
    return os.getcwd()

def main():
    """Command-line entry point: parse arguments and run the detection."""
    # Default scan root is the process working directory.
    default_directory = get_run_directory()
    
    parser = argparse.ArgumentParser(description='使用多线程查找相似图片')
    parser.add_argument('directory', nargs='?', default=default_directory, 
                       help=f'要扫描的图片目录路径（默认：当前脚本所在目录: {default_directory}）')
    parser.add_argument('-t', '--threshold', type=float, default=90, 
                       help='相似度阈值 (0-100)，默认90')
    parser.add_argument('-w', '--workers', type=int, default=10,
                       help='线程池大小，默认10')
    parser.add_argument('--no-log-file', action='store_true',
                       help='不生成日志文件')
    
    args = parser.parse_args()
    
    # Validate inputs before any logging is configured.
    if not os.path.exists(args.directory):
        print(f"错误: 目录 '{args.directory}' 不存在")
        print(f"当前脚本所在目录: {default_directory}")
        return
    
    if args.threshold < 0 or args.threshold > 100:
        print("错误: 相似度阈值必须在 0-100 之间")
        return
    
    logger = setup_logging(args.directory, not args.no_log_file)
    
    banner = "=" * 80
    started_at = time.time()
    logger.info(banner)
    logger.info("图片相似度检测工具 - 开始执行")
    logger.info(banner)
    logger.info(f"扫描目录: {args.directory}")
    logger.info(f"相似度阈值: {args.threshold}%")
    logger.info(f"线程数: {args.workers}")
    logger.info(f"开始时间: {datetime.now().strftime('%Y-%m-%d %H:%M:%S')}")
    
    try:
        groups = find_similar_images(args.directory, args.threshold, args.workers, logger)
        display_results(groups, args.directory, logger)
        save_results_to_file(groups, args.directory, logger)
        
        elapsed = time.time() - started_at
        logger.info(f"检测完成，总耗时: {elapsed:.2f} 秒")
        logger.info(f"完成时间: {datetime.now().strftime('%Y-%m-%d %H:%M:%S')}")
        
    except Exception as e:
        logger.error(f"检测过程中出现错误: {e}")
        logger.error("检测失败")
    
    logger.info(banner)
    logger.info("图片相似度检测工具 - 执行结束")
    logger.info(banner)

# Script entry point: run the CLI only when executed directly.
if __name__ == "__main__":
    main()