package com.beemans.corekit.duplicate;

import androidx.annotation.NonNull;
import androidx.annotation.Nullable;

import java.io.File;
import java.io.FileInputStream;
import java.io.RandomAccessFile;
import java.security.MessageDigest;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;

/**
 * Scans directory trees for duplicate files in two phases:
 * <ol>
 *   <li>group candidate files by size (only files sharing a size can be duplicates);</li>
 *   <li>verify each same-size group with a content hash (full SHA-256 for small
 *       files, a sampled head/middle/tail hash for large ones).</li>
 * </ol>
 * Both phases run on thread pools; progress and results are reported through an
 * optional {@link ScanListener}, and a scan in flight can be cancelled via
 * {@link #cancelScan()}.
 *
 * <p>Thread-safety: the configuration setters may be invoked from a different
 * thread than the worker pool, so the configuration fields are {@code volatile}.
 * Listener callbacks are invoked on worker threads.
 */
public class AdvancedDuplicateScanner {
    private static final int DEFAULT_MAX_DEPTH = 5;
    private static final int PARTIAL_HASH_SIZE = 1024 * 1024; // hash the first 1MB of large files
    private static final int LARGE_FILE_THRESHOLD = 10 * 1024 * 1024; // 10MB: above this, use sampled hashing
    private static final long MID_HASH_THRESHOLD = 20L * 1024 * 1024; // 20MB: above this, also hash the middle
    private static final int END_HASH_SIZE = 1024; // tail bytes folded into the sampled hash
    private static final int PROGRESS_NOTIFY_INTERVAL = 100; // report progress every N scanned files
    private static final long EXECUTOR_TIMEOUT_MINUTES = 10;

    // Configuration may be mutated from another thread while workers are
    // scanning; volatile guarantees the workers see the latest values.
    private volatile int maxDepth;
    private volatile boolean useFullHashForSmallFiles;
    @NonNull
    private volatile Set<String> excludedDirectories;

    // Cancellation flag and progress counters shared across worker threads.
    @NonNull
    private final AtomicBoolean cancelled = new AtomicBoolean(false);
    @NonNull
    private final AtomicInteger scannedFilesCount = new AtomicInteger(0);
    @NonNull
    private final AtomicInteger potentialDuplicatesCount = new AtomicInteger(0);
    @Nullable
    private volatile ScanListener listener;

    /** Creates a scanner that descends at most {@value #DEFAULT_MAX_DEPTH} directory levels. */
    public AdvancedDuplicateScanner() {
        this(DEFAULT_MAX_DEPTH);
    }

    /**
     * @param maxDepth maximum directory depth to descend; the root directory itself is depth 1
     */
    public AdvancedDuplicateScanner(int maxDepth) {
        this.maxDepth = maxDepth;
        this.useFullHashForSmallFiles = true;
        // Default exclusions ("cache", "temp", ...) are intentionally disabled;
        // callers opt in via setExcludedDirectories()/addExcludedDirectory().
        this.excludedDirectories = ConcurrentHashMap.newKeySet();
    }

    /**
     * Sets the listener notified of scan progress and discovered duplicate groups.
     * Callbacks are delivered on worker threads.
     */
    public void setScanListener(@Nullable ScanListener listener) {
        this.listener = listener;
    }

    /** Requests cancellation of the scan currently in progress (best effort). */
    public void cancelScan() {
        cancelled.set(true);
    }

    /** Resets the cancellation flag and progress counters before a new scan. */
    private void resetScanState() {
        cancelled.set(false);
        scannedFilesCount.set(0);
        potentialDuplicatesCount.set(0);
    }

    /**
     * Scans a single root directory for duplicate files.
     *
     * @return groups of files with identical content; each inner list has at least two entries
     */
    @NonNull
    public List<List<File>> scanDuplicateFiles(@NonNull File rootDirectory) {
        return scanDuplicateFiles(Collections.singletonList(rootDirectory));
    }

    /**
     * Scans the given root directories for duplicate files.
     *
     * @return groups of files with identical content; empty if cancelled or none found
     */
    @NonNull
    public List<List<File>> scanDuplicateFiles(@NonNull List<File> rootDirectories) {
        resetScanState();

        // Phase 1: group files by size — only same-size files can be duplicates.
        Map<Long, List<File>> sizeGroups = groupFilesBySize(rootDirectories);

        if (cancelled.get()) {
            return Collections.emptyList();
        }

        // Phase 2: confirm same-size candidates by content hash.
        return verifyDuplicateByHash(sizeGroups);
    }

    /**
     * Phase 1: walks each root (depth-limited) on a thread pool and buckets
     * every regular file by its size, then drops the buckets with only one file.
     */
    @NonNull
    private Map<Long, List<File>> groupFilesBySize(@NonNull List<File> rootDirectories) {
        Map<Long, List<File>> sizeMap = new ConcurrentHashMap<>();
        // Leave one core free for the caller/UI thread.
        ExecutorService executor = Executors.newFixedThreadPool(
                Math.max(1, Runtime.getRuntime().availableProcessors() - 1));

        try {
            for (File rootDir : rootDirectories) {
                if (cancelled.get()) {
                    break;
                }
                if (rootDir.exists() && rootDir.isDirectory()) {
                    executor.execute(() -> scanDirectoryWithDepth(rootDir, 1, sizeMap));
                }
            }
        } finally {
            shutdownAndAwait(executor);
        }

        Map<Long, List<File>> filtered = filterPotentialDuplicates(sizeMap);
        potentialDuplicatesCount.set(filtered.values().stream().mapToInt(List::size).sum());
        notifyProgress();
        return filtered;
    }

    /**
     * Initiates an orderly shutdown and waits up to {@link #EXECUTOR_TIMEOUT_MINUTES}
     * minutes for queued tasks to finish. On timeout or interruption the remaining
     * tasks are forcibly cancelled so worker threads are never leaked.
     */
    private void shutdownAndAwait(@NonNull ExecutorService executor) {
        executor.shutdown();
        try {
            if (!executor.awaitTermination(EXECUTOR_TIMEOUT_MINUTES, TimeUnit.MINUTES)) {
                executor.shutdownNow(); // timed out: abandon remaining work
            }
        } catch (InterruptedException e) {
            executor.shutdownNow();
            Thread.currentThread().interrupt(); // preserve the interrupt status
        }
    }

    /**
     * Recursively scans {@code currentDir}, honoring cancellation, the depth
     * limit, and the excluded-directory/file filters. Non-empty regular files
     * are added to {@code sizeMap}, keyed by size.
     */
    private void scanDirectoryWithDepth(@NonNull File currentDir, int currentDepth, @NonNull Map<Long, List<File>> sizeMap) {
        if (cancelled.get()) {
            return;
        }

        if (currentDepth > maxDepth || !currentDir.exists() || !currentDir.isDirectory()) {
            return;
        }

        File[] files = currentDir.listFiles();
        if (files == null) return; // I/O error or permission denied

        for (File file : files) {
            if (cancelled.get()) {
                return;
            }

            if (file.isFile()) {
                long fileSize = file.length(); // stat once; length() hits the filesystem
                if (fileSize > 0 && !isExcludedFile(file)) {
                    // Bucket by size; the per-bucket list must be synchronized
                    // because multiple workers may append concurrently.
                    sizeMap.computeIfAbsent(fileSize, k -> Collections.synchronizedList(new ArrayList<>())).add(file);

                    int count = scannedFilesCount.incrementAndGet();
                    if (count % PROGRESS_NOTIFY_INTERVAL == 0) {
                        notifyProgress();
                    }
                }
            } else if (file.isDirectory() && !isExcludedDirectory(file)) {
                // The recursive call's own depth guard enforces the limit.
                scanDirectoryWithDepth(file, currentDepth + 1, sizeMap);
            }
        }
    }

    /** Notifies the listener (if any) with the current progress counters. */
    private void notifyProgress() {
        ScanListener l = listener; // snapshot: the field may be nulled concurrently
        if (l != null) {
            l.onScanProgress(scannedFilesCount.get(), potentialDuplicatesCount.get());
        }
    }

    /**
     * Phase 2: hashes every same-size group on a thread pool and collects the
     * groups whose members share identical content hashes.
     */
    @NonNull
    private List<List<File>> verifyDuplicateByHash(@NonNull Map<Long, List<File>> sizeGroups) {
        List<List<File>> duplicates = Collections.synchronizedList(new ArrayList<>());
        ExecutorService executor = Executors.newFixedThreadPool(Runtime.getRuntime().availableProcessors());

        try {
            for (Map.Entry<Long, List<File>> entry : sizeGroups.entrySet()) {
                if (cancelled.get()) {
                    break;
                }
                if (entry.getValue().size() > 1) {
                    executor.execute(() -> processSizeGroup(entry.getValue(), duplicates));
                }
            }
        } finally {
            shutdownAndAwait(executor);
        }

        return duplicates;
    }

    /**
     * Hashes one same-size file group, sub-groups by hash, and reports every
     * sub-group with more than one member as a confirmed duplicate set.
     */
    private void processSizeGroup(@NonNull List<File> sameSizeFiles, @NonNull List<List<File>> duplicates) {
        if (cancelled.get()) {
            return;
        }

        Map<String, List<File>> hashGroups = new HashMap<>();

        for (File file : sameSizeFiles) {
            if (cancelled.get()) {
                return;
            }

            String fileHash = calculateOptimizedHash(file);
            if (!fileHash.isEmpty()) { // empty = unreadable file; skip rather than mis-group
                hashGroups.computeIfAbsent(fileHash, k -> new ArrayList<>()).add(file);
            }
        }

        for (List<File> duplicateGroup : hashGroups.values()) {
            if (cancelled.get()) {
                return;
            }

            if (duplicateGroup.size() > 1) {
                duplicates.add(duplicateGroup);
                ScanListener l = listener; // snapshot against concurrent setScanListener(null)
                if (l != null) {
                    l.onDuplicateFound(duplicateGroup);
                }
            }
        }
    }

    /**
     * Picks a hashing strategy by file size: full SHA-256 for files up to
     * {@link #LARGE_FILE_THRESHOLD} (when enabled), sampled hashing otherwise.
     *
     * @return the hash string, or {@code ""} if the file could not be read
     */
    @NonNull
    private String calculateOptimizedHash(@NonNull File file) {
        try {
            long fileSize = file.length();

            if (fileSize == 0) {
                // Defensive: zero-size files are filtered out during scanning.
                return "zero_size_file";
            } else if (useFullHashForSmallFiles && fileSize <= LARGE_FILE_THRESHOLD) {
                return calculateFullHash(file);
            } else {
                return calculateEnhancedPartialHash(file);
            }
        } catch (Exception e) {
            return "";
        }
    }

    /**
     * Sampled hash for large files: combines the file size, the first
     * {@link #PARTIAL_HASH_SIZE} bytes, a 1MB block from the middle (for files
     * above {@link #MID_HASH_THRESHOLD}), and the last {@link #END_HASH_SIZE}
     * bytes. Not a guarantee of identity, but a strong heuristic that avoids
     * reading multi-GB files in full.
     */
    @NonNull
    private String calculateEnhancedPartialHash(@NonNull File file) {
        try {
            MessageDigest digest = MessageDigest.getInstance("SHA-256");
            long fileSize = file.length();

            // 1. Mix in the exact file size.
            digest.update(longToBytes(fileSize));

            // 2. Hash of the leading bytes.
            byte[] partialHash = calculatePartialHash(file, PARTIAL_HASH_SIZE);
            digest.update(partialHash);

            // 3. Hash of a block at the midpoint, for sufficiently large files.
            if (fileSize > MID_HASH_THRESHOLD) {
                long midPoint = fileSize / 2;
                byte[] midHash = calculateHashAtPosition(file, midPoint, PARTIAL_HASH_SIZE);
                if (midHash != null) {
                    digest.update(midHash);
                }
            }

            // 4. Hash of the trailing bytes.
            if (fileSize > END_HASH_SIZE) {
                byte[] endHash = calculateHashAtPosition(file, Math.max(0, fileSize - END_HASH_SIZE), END_HASH_SIZE);
                if (endHash != null) {
                    digest.update(endHash);
                }
            }

            return bytesToHex(digest.digest());
        } catch (Exception e) {
            // Fall back to a plain head hash if the sampled strategy fails.
            return calculatePartialHashHex(file, PARTIAL_HASH_SIZE);
        }
    }

    /**
     * Hashes up to {@code size} bytes starting at {@code position}.
     *
     * @return the SHA-256 digest, or {@code null} if the position is past EOF
     *         or the file could not be read
     */
    @Nullable
    private byte[] calculateHashAtPosition(@NonNull File file, long position, int size) {
        try (RandomAccessFile raf = new RandomAccessFile(file, "r")) {
            long remaining = raf.length() - position;
            if (remaining <= 0) return null;

            raf.seek(position);
            // min in long arithmetic first: casting (length - position) straight
            // to int overflows for remainders beyond 2GB.
            byte[] buffer = new byte[(int) Math.min(size, remaining)];
            int bytesRead = raf.read(buffer);

            if (bytesRead > 0) {
                MessageDigest digest = MessageDigest.getInstance("SHA-256");
                digest.update(buffer, 0, bytesRead);
                return digest.digest();
            }
        } catch (Exception e) {
            // Unreadable region: signal with null so the caller can skip it.
        }
        return null;
    }

    /**
     * Computes the SHA-256 of the whole file.
     *
     * @return the hex-encoded digest, or {@code ""} if the file could not be read
     */
    @NonNull
    private String calculateFullHash(@NonNull File file) {
        try {
            MessageDigest digest = MessageDigest.getInstance("SHA-256");
            try (FileInputStream fis = new FileInputStream(file)) {
                byte[] buffer = new byte[8192];
                int bytesRead;
                while ((bytesRead = fis.read(buffer)) != -1) {
                    digest.update(buffer, 0, bytesRead);
                }
            }
            return bytesToHex(digest.digest());
        } catch (Exception e) {
            return "";
        }
    }

    /**
     * Computes the SHA-256 of at most the first {@code maxBytes} bytes.
     *
     * @return the raw digest, or an empty array if the file could not be read
     */
    @NonNull
    private byte[] calculatePartialHash(@NonNull File file, int maxBytes) {
        try {
            MessageDigest digest = MessageDigest.getInstance("SHA-256");
            try (FileInputStream fis = new FileInputStream(file)) {
                byte[] buffer = new byte[Math.min(maxBytes, 8192)];
                int totalRead = 0;
                int bytesRead;

                while (totalRead < maxBytes && (bytesRead = fis.read(buffer)) != -1) {
                    // Clamp the final chunk so exactly maxBytes bytes are digested.
                    int bytesToProcess = Math.min(bytesRead, maxBytes - totalRead);
                    digest.update(buffer, 0, bytesToProcess);
                    totalRead += bytesToProcess;
                }
            }
            return digest.digest();
        } catch (Exception e) {
            return new byte[0];
        }
    }

    /**
     * Hex-string variant of {@link #calculatePartialHash(File, int)}.
     *
     * @return the hex digest, or {@code ""} if the file could not be read
     */
    @NonNull
    private String calculatePartialHashHex(@NonNull File file, int maxBytes) {
        byte[] hash = calculatePartialHash(file, maxBytes);
        return hash.length > 0 ? bytesToHex(hash) : "";
    }

    /** Keeps only the size buckets containing more than one file. */
    @NonNull
    private Map<Long, List<File>> filterPotentialDuplicates(@NonNull Map<Long, List<File>> sizeMap) {
        Map<Long, List<File>> result = new HashMap<>();
        for (Map.Entry<Long, List<File>> entry : sizeMap.entrySet()) {
            if (entry.getValue().size() > 1) {
                result.put(entry.getKey(), entry.getValue());
            }
        }
        return result;
    }

    /**
     * A directory is excluded when its lowercase name contains any configured
     * exclusion keyword (keywords are stored lowercase).
     */
    private boolean isExcludedDirectory(@NonNull File dir) {
        // Locale.ROOT avoids locale surprises (e.g. the Turkish dotless i).
        String dirName = dir.getName().toLowerCase(Locale.ROOT);
        return excludedDirectories.stream().anyMatch(dirName::contains);
    }

    /**
     * File-level exclusion hook. Currently a no-op: the former dot-file /
     * cache / temp name filter was intentionally disabled so every regular
     * file is considered.
     */
    private boolean isExcludedFile(@NonNull File file) {
        return false;
    }

    /** Encodes a byte array as a lowercase hex string. */
    @NonNull
    private String bytesToHex(@NonNull byte[] bytes) {
        StringBuilder hexString = new StringBuilder(bytes.length * 2);
        for (byte b : bytes) {
            String hex = Integer.toHexString(0xff & b);
            if (hex.length() == 1) {
                hexString.append('0'); // zero-pad single-digit values
            }
            hexString.append(hex);
        }
        return hexString.toString();
    }

    /** Encodes a long as 8 bytes, big-endian. */
    @NonNull
    private byte[] longToBytes(long value) {
        byte[] result = new byte[8];
        for (int i = 7; i >= 0; i--) {
            result[i] = (byte) (value & 0xFF);
            value >>= 8;
        }
        return result;
    }

    /** Sets the maximum directory depth to descend (root = depth 1). */
    public void setMaxDepth(int maxDepth) {
        this.maxDepth = maxDepth;
    }

    /** Enables/disables full hashing for files up to {@link #LARGE_FILE_THRESHOLD}. */
    public void setUseFullHashForSmallFiles(boolean useFullHashForSmallFiles) {
        this.useFullHashForSmallFiles = useFullHashForSmallFiles;
    }

    /**
     * Replaces the excluded-directory keywords. Entries are lowercased (to match
     * {@link #isExcludedDirectory}) and defensively copied into a thread-safe set.
     */
    public void setExcludedDirectories(@NonNull Set<String> excludedDirectories) {
        Set<String> normalized = ConcurrentHashMap.newKeySet();
        for (String directory : excludedDirectories) {
            normalized.add(directory.toLowerCase(Locale.ROOT));
        }
        this.excludedDirectories = normalized;
    }

    /** Adds one excluded-directory keyword (stored lowercase). */
    public void addExcludedDirectory(@NonNull String directory) {
        this.excludedDirectories.add(directory.toLowerCase(Locale.ROOT));
    }
}