package com.roin.tools.service;

import java.io.*;
import java.nio.file.Files;
import java.nio.file.StandardCopyOption;
import java.util.*;
import org.apache.commons.codec.digest.DigestUtils;
import me.xdrop.fuzzywuzzy.FuzzySearch;

/**
 * 查找重复文件业务类
 * @author Roin
 */
/**
 * Service for locating duplicate files under a directory tree, either by
 * exact content fingerprint (MD5) or by fuzzy filename similarity.
 *
 * <p>Not thread-safe guarantees are claimed; the class is stateless, so
 * concurrent calls on independent arguments are fine.</p>
 *
 * @author Roin
 */
public class DuplicateFileService {

    /**
     * Finds duplicate files by MD5 content fingerprint.
     *
     * <p>Files with identical MD5 digests are grouped together; only groups
     * with two or more members are returned. Group order follows the first
     * occurrence of each digest during the directory scan.</p>
     *
     * @param targetDir         root directory to scan
     * @param fileTypeLimitList file-name suffixes to include; null/empty means no limit
     * @param excludeDirs       subdirectory names to skip; null/empty means exclude nothing
     * @return list of duplicate groups, each containing at least two files
     * @throws IOException if a file cannot be read while hashing
     */
    public List<List<File>> findDuplicateFileByMD5(File targetDir, Set<String> fileTypeLimitList, Set<String> excludeDirs) throws IOException {
        List<File> fileList = new ArrayList<>();
        getAllFile(targetDir, fileList, fileTypeLimitList, excludeDirs);

        // Group files by digest string. LinkedHashMap preserves first-seen order,
        // matching the scan order of the original pairwise comparison. Keying by
        // the MD5 itself avoids the File.hashCode() collisions the old
        // Integer-keyed map was exposed to, and turns O(n^2) scanning into O(n).
        Map<String, List<File>> md5Groups = new LinkedHashMap<>();
        for (File file : fileList) {
            String md5Str;
            // try-with-resources: the previous version leaked this stream.
            try (InputStream in = new FileInputStream(file)) {
                md5Str = DigestUtils.md5Hex(in);
            }
            md5Groups.computeIfAbsent(md5Str, k -> new ArrayList<>()).add(file);
        }
        System.out.println("正在遍历所有文件...");

        // Keep only groups that actually contain duplicates.
        List<List<File>> result = new ArrayList<>();
        for (List<File> group : md5Groups.values()) {
            if (group.size() > 1) {
                result.add(group);
            }
        }
        return result;
    }


    /**
     * Finds duplicate files by fuzzy filename similarity.
     *
     * <p>Two files are considered duplicates when the token-set ratio of their
     * (optionally cleaned) names is at least {@code similarity}. Each file is
     * assigned to at most one group.</p>
     *
     * @param targetDir         root directory to scan
     * @param fileTypeLimitList file-name suffixes to include; null/empty means no limit
     * @param excludeDirs       subdirectory names to skip; null/empty means exclude nothing
     * @param similarity        minimum similarity score (0-100) to treat names as duplicates
     * @param excludeTexts      patterns stripped from names before comparison;
     *                          NOTE: each entry is treated as a regular expression
     *                          (replaceAll), preserving the original behavior
     * @return list of duplicate groups, each containing at least two files
     */
    public List<List<File>> findDuplicateFileByName(File targetDir, Set<String> fileTypeLimitList, Set<String> excludeDirs,int similarity,Set<String> excludeTexts ){
        List<List<File>> result = new ArrayList<>();
        List<File> fileList = new ArrayList<>();
        getAllFile(targetDir, fileList, fileTypeLimitList, excludeDirs);
        boolean hasExcText = !(excludeTexts == null || excludeTexts.isEmpty());

        // Pre-compute the cleaned name of every file once. The previous version
        // re-ran replaceAll on the target name for every (src, tar) pair,
        // i.e. O(n^2) regex work.
        Map<File, String> cleanedNames = new HashMap<>(Math.max(16, fileList.size() * 2));
        for (File file : fileList) {
            String name = file.getName();
            if (hasExcText) {
                for (String excText : excludeTexts) {
                    name = name.replaceAll(excText, "");
                }
            }
            cleanedNames.put(file, name);
        }

        // Files already placed into a duplicate group. Uses File equality
        // (path-based) instead of raw hashCode ints, which could collide.
        Set<File> grouped = new HashSet<>();
        for (Iterator<File> iterator = fileList.iterator(); iterator.hasNext();) {
            File srcFile = iterator.next();
            // Already assigned to an earlier group — skip, mirroring the MD5 method.
            if (grouped.contains(srcFile)) {
                continue;
            }
            // Remove the pivot so it is never compared against itself.
            iterator.remove();
            List<File> tempList = new ArrayList<>();
            tempList.add(srcFile);
            grouped.add(srcFile);
            String srcName = cleanedNames.get(srcFile);
            for (File tarFile : fileList) {
                if (grouped.contains(tarFile)) {
                    continue;
                }
                int simi = FuzzySearch.tokenSetRatio(srcName, cleanedNames.get(tarFile));
                // At or above the requested similarity => same group.
                if (simi >= similarity) {
                    tempList.add(tarFile);
                    grouped.add(tarFile);
                }
            }
            if (tempList.size() > 1) {
                result.add(tempList);
            }
        }
        return result;
    }

    /**
     * Recursively collects all files under {@code dir} into {@code fileList}.
     *
     * @param dir               directory to walk
     * @param fileTypeLimitList file-name suffixes to include; null/empty means no limit
     * @param excludeDirs       subdirectory names to skip; null/empty means exclude nothing
     * @param fileList          output accumulator (mutated in place)
     */
    private void getAllFile(File dir, List<File> fileList, Set<String> fileTypeLimitList, Set<String> excludeDirs){
        boolean hasFileTypeLimit = !(fileTypeLimitList == null || fileTypeLimitList.isEmpty());
        if (!dir.exists() || !dir.isDirectory()) {
            return;
        }
        File[] subFileList = dir.listFiles();
        // listFiles() returns null on I/O error or permission denial;
        // the previous version dereferenced it unconditionally (NPE).
        if (subFileList == null) {
            return;
        }
        System.out.println(String.format("正在获取【%s】目录下的文件，共%s文件和目录...",dir.getPath(),subFileList.length));
        for (File file : subFileList) {
            if (file.isFile()) {
                if (hasFileTypeLimit) {
                    String name = file.getName();
                    for (String type : fileTypeLimitList) {
                        if (name.endsWith(type)) {
                            fileList.add(file);
                            // break so a file matching several suffixes
                            // (e.g. "txt" and ".txt") is added only once.
                            break;
                        }
                    }
                } else {
                    fileList.add(file);
                }
            } else {
                // Recurse unless this directory name is explicitly excluded.
                // A null excludeDirs now means "exclude nothing" — the previous
                // version skipped ALL subdirectories when excludeDirs was null,
                // inconsistent with the null handling of fileTypeLimitList.
                if (excludeDirs == null || !excludeDirs.contains(file.getName())) {
                    getAllFile(file, fileList, fileTypeLimitList, excludeDirs);
                }
            }
        }
    }
}
