#include <iostream>
#include <fstream>
#include <vector>
#include <string>
#include <queue>
#include <unordered_map>
#include <unordered_set>
#include <thread>
#include <mutex>
#include <chrono>
#include <algorithm>
#include <sstream>
#include <iomanip>
#include <functional>
#include <cstring>
#include <ctime>
#include <memory>
#include <atomic>

#ifdef _WIN32
#include <windows.h>
#include <direct.h>
#include <io.h>
#include <sys/stat.h>
#else
#include <dirent.h>
#include <sys/stat.h>
#include <unistd.h>
#endif

using namespace std;
using namespace std::chrono;

// File metadata record used by the duplicate finder.
// IMPORTANT: `path` and `filename` are NON-OWNING pointers into StringPool,
// whose unique_ptrs own the strings. The previous version delete'd them in
// ~FileInfo, which double-freed every string (the pool frees them again on
// clear()/destruction). The destructor is now defaulted.
struct FileInfo
{
    std::string *path;     // non-owning; lifetime managed by StringPool
    std::string *filename; // non-owning; lifetime managed by StringPool
    size_t size;
    time_t lastWriteTime;
    time_t lastAccessTime;
    std::unique_ptr<std::vector<unsigned char>> firstBytes; // first bytes, filled lazily for pre-comparison
    std::unique_ptr<std::string> contentHash;               // full-content hash, filled lazily

    FileInfo(std::string *p, std::string *fn, size_t s, time_t lwt, time_t lat)
        : path(p), filename(fn), size(s), lastWriteTime(lwt), lastAccessTime(lat)
    {
    }

    // Move constructor: steals the smart pointers and nulls the source's raw
    // pointers so a moved-from FileInfo is inert.
    FileInfo(FileInfo &&other) noexcept
        : path(other.path), filename(other.filename), size(other.size),
          lastWriteTime(other.lastWriteTime), lastAccessTime(other.lastAccessTime),
          firstBytes(std::move(other.firstBytes)), contentHash(std::move(other.contentHash))
    {
        other.path = nullptr;
        other.filename = nullptr;
    }

    // Intentionally does NOT delete path/filename: StringPool owns them.
    ~FileInfo() = default;

    // Non-copyable: the unique_ptr members are move-only anyway.
    FileInfo(const FileInfo &) = delete;
    FileInfo &operator=(const FileInfo &) = delete;
};

// Thread-safe arena of heap-allocated strings. allocate() hands out a stable
// raw pointer while the pool retains ownership; clear() releases everything,
// after which previously returned pointers dangle.
class StringPool
{
private:
    std::vector<std::unique_ptr<std::string>> pool; // owning storage
    std::mutex poolMutex;                           // serializes allocate/clear

public:
    // Copy `str` into the pool and return a non-owning pointer to the copy.
    std::string *allocate(const std::string &str)
    {
        std::lock_guard<std::mutex> guard(poolMutex);
        pool.push_back(std::make_unique<std::string>(str));
        return pool.back().get();
    }

    // Destroy every pooled string.
    void clear()
    {
        std::lock_guard<std::mutex> guard(poolMutex);
        pool.clear();
    }
};

// Function-pointer type aliases (modern `using` form of the old typedefs).
// HashFunction maps a string to its size_t hash value; ReadBytesFunction
// reads up to N leading bytes of the file at the given path.
using HashFunction = size_t (*)(const std::string &);
using ReadBytesFunction = std::unique_ptr<std::vector<unsigned char>> (*)(const std::string &, size_t);

// Minimal cross-platform (Win32 / POSIX) filesystem helpers.
class Filesystem
{
public:
    // One directory-listing entry. unique_ptr members keep moves cheap.
    // POD members carry in-class initializers: the previous version left them
    // indeterminate after default construction and later read them.
    struct FileEntry
    {
        std::unique_ptr<std::string> path; // full path of the entry
        std::unique_ptr<std::string> name; // bare file/directory name
        bool isDirectory = false;
        size_t size = 0;
        time_t modTime = 0;
        time_t accessTime = 0;

        FileEntry() = default;
        FileEntry(FileEntry &&other) noexcept = default;
    };

    // True if `path` exists (follows symlinks on POSIX — stat, not lstat).
    static bool exists(const std::string &path)
    {
#ifdef _WIN32
        DWORD attr = GetFileAttributesA(path.c_str());
        return attr != INVALID_FILE_ATTRIBUTES;
#else
        struct stat st;
        return stat(path.c_str(), &st) == 0;
#endif
    }

    // True if `path` exists and is a directory.
    static bool isDirectory(const std::string &path)
    {
#ifdef _WIN32
        DWORD attr = GetFileAttributesA(path.c_str());
        return (attr != INVALID_FILE_ATTRIBUTES) && (attr & FILE_ATTRIBUTE_DIRECTORY);
#else
        struct stat st;
        if (stat(path.c_str(), &st) == 0)
        {
            return S_ISDIR(st.st_mode);
        }
        return false;
#endif
    }

    // List the immediate children of `path` ("." and ".." excluded).
    // Unreadable entries are silently skipped (POSIX: stat failure).
    static std::vector<FileEntry> listDirectory(const std::string &path)
    {
        std::vector<FileEntry> entries;

#ifdef _WIN32
        WIN32_FIND_DATAA findData;
        std::string searchPath = path + "\\*";
        HANDLE hFind = FindFirstFileA(searchPath.c_str(), &findData);
        if (hFind != INVALID_HANDLE_VALUE)
        {
            do
            {
                std::string name = findData.cFileName;
                if (name != "." && name != "..")
                {
                    FileEntry entry;
                    entry.path = std::make_unique<std::string>(path + "\\" + name);
                    entry.name = std::make_unique<std::string>(name);
                    entry.isDirectory = (findData.dwFileAttributes & FILE_ATTRIBUTE_DIRECTORY) != 0;
                    // Combine both halves so files larger than 4 GB report the
                    // correct size (the old code dropped nFileSizeHigh).
                    entry.size = static_cast<size_t>(
                        (static_cast<unsigned long long>(findData.nFileSizeHigh) << 32) |
                        findData.nFileSizeLow);

                    // FILETIME counts 100-ns ticks since 1601-01-01 UTC.
                    // Convert directly to Unix time; the old round-trip through
                    // SYSTEMTIME + mktime misinterpreted UTC fields as local time.
                    ULARGE_INTEGER ticks;
                    ticks.LowPart = findData.ftLastWriteTime.dwLowDateTime;
                    ticks.HighPart = findData.ftLastWriteTime.dwHighDateTime;
                    entry.modTime = static_cast<time_t>(
                        ticks.QuadPart / 10000000ULL - 11644473600ULL);
                    entry.accessTime = entry.modTime; // FindFirstFile gives us write time only here
                    entries.push_back(std::move(entry));
                }
            } while (FindNextFileA(hFind, &findData));
            FindClose(hFind);
        }
#else
        DIR *dir = opendir(path.c_str());
        if (dir)
        {
            struct dirent *entry;
            while ((entry = readdir(dir)) != nullptr)
            {
                std::string name = entry->d_name;
                if (name != "." && name != "..")
                {
                    FileEntry fileEntry;
                    fileEntry.path = std::make_unique<std::string>(path + "/" + name);
                    fileEntry.name = std::make_unique<std::string>(name);

                    struct stat st;
                    // NOTE(review): stat follows symlinks, so a symlink cycle in
                    // the tree could loop forever during traversal — consider lstat.
                    if (stat(fileEntry.path->c_str(), &st) == 0)
                    {
                        fileEntry.isDirectory = S_ISDIR(st.st_mode);
                        fileEntry.size = st.st_size;
                        fileEntry.modTime = st.st_mtime;
                        fileEntry.accessTime = st.st_atime;
                        entries.push_back(std::move(fileEntry));
                    }
                }
            }
            closedir(dir);
        }
#endif
        return entries;
    }
};

// Append-only, thread-safe logger writing "[YYYY-MM-DD HH:MM:SS.mmm] msg" lines.
class Logger
{
private:
    std::ofstream logFile;
    std::mutex logMutex; // serializes log(); also makes localtime() use safe here

public:
    // Opens `filename` in append mode; throws runtime_error on failure.
    Logger(const std::string &filename) : logFile(filename, std::ios::app)
    {
        if (!logFile.is_open())
        {
            throw std::runtime_error("无法打开日志文件:" + filename);
        }
    }

    ~Logger()
    {
        if (logFile.is_open())
        {
            logFile.close();
        }
    }

    // Write one timestamped line and flush it.
    void log(const std::string &message)
    {
        std::lock_guard<std::mutex> lock(logMutex);
        auto now = std::chrono::system_clock::now();
        // Renamed from `time_t`, which shadowed the type it was declared with.
        std::time_t nowTime = std::chrono::system_clock::to_time_t(now);
        auto ms = std::chrono::duration_cast<std::chrono::milliseconds>(
                      now.time_since_epoch()) % 1000;
        // Fixed format: the old "%H:%M%S" was missing the colon before seconds.
        logFile << "[" << std::put_time(std::localtime(&nowTime), "%Y-%m-%d %H:%M:%S");
        logFile << "." << std::setfill('0') << std::setw(3) << ms.count() << "] ";
        logFile << message << std::endl;
        logFile.flush();
    }
};

// String-hash helper built around a swappable function pointer, so callers
// can plug in an alternative hash implementation at runtime.
class HashCalculator
{
private:
    HashFunction hashFunc; // current hash strategy

public:
    // Default strategy: std::hash<std::string>.
    HashCalculator()
        : hashFunc([](const std::string &str) -> size_t
                   { return std::hash<std::string>{}(str); })
    {
    }

    // Replace the hash strategy with `func`.
    void setHashFunction(HashFunction func)
    {
        hashFunc = func;
    }

    // Hash `content` and render the value as a lowercase hex string.
    std::unique_ptr<std::string> calculateHash(const std::string &content)
    {
        std::ostringstream oss;
        oss << std::hex << hashFunc(content);
        return std::make_unique<std::string>(oss.str());
    }
};

// Finds duplicate files under a directory tree in three narrowing passes:
//   1) group by file size,
//   2) within a size group, group by the first 100 bytes,
//   3) confirm with a hash of the full content, then report.
class DuplicateFileFinder
{
private:
    Logger &logger;         // shared logger, owned by the caller
    string targetDirectory; // root of the scan
    mutex fileMutex;        // guards allFiles
    mutex queueMutex;       // guards the BFS directory queue.  The previous
                            // version used TWO distinct function-local static
                            // mutexes for the same queue, so push and pop were
                            // not mutually exclusive (a data race).
    vector<FileInfo> allFiles; // every regular file discovered
    StringPool stringPool;     // owns the path/name strings FileInfo points at

    ReadBytesFunction readBytesFunc; // pluggable "read first N bytes" strategy
    HashCalculator hashCalculator;

    atomic<size_t> filesProcessed{0};
    atomic<size_t> totalFiles{0};

    // Format a time_t as "YYYY-MM-DD HH:MM:SS" in local time.
    string timeToString(time_t time)
    {
        struct tm *timeinfo = localtime(&time);
        if (!timeinfo) // localtime fails for out-of-range values
            return string();
        char buffer[100];
        strftime(buffer, sizeof(buffer), "%Y-%m-%d %H:%M:%S", timeinfo);
        return string(buffer);
    }

    // Hash the full file content.  Reads in 8 KB chunks into a single string;
    // the previous version funneled the chunks through a stringstream first,
    // holding two full copies of the file in memory for no benefit.
    unique_ptr<string> calculateContentHash(const string &filePath)
    {
        try
        {
            ifstream file(filePath, ios::binary);
            if (!file.is_open())
            {
                logger.log("无法打开文件计算哈希:" + filePath);
                return make_unique<string>();
            }

            const size_t bufferSize = 8192;
            vector<char> buffer(bufferSize);
            string content;

            while (file.read(buffer.data(), bufferSize))
            {
                content.append(buffer.data(), static_cast<size_t>(file.gcount()));
            }
            // Append the final partial chunk (read() returned false).
            content.append(buffer.data(), static_cast<size_t>(file.gcount()));

            return hashCalculator.calculateHash(content);
        }
        catch (const exception &e)
        {
            logger.log("计算内容哈希时出错:" + string(e.what()) + "文件:" + filePath);
            return make_unique<string>();
        }
    }

    // Read up to `maxBytes` leading bytes of the file (fewer if it is shorter).
    // Returns an empty vector if the file cannot be opened.
    static unique_ptr<vector<unsigned char>> readFirstBytes(const string &filePath, size_t maxBytes = 100)
    {
        auto bytes = make_unique<vector<unsigned char>>();
        try
        {
            ifstream file(filePath, ios::binary);
            if (!file.is_open())
            {
                return bytes;
            }

            bytes->resize(maxBytes);
            file.read(reinterpret_cast<char *>(bytes->data()), maxBytes);
            bytes->resize(file.gcount()); // shrink to what was actually read
        }
        catch (const exception &)
        {
            // Best effort: an unreadable file simply yields an empty buffer.
        }
        return bytes;
    }

    // Record one regular file: copy its strings into the pool (which keeps
    // ownership) and append a FileInfo under fileMutex.
    void processFile(Filesystem::FileEntry &&entry)
    {
        try
        {
            if (!entry.isDirectory)
            {
                string *pathPtr = stringPool.allocate(*entry.path);
                string *namePtr = stringPool.allocate(*entry.name);

                FileInfo fileInfo(pathPtr, namePtr, entry.size,
                                  entry.modTime, entry.accessTime);

                {
                    lock_guard<mutex> lock(fileMutex);
                    allFiles.push_back(move(fileInfo));
                }

                // Atomic increment; log a progress line every 100 files.
                size_t processed = ++filesProcessed;
                if (processed % 100 == 0)
                {
                    logger.log("已处理 " + to_string(processed) + " / " +
                               to_string(totalFiles.load()) + " 个文件");
                }
            }
        }
        catch (const exception &e)
        {
            logger.log("处理文件时出错:" + string(e.what()) + "文件:" +
                       (entry.path ? *entry.path : string("<unknown>")));
        }
    }

    // Ordering helper (size, then mtime, then path).  Currently unused but
    // kept as part of the class's internal API.
    static bool compareFileInfo(const FileInfo *a, const FileInfo *b)
    {
        if (a->size != b->size)
            return a->size < b->size;
        if (a->lastWriteTime != b->lastWriteTime)
            return a->lastWriteTime < b->lastWriteTime;
        return *a->path < *b->path;
    }

public:
    // `log` must outlive this finder; `directory` is the scan root.
    DuplicateFileFinder(Logger &log, const string &directory)
        : logger(log), targetDirectory(directory),
          readBytesFunc(readFirstBytes) {}

    // Multi-threaded breadth-first traversal of the directory tree.
    // Workers pull directories from a shared queue; `busyWorkers` counts
    // workers that may still enqueue subdirectories, so an idle worker only
    // exits once the queue is empty AND nobody can refill it.  The previous
    // version could exit early, slept while holding the queue lock, and
    // guarded the queue with two different mutexes.
    void traverseDirectoryBFS()
    {
        logger.log("开始广度优先遍历目录:" + targetDirectory);

        // Pre-count files so progress logging has a denominator.
        totalFiles = countTotalFiles();
        logger.log("预计总文件数:" + to_string(totalFiles.load()));

        queue<string> dirQueue;
        dirQueue.push(targetDirectory);

        size_t maxThreads = thread::hardware_concurrency();
        if (maxThreads == 0)
            maxThreads = 2; // hardware_concurrency() may legally return 0

        atomic<size_t> busyWorkers{0};

        auto workerFunc = [this, &dirQueue, &busyWorkers]()
        {
            for (;;)
            {
                string currentDir;
                {
                    unique_lock<mutex> lock(queueMutex);
                    if (dirQueue.empty())
                    {
                        lock.unlock(); // never sleep while holding the lock
                        if (busyWorkers.load() == 0)
                            break; // no queued work and nobody can add more
                        this_thread::sleep_for(milliseconds(5));
                        continue;
                    }
                    currentDir = move(dirQueue.front());
                    dirQueue.pop();
                    // Marked busy under the same lock as the pop, so no other
                    // worker can observe "queue empty, nobody busy" in between.
                    ++busyWorkers;
                }

                try
                {
                    vector<Filesystem::FileEntry> entries =
                        Filesystem::listDirectory(currentDir);

                    for (auto &entry : entries)
                    {
                        if (entry.isDirectory)
                        {
                            lock_guard<mutex> lock(queueMutex);
                            dirQueue.push(move(*entry.path));
                        }
                        else
                        {
                            processFile(move(entry));
                        }
                    }
                }
                catch (const exception &e)
                {
                    logger.log("遍历目录时出错:" + string(e.what()) +
                               "目录:" + currentDir);
                }
                // All subdirectories are queued by now, so it is safe to
                // stop counting this worker as a potential producer.
                --busyWorkers;
            }
        };

        vector<thread> threads;
        threads.reserve(maxThreads);
        for (size_t i = 0; i < maxThreads; ++i)
        {
            threads.emplace_back(workerFunc);
        }
        for (auto &t : threads)
        {
            if (t.joinable())
            {
                t.join();
            }
        }

        logger.log("目录遍历完成, 共找到" + to_string(allFiles.size()) + " 个文件");
    }

    // Single-threaded pre-pass counting regular files under targetDirectory.
    size_t countTotalFiles()
    {
        size_t count = 0;
        queue<string> dirQueue;
        dirQueue.push(targetDirectory);

        while (!dirQueue.empty())
        {
            string currentDir = move(dirQueue.front());
            dirQueue.pop();

            try
            {
                auto entries = Filesystem::listDirectory(currentDir);
                for (auto &entry : entries)
                {
                    if (entry.isDirectory)
                    {
                        dirQueue.push(move(*entry.path));
                    }
                    else
                    {
                        count++;
                    }
                }
            }
            catch (...)
            {
                // Best-effort estimate: unreadable directories are skipped.
            }
        }
        return count;
    }

    // Run passes 1-3 over the collected files and write the report to
    // `outputFile`.  Throws runtime_error if the output file cannot be opened.
    // NOTE: the previous version spawned one thread per FILE for 100-byte
    // reads and per potential-duplicate GROUP for hashing — an unbounded
    // thread explosion whose overhead dwarfed the I/O.  The passes now run
    // sequentially with identical results.
    void findDuplicates(const string &outputFile)
    {
        logger.log("开始查找重复文件");

        // Pass 1: group by size — files of different sizes cannot be equal.
        unordered_map<size_t, vector<FileInfo *>> sizeGroups;
        for (auto &file : allFiles)
        {
            sizeGroups[file.size].push_back(&file);
        }

        logger.log("按文件大小分组完成, 共" + to_string(sizeGroups.size()) + "个大小组");

        // Pass 2: within each size group, compare the first 100 bytes.
        vector<vector<FileInfo *>> potentialDuplicates;

        for (auto &pair : sizeGroups)
        {
            vector<FileInfo *> &files = pair.second;
            if (files.size() <= 1)
                continue;

            for (auto *file : files)
            {
                file->firstBytes = readBytesFunc(*file->path, 100);
            }

            unordered_map<string, vector<FileInfo *>> byteGroups;
            for (auto *file : files)
            {
                if (file->firstBytes)
                {
                    string bytesStr(file->firstBytes->begin(),
                                    file->firstBytes->end());
                    byteGroups[bytesStr].push_back(file);
                }
            }

            for (auto &bytePair : byteGroups)
            {
                if (bytePair.second.size() > 1)
                {
                    potentialDuplicates.push_back(bytePair.second);
                }
            }
        }

        logger.log("前100字节比较完成, 找到" + to_string(potentialDuplicates.size()) + "个潜在重复组");

        // Pass 3: confirm with full-content hashes and write the report.
        ofstream output(outputFile);
        if (!output.is_open())
        {
            throw runtime_error("无法打开输出文件:" + outputFile);
        }

        int duplicateGroupCount = 0;

        for (auto &group : potentialDuplicates)
        {
            unordered_map<string, vector<FileInfo *>> hashGroups;
            for (auto *file : group)
            {
                file->contentHash = calculateContentHash(*file->path);
                // Empty hash means the file could not be read; skip it.
                if (file->contentHash && !file->contentHash->empty())
                {
                    hashGroups[*file->contentHash].push_back(file);
                }
            }

            for (auto &hashPair : hashGroups)
            {
                vector<FileInfo *> &hashFiles = hashPair.second;
                if (hashFiles.size() > 1)
                {
                    int groupNum = ++duplicateGroupCount;

                    output << "=== 重复文件组" << groupNum
                           << "(内容哈希:" << hashPair.first << ")===" << endl;
                    for (auto *file : hashFiles)
                    {
                        output << "路径:" << *file->path << endl;
                        output << "文件名:" << *file->filename << endl;
                        output << "大小:" << file->size << "字节" << endl;
                        output << "修改时间:" << timeToString(file->lastWriteTime) << endl;
                        output << "访问时间:" << timeToString(file->lastAccessTime) << endl;
                        output << "---" << endl;

                        logger.log("发现重复文件:" + *file->path +
                                   "(大小:" + to_string(file->size) + " 字节)");
                    }
                    output << endl;
                }
            }
        }

        logger.log("重复文件查找完成, 共找到 " + to_string(duplicateGroupCount) + " 组重复文件");
        output.close();

        // Release the pooled strings.  allFiles' path/filename pointers
        // dangle after this, so allFiles must not be used again.
        stringPool.clear();
    }
};

// Entry point: resolve the directory to scan (argv[1], or prompt the user),
// then run the duplicate-file scan and report the elapsed time.
int main(int argc, char *argv[])
{
    try
    {
        std::string scanDir;
        if (argc > 1)
        {
            scanDir = argv[1];
        }
        else
        {
            std::cout << "请输入要扫描的目录路径:";
            std::getline(std::cin, scanDir);
        }

        // Bail out early on a missing or empty path.
        if (scanDir.empty() || !Filesystem::exists(scanDir))
        {
            std::cerr << "目录不存在:" << scanDir << std::endl;
            return 1;
        }

        // Open the log file (throws if it cannot be created).
        Logger logger("duplicate_finder_log.txt");
        const auto startTime = std::chrono::system_clock::now();
        auto startTimeT = std::chrono::system_clock::to_time_t(startTime);

        logger.log("程序开始运行");
        logger.log("扫描目录:" + scanDir);
        logger.log("开始时间:" + std::string(ctime(&startTimeT)));

        std::cout << "开始扫描目录:" << scanDir << std::endl;
        std::cout << "日志文件:duplicate_finder_log.txt" << std::endl;
        std::cout << "结果文件:duplicate_files_result.txt" << std::endl;

        DuplicateFileFinder finder(logger, scanDir);

        std::cout << "正在遍历目录...." << std::endl;
        finder.traverseDirectoryBFS();

        std::cout << "正在查找重复文件..." << std::endl;
        finder.findDuplicates("duplicate_files_result.txt");

        const auto endTime = std::chrono::system_clock::now();
        auto endTimeT = std::chrono::system_clock::to_time_t(endTime);
        const auto elapsed = std::chrono::duration_cast<std::chrono::seconds>(endTime - startTime);

        logger.log("程序结束运行");
        logger.log("结束时间:" + std::string(ctime(&endTimeT)));
        logger.log("运行时间:" + std::to_string(elapsed.count()) + "秒");

        std::cout << "扫描完成!" << std::endl;
        std::cout << "运行时间:" << elapsed.count() << "秒" << std::endl;
        std::cout << "请查看 duplicate_files_result.txt 了解重复文件详情" << std::endl;
        std::cout << "请查看 duplicate_finder_log.txt 了解运行日志" << std::endl;
    }
    catch (const std::exception &e)
    {
        std::cerr << "程序运行出错:" << e.what() << std::endl;
        return 1;
    }
    return 0;
}
