package com.example.yckjbigdataservice.controller;

import com.example.yckjbigdataservice.utils.HdfsFileScanner;
import lombok.extern.slf4j.Slf4j;
import org.apache.hadoop.fs.*;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RestController;

import java.io.*;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.time.LocalDate;
import java.time.LocalDateTime;
import java.time.format.DateTimeFormatter;
import java.util.*;
import java.util.stream.Stream;

@RequestMapping("/hdfs")
@RestController
@Slf4j
public class HdfsController {

    /** Hadoop configuration injected from the Spring context (bean {@code confHdfsLoad}). */
    private final org.apache.hadoop.conf.Configuration confHdfsLoad;

    public HdfsController(@Qualifier("confHdfsLoad") org.apache.hadoop.conf.Configuration confHdfsLoad) {
        this.confHdfsLoad = confHdfsLoad;
    }

    /**
     * Scans {@code path} on HDFS (max depth 3, skipping {@code _tmp}/{@code .trash}/{@code ppx})
     * for {@code .json} files and, for each match: creates a one-time {@code <file>.bak} copy on
     * HDFS, downloads the file under {@code localBakPath}, rewrites the single-namenode URIs
     * {@code hdfs://CN02:25000} / {@code hdfs://CN03:25000} to the HA nameservice
     * {@code hdfs://hacluster} via {@code sed} (which leaves a timestamped local backup), and
     * overwrites the HDFS file with the rewritten content.
     *
     * @param path         HDFS directory to scan
     * @param localBakPath local directory where downloaded copies (and sed backups) are kept
     * @return "ok" on success
     * @throws RuntimeException wrapping any failure (HDFS I/O, shell execution, ...)
     */
    @GetMapping("/dir/ls")
    public String hiveQuery(@RequestParam("path") String path, @RequestParam("localBakPath") String localBakPath) {
        int count = 0;
        try (FileSystem fSystem = FileSystem.get(confHdfsLoad)) {
            // Directories skipped during the scan.
            Set<String> excludeDirs = new HashSet<>(Arrays.asList("_tmp", ".trash", "ppx"));

            // Only files with these suffixes are processed.
            List<String> suffixFilters = new ArrayList<>();
            suffixFilters.add(".json");

            int maxDepth = 3;

            HdfsFileScanner scanner = new HdfsFileScanner(fSystem, excludeDirs, suffixFilters, maxDepth);
            for (Path file : scanner.scan(new Path(path))) {
                count++;
                processFile(fSystem, file, localBakPath);
            }
            log.info("总共处理{}条数据", count);
        } catch (Exception e) {
            throw new RuntimeException("HDFS replace job failed for path: " + path, e);
        }
        return "ok";
    }

    /**
     * Handles one matched HDFS file: HDFS-side backup, local download, sed rewrite,
     * and overwrite-upload back to HDFS.
     */
    private void processFile(FileSystem fSystem, Path file, String localBakPath) throws Exception {
        log.info("📍 全路径:        {}", file);
        log.info("📄 文件名:        {}", file.getName());
        log.info("📁 父路径:        {}", file.getParent());
        log.info("📂 上级目录名:     {}", file.getParent().getName());
        log.info("🌐 协议:          {}", file.toUri().getScheme());
        log.info("🖧 主机+端口:      {}", file.toUri().getAuthority());
        log.info("🔗 文件路径部分:   {}", file.toUri().getPath());

        // 1. The file must still exist (the scan result may be stale).
        if (!fSystem.exists(file)) {
            throw new FileNotFoundException("文件不存在: " + file);
        }

        // 2. One-time HDFS backup: /file.txt -> /file.txt.bak (never overwritten).
        Path backupPath = new Path(file + ".bak");
        if (!fSystem.exists(backupPath)) {
            FileUtil.copy(fSystem, file, fSystem, backupPath, false, confHdfsLoad);
            log.info("✅ 已创建备份: {}", backupPath);
        } else {
            log.info("⚠️ 备份已存在，跳过: {}", backupPath);
        }

        // 3. Download the HDFS file to <localBakPath>/<fileName>.
        java.nio.file.Path localPath = Paths.get(localBakPath, file.getName());
        fSystem.copyToLocalFile(false, file, new Path(localPath.toAbsolutePath().toString()));
        log.info("📥 下载完成: {}", localPath.toAbsolutePath());

        String oldStr1 = "hdfs://CN02:25000";
        String oldStr2 = "hdfs://CN03:25000";
        String newStr1 = "hdfs://hacluster";

        // Informational only: files without either URI are still run through sed (no-op).
        if (fileContainsNone(localPath.toAbsolutePath().toString(), oldStr1, oldStr2)) {
            log.info("文件 file = {}不存在单节点的信息", file);
        }

        // 4. In-place sed rewrite; sed keeps a timestamped backup next to the file.
        //    The local path is single-quoted (with embedded quotes escaped) so that
        //    spaces or shell metacharacters in the request-supplied localBakPath
        //    cannot break — or inject into — the bash command line.
        String dateStr = LocalDateTime.now().format(DateTimeFormatter.ofPattern("yyyy-MM-dd_HH_mm_ss"));
        String command = "sed -i.bak_$(date '+%Y-%m-%d_%H_%M_%S') "
                + "-e 's|" + escapeShell(oldStr1) + "|" + escapeShell(newStr1) + "|g' "
                + "-e 's|" + escapeShell(oldStr2) + "|" + escapeShell(newStr1) + "|g' "
                + "'" + escapeShell(localPath.toAbsolutePath().toString()) + "'";

        log.info("📌 执行命令: {}", command);
        executeCommand(command);

        // sed computes its own timestamp inside bash, so this logged name can lag the
        // real backup by a second or two; it is informational only.
        log.info("✅ sed 备份文件路径为: {}.bak_{}", localPath.toAbsolutePath(), dateStr);

        // 5. Overwrite the HDFS file with the rewritten local copy. Read as UTF-8 and
        //    write the UTF-8 bytes explicitly: DataOutputStream.writeBytes(String) keeps
        //    only the low byte of each char and would corrupt non-ASCII content.
        try (FSDataOutputStream out = fSystem.create(file, true);
             BufferedReader in = Files.newBufferedReader(localPath, StandardCharsets.UTF_8)) {
            String line;
            boolean first = true;
            while ((line = in.readLine()) != null) {
                if (!first) {
                    out.write('\n'); // newline only between lines, preserving the original layout
                }
                out.write(line.getBytes(StandardCharsets.UTF_8));
                first = false;
            }
        }
        log.info("✅ 替换后文件已覆盖上传到 HDFS: {}", file);
    }

    /**
     * Returns {@code true} if any line of the local file contains {@code keyword}.
     * The file is decoded as UTF-8 (the {@link Files#lines} default).
     */
    public static boolean fileContains(String filePath, String keyword) throws IOException {
        try (Stream<String> lines = Files.lines(Paths.get(filePath))) {
            return lines.anyMatch(line -> line.contains(keyword));
        }
    }

    /**
     * Returns {@code true} if any line of the local file contains at least one of
     * {@code keywords}. Example: {@code fileContainsAny("/tmp/app.conf", "zk1:2181", "hdfs://cn03")}.
     */
    public static boolean fileContainsAny(String filePath, String... keywords) throws IOException {
        try (Stream<String> lines = Files.lines(Paths.get(filePath))) {
            return lines.anyMatch(line ->
                    Stream.of(keywords).anyMatch(line::contains)
            );
        }
    }

    /** Returns {@code true} if no line of the local file contains any of {@code keywords}. */
    public static boolean fileContainsNone(String filePath, String... keywords) throws IOException {
        try (Stream<String> lines = Files.lines(Paths.get(filePath))) {
            return lines.noneMatch(line ->
                    Stream.of(keywords).anyMatch(line::contains)
            );
        }
    }

    /**
     * Runs {@code cmd} through {@code bash -c}, streaming merged stdout/stderr to the log.
     *
     * @throws RuntimeException if the command exits with a non-zero status
     */
    private static void executeCommand(String cmd) throws Exception {
        ProcessBuilder pb = new ProcessBuilder("bash", "-c", cmd);
        pb.redirectErrorStream(true); // merge stderr into stdout so nothing is lost
        Process process = pb.start();

        try (BufferedReader reader = new BufferedReader(
                new InputStreamReader(process.getInputStream(), StandardCharsets.UTF_8))) {
            String line;
            while ((line = reader.readLine()) != null) {
                log.info("🟢 {}", line);
            }
        }

        int code = process.waitFor();
        if (code != 0) {
            throw new RuntimeException("❌ Shell command failed with exit code: " + code);
        }
    }

    /** Recursively logs every entry under {@code path}, tagging directories [D] and files [F]. */
    public static void listFiles(FileSystem fs, Path path) throws Exception {
        for (FileStatus status : fs.listStatus(path)) {
            log.info("路径：{} {}", status.getPath(), status.isDirectory() ? "[D]" : "[F]");
            if (status.isDirectory()) {
                listFiles(fs, status.getPath()); // recurse into subdirectories
            }
        }
    }

    /**
     * Replaces every occurrence of {@code target} with {@code replacement} inside an HDFS file,
     * creating a one-time {@code <file>.bak} copy first.
     *
     * @param fs          the filesystem holding the file
     * @param filePath    HDFS file to rewrite
     * @param target      literal substring to replace
     * @param replacement literal replacement text
     * @throws FileNotFoundException if {@code filePath} does not exist
     * @throws IOException           on any HDFS I/O failure
     */
    public static void replaceInFile(FileSystem fs, Path filePath, String target, String replacement) throws IOException {
        if (!fs.exists(filePath)) {
            throw new FileNotFoundException("File not found: " + filePath);
        }

        // One-time backup next to the original.
        Path backupPath = new Path(filePath.toString() + ".bak");
        if (!fs.exists(backupPath)) {
            FileUtil.copy(fs, filePath, fs, backupPath, false, fs.getConf());
            System.out.println("Backup created: " + backupPath);
        } else {
            System.out.println("Backup already exists: " + backupPath);
        }

        // Read and substitute line by line; charset pinned to UTF-8 (the no-arg
        // InputStreamReader would depend on the JVM's platform default).
        StringBuilder contentBuilder = new StringBuilder();
        try (BufferedReader reader = new BufferedReader(
                new InputStreamReader(fs.open(filePath), StandardCharsets.UTF_8))) {
            String line;
            while ((line = reader.readLine()) != null) {
                contentBuilder.append(line.replace(target, replacement)).append('\n');
            }
        }

        // Write back as UTF-8 bytes; writeBytes(String) would drop the high byte of every char.
        try (FSDataOutputStream output = fs.create(filePath, true)) {
            output.write(contentBuilder.toString().getBytes(StandardCharsets.UTF_8));
        }

        System.out.println("Replace done for file: " + filePath);
    }

    /**
     * Standalone CLI entry point: sed-replace a string inside one HDFS file via shell commands.
     *
     * <pre>
     * java -cp your.jar HdfsSedReplace /user/hdfs/data.txt "original_string" "new_string" hdfs://namenode:8020
     * </pre>
     */
    public static void main(String[] args) throws IOException, InterruptedException {
        if (args.length < 4) {
            System.err.println("Usage: HdfsSedReplace <hdfsPath> <target> <replacement> <hdfsUri>");
            System.exit(1);
        }

        String hdfsPath = args[0];
        String target = args[1];
        String replacement = args[2];
        String hdfsUri = args[3];

        // 1. Unique local scratch paths.
        String tmpFile = "/tmp/" + UUID.randomUUID() + ".tmp";
        String tmpReplacedFile = tmpFile + ".replaced";

        try {
            // 2. Download the HDFS file to the local scratch path.
            runCommand(String.format("hdfs dfs -fs %s -get %s %s", hdfsUri, hdfsPath, tmpFile));

            // 3. sed-replace into a second temp file.
            String sedCmd = String.format("sed 's/%s/%s/g' %s > %s", escapeShell(target), escapeShell(replacement), tmpFile, tmpReplacedFile);
            runCommand(sedCmd);

            // 4. Back up the original on HDFS.
            String backupPath = hdfsPath + ".bak";
            runCommand(String.format("hdfs dfs -fs %s -cp %s %s", hdfsUri, hdfsPath, backupPath));

            // 5. Overwrite the original with the replaced copy.
            runCommand(String.format("hdfs dfs -fs %s -put -f %s %s", hdfsUri, tmpReplacedFile, hdfsPath));

            System.out.println("✔ 替换完成，已覆盖原文件并备份为：" + backupPath);
        } finally {
            // 6. Always remove the local temp files. Use java.nio Paths explicitly:
            //    the unqualified Path in this file is org.apache.hadoop.fs.Path.
            Files.deleteIfExists(Paths.get(tmpFile));
            Files.deleteIfExists(Paths.get(tmpReplacedFile));
        }
    }

    /**
     * Runs {@code cmd} through {@code bash -c}, echoing output to stdout.
     *
     * @throws RuntimeException if the command exits with a non-zero status
     */
    private static void runCommand(String cmd) throws IOException, InterruptedException {
        System.out.println("[执行] " + cmd);
        Process process = new ProcessBuilder("bash", "-c", cmd)
                .redirectErrorStream(true)
                .start();
        try (BufferedReader reader = new BufferedReader(
                new InputStreamReader(process.getInputStream(), StandardCharsets.UTF_8))) {
            String line;
            while ((line = reader.readLine()) != null) {
                System.out.println(line);
            }
        }
        int exitCode = process.waitFor();
        if (exitCode != 0) {
            throw new RuntimeException("命令执行失败：" + cmd + " (exit code " + exitCode + ")");
        }
    }

    /**
     * Escapes single quotes ({@code ' -> '"'"'}) so {@code input} can be embedded
     * inside a single-quoted bash argument.
     */
    private static String escapeShell(String input) {
        return input.replace("'", "'\"'\"'");
    }
}
