package cn.edu.cug.cs.gtl.plugins.catalog.utils;

import cn.edu.cug.cs.gtl.common.Pair;
import cn.edu.cug.cs.gtl.io.FileDataSplitter;
import cn.edu.cug.cs.gtl.protos.*;
import cn.edu.cug.cs.gtl.utils.DataCatalogManager;
import cn.edu.cug.cs.gtl.utils.DataGroupManager;
import cn.edu.cug.cs.gtl.utils.DataItemManager;
import cn.edu.cug.cs.gtl.utils.impl.DataItemManagerImpl;
import cn.edu.cug.cs.gtl.utils.postgresql.DataCatalogManager4Postgresql;
import cn.edu.cug.cs.gtl.utils.postgresql.DataGroupManager4Postgresql;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.poi.hwpf.HWPFDocument;
import org.apache.poi.hwpf.extractor.WordExtractor;
import org.apache.poi.xwpf.extractor.XWPFWordExtractor;
import org.apache.poi.xwpf.usermodel.XWPFDocument;

import java.io.*;
import java.net.URI;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.StandardCopyOption;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;
import java.util.UUID;

/**
 * Static utilities for moving files between the local filesystem and HDFS and
 * for registering the resulting directory/file structure in the metadata
 * stores (DataCatalog / DataGroup / DataItem), plus helpers for reading
 * TSV, plain-text, DOC and DOCX content straight out of HDFS.
 *
 * <p>NOTE(review): the managers below are backed by PostgreSQL; their exact
 * persistence semantics are defined elsewhere in the project.
 */
public class HdfsFileUtils {

    // Metadata backends shared by all helpers in this class.
    static DataGroupManager dataGroupManager = new DataGroupManager4Postgresql();
    static DataCatalogManager dataCatalogManager = new DataCatalogManager4Postgresql();
    static DataItemManager dataItemManager = new DataItemManagerImpl();

    // Utility class: all members are static, so prevent instantiation.
    private HdfsFileUtils() {
    }

    /**
     * Uploads a local file or directory tree to HDFS and mirrors its structure
     * into the metadata stores.
     *
     * @param parentName name of the catalog node the upload is attached to
     * @param hdfsUri    HDFS endpoint URI, e.g. {@code hdfs://host:9000}
     * @param localPath  local file or directory to upload
     * @param hdfsPath   destination path on HDFS
     * @return {@code true} if {@code hdfsPath} exists after the upload
     * @throws Exception if connecting, copying, or a metadata insert fails
     */
    public static boolean uploadFile(String parentName, String hdfsUri, String localPath, String hdfsPath) throws Exception {
        Configuration conf = new Configuration();
        // Resolve datanodes by hostname; without this the namenode may return
        // internal IP addresses that are unreachable from outside the cluster.
        conf.set("dfs.client.use.datanode.hostname", "true");
        FileSystem fs = FileSystem.get(URI.create(hdfsUri), conf, "hdfs");
        try {
            recursiveUpload(parentName, fs, new File(localPath), hdfsPath);
            return fs.exists(new Path(hdfsPath));
        } finally {
            // Close even when the upload throws, so the connection is not leaked.
            fs.close();
        }
    }

    /**
     * Recursively uploads {@code localFile} under {@code hdfsPath} and records
     * metadata: a directory whose children are files becomes a DataGroup, a
     * directory whose children are directories becomes a DataCatalog, and
     * every file becomes a DataItem attached to its parent group.
     */
    private static void recursiveUpload(String parentName, FileSystem fs, File localFile, String hdfsPath) throws Exception {
        if (localFile.isDirectory()) {
            // Create the target directory first.
            fs.mkdirs(new Path(hdfsPath));

            File[] files = localFile.listFiles();
            if (files == null || files.length == 0) {
                // Empty or unreadable directory (listFiles() returns null on I/O
                // error): nothing to register or recurse into. The original code
                // dereferenced files[0] here and crashed.
                return;
            }
            // First-child heuristic carried over from the original design: the
            // first entry decides whether this directory holds files (-> group)
            // or sub-directories (-> catalog). Mixed directories are assumed
            // not to occur (see checkHDFSFolder).
            if (files[0].isFile()) {
                DataGroup dataGroup = DataGroup.newBuilder().setName(localFile.getName()).setAlias(localFile.getName()).setDescription("").build();
                dataCatalogManager.addDataGroupToCatalog(parentName, dataGroup);
            } else {
                DataCatalog dataCatalog = DataCatalog.newBuilder().setName(localFile.getName()).setAlias(localFile.getName()).setDescription("").build();
                dataCatalogManager.append(parentName, dataCatalog);
            }
            // Recurse into every child.
            for (File file : files) {
                recursiveUpload(localFile.getName(), fs, file, hdfsPath + "/" + file.getName());
            }
        } else {
            // Upload the single file, then persist it as a DataItem.
            fs.copyFromLocalFile(new Path(localFile.getAbsolutePath()), new Path(hdfsPath));
            System.out.println("Uploaded: " + localFile.getName());

            registerFile(localFile.getName(), hdfsPath, parentName);
        }
    }

    /**
     * Registers an existing HDFS directory tree under {@code parentName} in
     * the metadata stores without copying any data. Closes {@code fs} when
     * finished, even on failure.
     */
    public static void mountFile(FileSystem fs, String parentName, String hdfsPath) throws Exception {
        try {
            recursiveMount(fs, parentName, hdfsPath);
        } finally {
            fs.close();
        }
    }

    /**
     * Mount-side twin of {@link #recursiveUpload}: walks an existing HDFS tree
     * and records the same group/catalog/item metadata for it.
     */
    private static void recursiveMount(FileSystem fs, String parentName, String hdfsPath) throws Exception {
        // Register the current directory first; e.g. for hdfsPath
        // "/data/lake/test" the "test" node is saved before its children.
        String dirName = hdfsPath.substring(hdfsPath.lastIndexOf('/') + 1);
        FileStatus[] fileStatuses = fs.listStatus(new Path(hdfsPath));

        if (fileStatuses.length == 0) {
            // Empty directory: the original code indexed fileStatuses[0] and
            // threw ArrayIndexOutOfBoundsException here.
            return;
        }
        // Same first-child heuristic as recursiveUpload (mixed content assumed absent).
        if (fileStatuses[0].isFile()) {
            DataGroup dataGroup = DataGroup.newBuilder().setName(dirName).setAlias(dirName).setDescription("").build();
            dataCatalogManager.addDataGroupToCatalog(parentName, dataGroup);
        } else {
            DataCatalog dataCatalog = DataCatalog.newBuilder().setName(dirName).setAlias(dirName).setDescription("").build();
            dataCatalogManager.append(parentName, dataCatalog);
        }

        for (FileStatus fileStatus : fileStatuses) {
            String childPath = fileStatus.getPath().toString();
            String childName = childPath.substring(childPath.lastIndexOf('/') + 1);

            if (fileStatus.isDirectory()) {
                // Recurse into the sub-directory.
                recursiveMount(fs, dirName, childPath);
            } else {
                System.out.println("Found file: " + childName);
                registerFile(childName, childPath, dirName);
            }
        }
    }

    /**
     * Persists one uploaded/mounted file as a DataItem: generates a UUID-based
     * storage name keeping the original extension, inserts the item, and
     * attaches it to the group {@code groupName}. Shared by the upload and
     * mount paths, which previously duplicated this logic.
     */
    private static void registerFile(String fileAlias, String url, String groupName) throws Exception {
        // BUG FIX: the upload path used to call lastIndexOf(".") without a
        // guard and threw StringIndexOutOfBoundsException for extension-less
        // files; default to an empty extension like the mount path did.
        String extension = extensionOf(fileAlias);
        String fileName = UUID.randomUUID().toString() + extension; // new unique storage name
        FileKey fileKey = FileKey.newBuilder()
                .setUrl(url)
                .setFileName(fileName)
                .setFileAlias(fileAlias)
                .build();
        DataKey dataKey = DataKey.newBuilder()
                .setFileKey(fileKey)
                .build();
        DataItem newItem = DataItem.newBuilder()
                .setName(fileName)
                .setAlias(fileAlias)
                .setFormat(extension)
                .setKey(dataKey)
                .build();

        dataItemManager.insertDataItem(newItem);
        dataGroupManager.addDataItemToGroup(groupName, newItem.getName());
    }

    /** Returns the {@code ".ext"} suffix of a file name, or {@code ""} when it has none. */
    private static String extensionOf(String fileName) {
        int dot = fileName.lastIndexOf('.');
        return dot >= 0 ? fileName.substring(dot) : "";
    }

    /**
     * Classifies an HDFS path.
     *
     * @return 0 for a directory, 1 for a file, -1 when the path does not exist
     *         or an I/O error occurs
     */
    public static int isFileOrDir(FileSystem fs, String hdfsPath) {
        try {
            Path path = new Path(hdfsPath);
            if (!fs.exists(path)) {
                System.err.println("该路径不存在");
                return -1;
            }
            return fs.getFileStatus(path).isDirectory() ? 0 : 1;
        } catch (IOException e) {
            // BUG FIX: the documented contract is "-1 on error", but the
            // original rethrew a RuntimeException here instead.
            System.err.println("Failed to stat " + hdfsPath + ": " + e);
            return -1;
        }
    }

    /**
     * Validates an HDFS folder layout: every directory in the tree must
     * contain either only files or only sub-directories, and no directory may
     * be empty.
     *
     * @return {@code true} when the whole tree satisfies the constraint
     * @throws IOException if listing a directory fails
     */
    public static boolean checkHDFSFolder(FileSystem fs, Path hdfsFolder) throws IOException {
        FileStatus[] fileStatuses = fs.listStatus(hdfsFolder);

        if (fileStatuses == null || fileStatuses.length == 0) {
            return false; // empty folders are rejected
        }

        boolean hasFiles = false;
        boolean hasFolders = false;

        for (FileStatus fileStatus : fileStatuses) {
            if (fileStatus.isDirectory()) {
                hasFolders = true;
                // Every sub-folder must also satisfy the constraint.
                if (!checkHDFSFolder(fs, fileStatus.getPath())) {
                    return false;
                }
            } else {
                hasFiles = true;
            }
        }

        return !(hasFiles && hasFolders); // mixed file/folder content is rejected
    }

    /**
     * Reads a file from HDFS and parses it as TSV.
     *
     * @param hdfsFilePath HDFS file path
     * @return list of (label, series values) pairs, one per line
     * @throws Exception if connecting to HDFS or reading/parsing fails
     */
    public static List<Pair<String, double[]>> readTSVFileFromHDFS(String hdfsFilePath) throws Exception {
        Configuration conf = new Configuration();

        // Resolve datanodes by hostname so external clients can reach them
        // (the namenode would otherwise hand out internal IP addresses).
        conf.set("dfs.client.use.datanode.hostname", "true");

        Path hdfsPath = new Path(hdfsFilePath);
        FileSystem fs = FileSystem.get(hdfsPath.toUri(), conf, "root");

        try (FSDataInputStream inputStream = fs.open(hdfsPath)) {
            return readTSVFileFromInputStream(inputStream);
        } finally {
            fs.close();
        }
    }

    /**
     * Parses TSV data from an input stream (decoded as UTF-8). Each line is
     * {@code label\tv1\tv2...}; the label is kept as a string and the
     * remaining columns are parsed as doubles. The stream is NOT closed —
     * the caller owns it.
     *
     * @param inputStream source of TSV text
     * @return list of (label, series values) pairs, one per line
     * @throws IOException           if reading fails
     * @throws NumberFormatException if a value column is not a valid double
     */
    public static List<Pair<String, double[]>> readTSVFileFromInputStream(InputStream inputStream) throws IOException {
        // Decode explicitly as UTF-8 instead of the platform default charset.
        BufferedReader br = new BufferedReader(new InputStreamReader(inputStream, StandardCharsets.UTF_8));
        List<Pair<String, double[]>> result = new ArrayList<>();

        String line;
        while ((line = br.readLine()) != null) {
            String[] columns = line.split("\t");

            // First column is the label; the remaining columns are the series.
            String label = columns[0];
            double[] values = new double[columns.length - 1];
            for (int i = 1; i < columns.length; ++i) {
                values[i - 1] = Double.parseDouble(columns[i]);
            }

            result.add(new Pair<>(label, values));
        }

        return result;
    }

    /**
     * Reads a file from HDFS as plain text.
     *
     * @param hdfsFilePath HDFS file path
     * @return the whole file content, lines joined with {@code \n}
     * @throws Exception if connecting to HDFS or reading fails
     */
    public static String readTextFileFromHDFS(String hdfsFilePath) throws Exception {
        Configuration conf = new Configuration();

        // Resolve datanodes by hostname so external clients can reach them.
        conf.set("dfs.client.use.datanode.hostname", "true");

        Path hdfsPath = new Path(hdfsFilePath);
        FileSystem fs = FileSystem.get(hdfsPath.toUri(), conf, "root");

        try (FSDataInputStream inputStream = fs.open(hdfsPath)) {
            return readTextFileFromInputStream(inputStream);
        } finally {
            fs.close();
        }
    }

    /**
     * Reads all text from an input stream (decoded as UTF-8), normalising
     * line endings to {@code \n}. A trailing {@code \n} is appended after the
     * last line, matching the original behaviour.
     *
     * @param inputStream source of text
     * @return the accumulated content
     * @throws IOException if reading fails
     */
    public static String readTextFileFromInputStream(InputStream inputStream) throws IOException {
        StringBuilder content = new StringBuilder();
        // Decode explicitly as UTF-8 instead of the platform default charset.
        try (BufferedReader br = new BufferedReader(new InputStreamReader(inputStream, StandardCharsets.UTF_8))) {
            String line;
            while ((line = br.readLine()) != null) {
                content.append(line).append("\n");
            }
        }
        return content.toString();
    }

    /**
     * Reads a legacy Word (.doc) file from HDFS and extracts its text.
     *
     * @param hdfsFilePath HDFS file path
     * @return extracted document text
     * @throws Exception if connecting, reading, or parsing fails
     */
    public static String readDocFileFromHDFS(String hdfsFilePath) throws Exception {
        Configuration conf = new Configuration();
        conf.set("dfs.client.use.datanode.hostname", "true");
        Path hdfsPath = new Path(hdfsFilePath);

        // All resources (filesystem, stream, POI document, extractor) are
        // released in reverse order by try-with-resources.
        try (FileSystem fs = FileSystem.get(hdfsPath.toUri(), conf, "root");
             FSDataInputStream inputStream = fs.open(hdfsPath);
             HWPFDocument document = new HWPFDocument(inputStream);
             WordExtractor extractor = new WordExtractor(document)) {
            return extractor.getText();
        }
    }

    /**
     * Reads a Word (.docx) file from HDFS and extracts its text.
     *
     * @param hdfsFilePath HDFS file path
     * @return extracted document text
     * @throws Exception if connecting, reading, or parsing fails
     */
    public static String readDocxFileFromHDFS(String hdfsFilePath) throws Exception {
        Configuration conf = new Configuration();
        conf.set("dfs.client.use.datanode.hostname", "true");
        Path hdfsPath = new Path(hdfsFilePath);

        try (FileSystem fs = FileSystem.get(hdfsPath.toUri(), conf, "root");
             FSDataInputStream inputStream = fs.open(hdfsPath);
             XWPFDocument document = new XWPFDocument(inputStream);
             XWPFWordExtractor extractor = new XWPFWordExtractor(document)) {
            return extractor.getText();
        }
    }

}

