package edu.zju.gis.dbfg.fileserver.util;


import edu.zju.gis.dbfg.fileserver.entity.ConstructedDataField;
import lombok.extern.slf4j.Slf4j;
import org.json.JSONArray;
import org.json.JSONObject;

import javax.validation.constraints.NotNull;
import java.io.BufferedReader;
import java.io.File;
import java.io.IOException;
import java.io.InputStreamReader;
import java.nio.file.*;
import java.nio.file.attribute.BasicFileAttributes;
import java.util.ArrayList;
import java.util.List;

/**
 * @author yanlo yanlong_lee@qq.com
 * @version 1.0 2018/08/06
 */
@Slf4j
public final class FileUtil {

    /** Utility class — not instantiable. */
    private FileUtil() {
    }

    /**
     * Returns whether the given local path (file or directory) exists.
     *
     * @param path a local file-system path
     * @return {@code true} if the path exists
     */
    public static boolean exist(String path) {
        return Files.exists(Paths.get(path));
    }

    /**
     * Deletes the given local paths (files or directories, recursively).
     *
     * @param paths paths to delete; a {@code null} array and non-existent paths are ignored
     * @throws IOException if a deletion fails
     */
    public static void deletePath(String... paths) throws IOException {
        if (paths != null) {
            for (String path : paths) {
                deletePath(new File(path));
            }
        }
    }

    /**
     * Deletes the given files or directories. Directories are removed recursively,
     * children first. Non-existent paths are silently skipped.
     *
     * @param paths files or directories to delete; a {@code null} array is ignored
     * @throws IOException if a deletion fails
     */
    public static void deletePath(File... paths) throws IOException {
        if (paths == null) {
            return;
        }
        for (File path : paths) {
            if (!path.exists()) {
                continue;
            }
            if (path.isDirectory()) {
                File[] children = path.listFiles();
                if (children != null) {
                    for (File child : children) {
                        deletePath(child);
                    }
                }
            }
            // By now the directory (if any) is empty, so a plain delete works for both kinds.
            Files.delete(path.toPath());
        }
    }

    /**
     * Deletes every entry in {@code dirPath} whose file name, with its last extension
     * removed, equals {@code fileName} (e.g. both "data.csv" and "data.json" for
     * fileName "data"). Entries without an extension match on their full name.
     *
     * @param dirPath  directory to scan; ignored if missing or not a directory
     * @param fileName base name (without extension) to match
     * @throws IOException if a deletion fails
     */
    public static void deleteByName(String dirPath, String fileName) throws IOException {
        File dir = new File(dirPath);
        if (dir.exists() && dir.isDirectory()) {
            File[] files = dir.listFiles();
            if (files == null) {
                // listFiles() can return null on an I/O error even for an existing directory.
                return;
            }
            for (File file : files) {
                String name = file.getName();
                int dot = name.lastIndexOf('.');
                // Bug fix: the original assumed every name contains '.', so extension-less
                // files threw StringIndexOutOfBoundsException on substring(0, -1).
                String noExtName = dot >= 0 ? name.substring(0, dot) : name;
                if (noExtName.equals(fileName)) {
                    Files.delete(file.toPath());
                }
            }
        }
    }

    /**
     * Deletes the given HDFS paths (files or directories).
     * <p>
     * HDFS support is currently disabled; this method is intentionally a no-op.
     * The previous implementation is kept below for when HDFSHelper returns.
     */
    public static void deleteHDFSPath(String... paths) throws IOException {
//        if (paths != null) {
//            for (String path : paths) {
//                HDFSHelper hdfsHelper = HDFSHelper.getInstance();
//                hdfsHelper.deletePath(new org.apache.hadoop.fs.Path(path), true);
//            }
//        }
    }

    /**
     * Renames (moves) a file or directory.
     *
     * @return {@code true} on success, {@code false} otherwise
     */
    public static boolean rename(String src, String dest) {
        return rename(new File(src), new File(dest));
    }

    /**
     * Renames (moves) a file or directory via {@link File#renameTo(File)}.
     * <p>
     * NOTE: despite the original comment, {@code renameTo} does NOT create missing
     * parent directories, and its behavior is platform-dependent (it may fail across
     * file systems); it reports failure by returning {@code false}.
     *
     * @return {@code true} on success, {@code false} otherwise
     */
    public static boolean rename(File src, File dest) {
        return src.renameTo(dest);
    }

    /**
     * Writes the lines to the file, overwriting any existing content.
     * Missing parent directories are created.
     *
     * @throws IOException if writing fails
     */
    public static void write(String file, Iterable<String> lines) throws IOException {
        write(Paths.get(file), lines);
    }

    /**
     * Writes the lines to the file, overwriting any existing content.
     * Missing parent directories are created.
     *
     * @throws IOException if writing fails
     */
    public static void write(Path path, Iterable<String> lines) throws IOException {
        Path parent = path.getParent();
        // getParent() is null for a bare relative file name; the original NPE'd here.
        if (parent != null) {
            Files.createDirectories(parent);
        }
        Files.write(path, lines);
    }

    /**
     * Appends each record as one line to the file, creating the file and any
     * missing parent directories as needed.
     *
     * @throws IOException if writing fails
     */
    public static void append(String file, Iterable<String> lines) throws IOException {
        append(Paths.get(file), lines);
    }

    /**
     * Appends each record as one line to the file, creating the file and any
     * missing parent directories as needed.
     * <p>
     * Bug fix: the original wrote the lines, then unconditionally appended them
     * again, so a brand-new file received every record twice. A single write with
     * CREATE + APPEND covers both the new-file and existing-file cases.
     *
     * @throws IOException if writing fails
     */
    public static void append(Path path, Iterable<String> lines) throws IOException {
        Path parent = path.getParent();
        if (parent != null) {
            Files.createDirectories(parent);
        }
        Files.write(path, lines, StandardOpenOption.CREATE, StandardOpenOption.APPEND);
    }

    /**
     * Reads up to {@code size} lines from a file or directory tree. The path is
     * treated as an HDFS path by default; a {@code file://} prefix marks a local path.
     * Hidden entries (names starting with '.') are skipped. Per-file read errors are
     * logged and skipped, not propagated.
     *
     * @param fileOrDir     file or directory to read
     * @param size          maximum number of records to read; {@code null} for no limit
     * @param skipFirstLine whether to skip the first line of each file (e.g. a CSV header)
     * @return the lines read, possibly empty
     * @throws IOException if walking the tree fails
     */
    public static List<String> readByLine(String fileOrDir, Integer size, boolean skipFirstLine) throws IOException {
        List<String> lines = new ArrayList<>(size == null ? 10 : size);
        if (fileOrDir.startsWith("file://")) { // result is on the local file system
            fileOrDir = fileOrDir.substring("file://".length());
            Path dir = Paths.get(fileOrDir);
            if (Files.exists(dir))
                Files.walkFileTree(dir, new SimpleFileVisitor<Path>() {
                    @Override
                    public FileVisitResult preVisitDirectory(Path dir, BasicFileAttributes attrs) {
                        // Skip hidden directories such as Hadoop's ".staging".
                        if (dir.getFileName().toString().startsWith("."))
                            return FileVisitResult.SKIP_SUBTREE;
                        else
                            return FileVisitResult.CONTINUE;
                    }

                    @Override
                    public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException {
                        if (!file.getFileName().toString().startsWith(".")) {
                            try (BufferedReader reader = Files.newBufferedReader(file)) {
                                String line;
                                if (skipFirstLine)
                                    reader.readLine();
                                while ((line = reader.readLine()) != null) {
                                    lines.add(line);
                                    if (size != null && lines.size() >= size)
                                        return FileVisitResult.TERMINATE;
                                }
                            } catch (IOException e) {
                                // Best effort: one unreadable file must not abort the walk.
                                log.error("文件`" + file.toString() + "`读取异常", e);
                            }
                        }
                        return FileVisitResult.CONTINUE;
                    }
                });
        } else {
//            HDFSHelper hdfsHelper = HDFSHelper.getInstance();
//            org.apache.hadoop.fs.Path dir = new org.apache.hadoop.fs.Path(fileOrDir);
//            List<org.apache.hadoop.fs.Path> files = getFiles(hdfsHelper, dir);
//            for (org.apache.hadoop.fs.Path file : files) {
//                try (BufferedReader reader = new BufferedReader(new InputStreamReader(hdfsHelper.read(file)))) {
//                    String line;
//                    if (skipFirstLine)
//                        reader.readLine();
//                    while ((line = reader.readLine()) != null)
//                        if (size == null || lines.size() < size)
//                            lines.add(line);
//                }
//                if (size != null && lines.size() >= size)
//                    break;
//            }
        }
        return lines;
    }

    /**
     * Converts the lines to a JSON array of objects keyed by the given column heads.
     * The format is CSV unless a tab appears in the first 20 lines; the first line
     * is treated as a header row and skipped.
     *
     * @param lines raw records, header first
     * @param heads field names to use as JSON keys
     * @return one JSONObject per data row
     */
    public static JSONArray getDataAsJSONArrayWithHeads(List<String> lines, String[] heads) {
        boolean isCsv = lines.stream().limit(20).noneMatch(s -> s.contains("\t"));
        JSONArray jsons = new JSONArray();
        for (int i = 1; i < lines.size(); i++) {
            String[] values = lines.get(i).split(isCsv ? "," : "\t", -1);
            JSONObject json = new JSONObject();
            // Bug fix: bound by both arrays — a short row previously threw
            // ArrayIndexOutOfBoundsException on values[f].
            int cols = Math.min(heads.length, values.length);
            for (int f = 0; f < cols; f++)
                json.put(heads[f], values[f]);
            jsons.put(json);
        }
        return jsons;
    }

    /**
     * Converts the lines to a JSON array of objects with synthetic field names
     * {@code col_0..col_n} derived from the first line's column count. The format is
     * CSV unless a tab appears in the first 20 lines; the first line is skipped as
     * a header row.
     *
     * @param lines raw records, header first; must be non-empty
     * @return one JSONObject per data row
     */
    public static JSONArray getDataAsJSONArray(List<String> lines) {
        boolean isCsv = lines.stream().limit(20).noneMatch(s -> s.contains("\t"));
        int len = lines.get(0).split(isCsv ? "," : "\t", -1).length;
        String[] fields = new String[len];
        for (int i = 0; i < len; i++) {
            fields[i] = "col_" + i;
        }
        JSONArray jsons = new JSONArray();
        for (int i = 1; i < lines.size(); i++) {
            String[] values = lines.get(i).split(isCsv ? "," : "\t", -1);
            JSONObject json = new JSONObject();
            // Bug fix: rows shorter than the first line previously threw
            // ArrayIndexOutOfBoundsException on values[f].
            int cols = Math.min(fields.length, values.length);
            for (int f = 0; f < cols; f++)
                json.put(fields[f], values[f]);
            jsons.put(json);
        }
        return jsons;
    }

    /**
     * Splits the lines into string arrays, prefixed with a synthetic header row
     * {@code col_0..col_n} sized from the first line. The format is CSV unless a tab
     * appears in the first 20 lines.
     * <p>
     * NOTE(review): unlike {@link #getDataAsJSONArray(List)}, ALL input lines are
     * included (the first line is not skipped as a header) — confirm this asymmetry
     * is intentional.
     *
     * @param lines raw records; must be non-empty
     * @return synthetic header row followed by every input row, split into columns
     */
    public static List<String[]> getDataAsArray(List<String> lines) {
        boolean isCsv = lines.stream().limit(20).noneMatch(s -> s.contains("\t"));
        int len = lines.get(0).split(isCsv ? "," : "\t", -1).length;
        String[] fields = new String[len];
        for (int i = 0; i < len; i++) {
            fields[i] = "col_" + i;
        }
        List<String[]> data = new ArrayList<>(lines.size() + 1);
        data.add(fields);
        lines.forEach(s -> data.add(s.split(isCsv ? "," : "\t", -1)));
        return data;
    }

    /**
     * Builds the field descriptors of a constructed data set from its first line.
     *
     * @param hdfsPath path readable by {@link #readByLine(String, Integer, boolean)}
     * @param hasHead  whether the first line is a header; {@code null} is treated as
     *                 {@code false} (the original unboxed it and NPE'd)
     * @return one field per column; empty if the source has no lines
     * @throws IOException if reading fails
     */
    public static List<ConstructedDataField> getConstructedFields(String hdfsPath, Boolean hasHead) throws IOException {
        List<String> sample = readByLine(hdfsPath, 5, false);
        if (sample.isEmpty()) {
            // Bug fix: the original called get(0) unconditionally and threw
            // IndexOutOfBoundsException on an empty source.
            return new ArrayList<>();
        }
        String firstLine = sample.get(0);
        boolean isCsv = !firstLine.contains("\t");
        // -1 limit keeps trailing empty columns, consistent with the other parsers here.
        String[] items = firstLine.split(isCsv ? "," : "\t", -1);
        List<ConstructedDataField> fields = new ArrayList<>(items.length);
        boolean named = Boolean.TRUE.equals(hasHead);
        for (String item : items) {
            fields.add(named ? new ConstructedDataField(item) : new ConstructedDataField());
        }
        return fields;
    }

//    public static List<org.apache.hadoop.fs.Path> getFiles(HDFSHelper helper, @NotNull org.apache.hadoop.fs.Path...
//            paths) throws IOException {
//        List<org.apache.hadoop.fs.Path> result = new ArrayList<>();
//        for (org.apache.hadoop.fs.Path path : paths) {
//            if (!helper.exists(path))
//                continue;
//            if (helper.isFile(path))
//                result.add(path);
//            else
//                result.addAll(getFiles(helper, helper.listFiles(path)));
//        }
//        return result;
//    }
}
