package HDFS;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.*;
import org.apache.hadoop.io.IOUtils;

import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.net.URI;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.List;

/**
 * Static utility methods for common HDFS operations (mkdir, delete, rename,
 * listing, upload/download, create/read/append) against a single cluster.
 *
 * <p>A shared {@link FileSystem} handle is created once in the static
 * initializer and reused by every method. NOTE(review): the handle is never
 * closed and the class is only as thread-safe as the underlying
 * {@code FileSystem} implementation — confirm against Hadoop docs for your
 * version before concurrent use.
 */
public class HdfsUtils {
    private static FileSystem fs = null;
    // Change this to the address of your own HDFS NameNode.
    private static String uri = "hdfs://192.168.200.200:9000";

    static {
        Configuration conf = new Configuration();
        String usr = "root";
        try {
            fs = FileSystem.get(URI.create(uri), conf, usr);
        } catch (IOException e) {
            e.printStackTrace();
        } catch (InterruptedException e) {
            // Restore the interrupt flag so callers can still observe the interruption.
            Thread.currentThread().interrupt();
            e.printStackTrace();
        }
    }

    /**
     * Creates the given directory (and any missing parents) if it does not exist.
     *
     * @param completeDir absolute HDFS directory path
     * @return {@code true} if the directory was created; {@code false} when the
     *         path is blank or the directory already exists
     * @throws Exception on HDFS communication failure
     */
    public static boolean mkdir(String completeDir) throws Exception {
        if (isBlank(completeDir)) {
            return false;
        }
        Path dir = new Path(completeDir);
        if (!fs.exists(dir)) {
            return fs.mkdirs(dir);
        }
        return false;
    }

    /**
     * Recursively deletes the given directory if it exists.
     *
     * @param completeDir absolute HDFS directory path
     * @return {@code true} if the delete succeeded; {@code false} when the path
     *         is blank or does not exist
     * @throws Exception on HDFS communication failure
     */
    public static boolean deleteDir(String completeDir) throws Exception {
        if (isBlank(completeDir)) {
            return false;
        }
        Path dir = new Path(completeDir);
        if (fs.exists(dir)) {
            // 'true' enables recursive deletion of directory contents.
            return fs.delete(dir, true);
        }
        return false;
    }

    /**
     * Renames (moves) a file or directory on HDFS.
     *
     * @param oldPath existing HDFS path
     * @param newPath target HDFS path
     * @return {@code true} if the rename succeeded; {@code false} if either
     *         path is blank or the filesystem rejected the rename
     * @throws Exception on HDFS communication failure
     */
    public static boolean rename(String oldPath, String newPath) throws Exception {
        if (isBlank(oldPath) || isBlank(newPath)) {
            return false;
        }
        return fs.rename(new Path(oldPath), new Path(newPath));
    }

    /**
     * Recursively lists all entries (files, directories, symlinks) under the
     * given directory, as fully-qualified path strings.
     *
     * @param completeDir absolute HDFS directory path
     * @return list of path strings; empty when the input is blank
     * @throws Exception on HDFS communication failure
     */
    public static List<String> listAll(String completeDir) throws Exception {
        if (isBlank(completeDir)) {
            return new ArrayList<String>();
        }
        List<String> names = new ArrayList<String>();
        for (FileStatus stat : fs.listStatus(new Path(completeDir))) {
            String pathName = stat.getPath().toString();
            if (stat.isFile()) {
                // regular file
                names.add(pathName);
            } else if (stat.isDirectory()) {
                names.add(pathName);
                // recursively collect the directory's children
                names.addAll(listAll(pathName));
            } else if (stat.isSymlink()) {
                // symbolic link: recorded but not followed
                names.add(pathName);
            }
        }
        return names;
    }

    /**
     * Uploads a local file into an HDFS directory.
     *
     * @param localFilePath path of the local source file
     * @param hdfsDirPath   destination HDFS directory
     * @return {@code true} on success; {@code false} when either path is blank
     * @throws Exception on HDFS communication failure
     */
    public static boolean uploadLocalFile2HDFS(String localFilePath, String hdfsDirPath) throws Exception {
        if (isBlank(localFilePath) || isBlank(hdfsDirPath)) {
            return false;
        }
        fs.copyFromLocalFile(new Path(localFilePath), new Path(hdfsDirPath));
        return true;
    }

    /**
     * Downloads an HDFS file into a local directory.
     *
     * @param hdfsFilePath path of the HDFS source file
     * @param localDirPath destination local directory
     * @return {@code true} on success; {@code false} when either path is blank
     * @throws Exception on HDFS communication failure
     */
    public static boolean downloadHDFS2LocalFile(String hdfsFilePath, String localDirPath) throws Exception {
        if (isBlank(hdfsFilePath) || isBlank(localDirPath)) {
            return false;
        }
        fs.copyToLocalFile(new Path(hdfsFilePath), new Path(localDirPath));
        return true;
    }

    /**
     * Creates (or overwrites) an HDFS file containing the given UTF-8 content.
     *
     * @param newFile absolute HDFS file path
     * @param content text to write, encoded as UTF-8
     * @return {@code true} on success; {@code false} when either argument is blank
     * @throws Exception on HDFS communication failure
     */
    public static boolean createNewHDFSFile(String newFile, String content) throws Exception {
        if (isBlank(newFile) || isBlank(content)) {
            return false;
        }
        // try-with-resources guarantees the stream is closed even if write() throws
        // (the original leaked the stream on failure and null-checked after use).
        try (FSDataOutputStream os = fs.create(new Path(newFile))) {
            os.write(content.getBytes(StandardCharsets.UTF_8));
        }
        return true;
    }

    /**
     * Deletes an HDFS file (or directory, recursively).
     *
     * @param hdfsFile absolute HDFS path
     * @return {@code true} if the delete succeeded; {@code false} when the path
     *         is blank or nothing was deleted
     * @throws Exception on HDFS communication failure
     */
    public static boolean deleteHDFSFile(String hdfsFile) throws Exception {
        if (isBlank(hdfsFile)) {
            return false;
        }
        return fs.delete(new Path(hdfsFile), true);
    }

    /**
     * Reads an entire HDFS file into memory.
     *
     * @param hdfsFile absolute HDFS file path
     * @return the file's bytes, or {@code null} when the path is blank
     * @throws Exception if the file does not exist, exceeds 2 GiB, or an HDFS
     *         error occurs
     */
    public static byte[] readHDFSFile(String hdfsFile) throws Exception {
        if (isBlank(hdfsFile)) {
            return null;
        }
        Path path = new Path(hdfsFile);
        if (!fs.exists(path)) {
            throw new Exception("the file is not found .");
        }
        FileStatus stat = fs.getFileStatus(path);
        // Math.toIntExact fails fast on files larger than Integer.MAX_VALUE
        // instead of silently mangling the length via string round-tripping.
        byte[] buffer = new byte[Math.toIntExact(stat.getLen())];
        // try-with-resources closes the stream even when readFully throws
        // (the original leaked the stream on read failure).
        try (FSDataInputStream is = fs.open(path)) {
            is.readFully(0, buffer);
        }
        return buffer;
    }

    /**
     * Appends UTF-8 text to an existing HDFS file, or creates the file when it
     * does not exist yet.
     *
     * <p>Opens a dedicated {@link FileSystem} with relaxed datanode-replacement
     * settings so append works on a single-datanode cluster.
     *
     * @param hdfsFile absolute HDFS file path
     * @param content  text to append, encoded as UTF-8
     * @return {@code true} on success; {@code false} when either argument is blank
     * @throws Exception on HDFS communication failure
     */
    public static boolean append(String hdfsFile, String content) throws Exception {
        if (isBlank(hdfsFile) || isBlank(content)) {
            return false;
        }
        Configuration conf = new Configuration();
        // solve the problem when appending at single datanode hadoop env
        conf.set("dfs.client.block.write.replace-datanode-on-failure.policy", "NEVER");
        conf.set("dfs.client.block.write.replace-datanode-on-failure.enable", "true");
        // Named appendFs to avoid shadowing the shared static 'fs' field;
        // try-with-resources replaces the original's manual close/double-close.
        try (FileSystem appendFs = FileSystem.get(URI.create(hdfsFile), conf, "root")) {
            Path path = new Path(hdfsFile);
            if (appendFs.exists(path)) {
                // UTF-8 matches the encoding used by createNewHDFSFile; the
                // original mixed UTF-8 writes with platform-default appends.
                try (InputStream in = new ByteArrayInputStream(content.getBytes(StandardCharsets.UTF_8));
                     OutputStream out = appendFs.append(path)) {
                    // close=false: try-with-resources owns the streams, so
                    // copyBytes must not close them a second time.
                    IOUtils.copyBytes(in, out, 4096, false);
                }
            } else {
                HdfsUtils.createNewHDFSFile(hdfsFile, content);
            }
        }
        return true;
    }

    /**
     * Returns {@code true} when the string is {@code null}, empty, or
     * whitespace-only.
     */
    public static boolean isBlank(String str) {
        return str == null || str.trim().isEmpty();
    }

    /**
     * Prefixes a path with the cluster URI to form a fully-qualified HDFS path.
     *
     * @param dir path relative to the filesystem root (should start with "/")
     * @return {@code uri + dir}, or the filesystem root when {@code dir} is blank
     */
    public static String getCompleteDirOfHDFS(String dir) {
        if (isBlank(dir)) {
            // Bug fix: the original returned dir + SEPARATOR here, yielding the
            // literal string "null/" for a null input; fall back to the root.
            return uri + Path.SEPARATOR;
        }
        return uri + dir;
    }
}
