package com.yk.mr.util;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

import java.io.IOException;

/**
 * Thin convenience wrapper around the Hadoop {@link FileSystem} API for
 * copying files to/from HDFS and deleting HDFS paths.
 *
 * <p>All methods resolve the {@link FileSystem} from the destination/target
 * path and the supplied {@link Configuration}, and report success via a
 * boolean rather than propagating {@link IOException}.
 */
public class HdfsFileIO {

    // Eagerly-initialized shared instance; the class is stateless, so sharing is safe.
    private static final HdfsFileIO hdfsFileIO = new HdfsFileIO();

    /**
     * Uploads a local file to HDFS.
     *
     * @param filePath local source file path
     * @param dst      HDFS destination path
     * @param conf     Hadoop configuration used to resolve the file system
     * @return {@code true} if the copy succeeded, {@code false} on I/O failure
     */
    public boolean putToHDFS(String filePath, String dst, Configuration conf) {
        Path dstPath = new Path(dst);
        try {
            FileSystem hdfs = dstPath.getFileSystem(conf);
            // false = keep the local source file after the copy
            hdfs.copyFromLocalFile(false, new Path(filePath), dstPath);
        } catch (IOException e) {
            e.printStackTrace();
            // Bug fix: previously returned true even when the upload failed.
            return false;
        }
        return true;
    }

    /**
     * Downloads a file from HDFS to the local file system.
     *
     * @param filePath HDFS source file path
     * @param dst      local destination path
     * @param conf     Hadoop configuration used to resolve the file system
     * @return {@code true} if the copy succeeded, {@code false} on I/O failure
     */
    public boolean getFromHDFS(String filePath, String dst, Configuration conf) {
        Path dstPath = new Path(dst);
        try {
            FileSystem hdfs = dstPath.getFileSystem(conf);
            // false = do not delete the HDFS source after copying
            hdfs.copyToLocalFile(false, new Path(filePath), dstPath);
        } catch (IOException e) {
            e.printStackTrace();
            return false;
        }
        return true;
    }

    /**
     * Deletes an HDFS path (recursively) if it exists.
     *
     * @param path HDFS path to delete
     * @param conf Hadoop configuration used to resolve the file system
     * @return {@code true} if the path existed and was deleted,
     *         {@code false} if it did not exist or an I/O error occurred
     */
    public boolean checkAndDel(String path, Configuration conf) {
        Path dstPath = new Path(path);
        try {
            FileSystem hdfs = dstPath.getFileSystem(conf);
            if (!hdfs.exists(dstPath)) {
                return false;
            }
            // Bug fix: propagate delete()'s result instead of discarding it;
            // true = recursive delete for directories.
            return hdfs.delete(dstPath, true);
        } catch (IOException e) {
            e.printStackTrace();
            return false;
        }
    }

    /**
     * Returns the shared {@code HdfsFileIO} instance.
     *
     * @return the singleton instance
     */
    public static HdfsFileIO getHdfsFileIO() {
        return hdfsFileIO;
    }
}
