package com.mdyy.util;


import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.*;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.util.Progressable;

import java.io.*;

/**
 * Utility methods for uploading, downloading, listing and deleting files
 * under the fixed HDFS directory {@code /data/log/} on the cluster at
 * {@code hdfs://node-1:9000}.
 *
 * <p>Every operation runs as HDFS user {@code root} (via the
 * {@code HADOOP_USER_NAME} system property) with a 128m block size and a
 * replication factor of 2. NOTE(review): these cluster settings are
 * hard-coded; consider externalizing them to configuration.
 */
public class HDFSUtil {

    /** Base directory on HDFS under which all managed files live. */
    private static final String BASE_DIR = "/data/log/";

    /** Utility class — not instantiable. */
    private HDFSUtil() {
    }

    /**
     * Builds the cluster {@link Configuration} shared by every operation.
     * Extracted to remove the setup boilerplate that was previously
     * duplicated in each public method.
     */
    private static Configuration createConf() {
        System.setProperty("HADOOP_USER_NAME", "root");
        Configuration conf = new Configuration();
        conf.set("dfs.blocksize", "128m");
        conf.set("dfs.replication", "2");
        conf.set("fs.defaultFS", "hdfs://node-1:9000");
        return conf;
    }

    /**
     * Uploads a stream to {@code /data/log/<fileName>}, reporting write
     * progress through a {@link Progressable} callback.
     *
     * @param in       source stream; always closed by this method
     * @param size     total upload size in bytes, used to estimate progress
     *                 (each {@code progress()} call corresponds to roughly one
     *                 64 KiB packet — presumed from the DFS packet size; verify)
     * @param fileName target file name under {@code /data/log/}
     * @throws IOException if the connection or the copy fails
     */
    public synchronized static void uploadFile(InputStream in, Long size, String fileName)
            throws IOException {
        Configuration conf = createConf();
        FileSystem hdfs = FileSystem.get(conf);
        // Float division: the original integer (Long) division truncated the
        // packet count and broke the percentage estimate.
        final float totalPackets = size / 65536f;
        final Path uploadPath = new Path(BASE_DIR + fileName);
        FSDataOutputStream out = hdfs.create(uploadPath, new Progressable() {
            long packetCount = 0;

            @Override
            public void progress() {
                packetCount++;
                // System.out.println("progress: " + (packetCount / totalPackets) * 100 + " %");
            }
        });
        try {
            IOUtils.copyBytes(in, out, conf);
        } finally {
            // Close in finally so neither stream leaks if the copy throws.
            IOUtils.closeStream(in);
            IOUtils.closeStream(out);
        }
    }

    /**
     * Streams {@code /data/log/<fileName>} from HDFS into the given output
     * stream.
     *
     * @param fileName name of the remote file under {@code /data/log/}
     * @param out      destination stream; always closed by this method
     * @throws IOException if the file does not exist or the copy fails
     */
    public static void downLoadFile(String fileName, OutputStream out) throws IOException {
        Configuration conf = createConf();
        FileSystem hdfs = FileSystem.get(conf);
        Path remoteFile = new Path(BASE_DIR + fileName);
        FSDataInputStream in = hdfs.open(remoteFile);
        try {
            IOUtils.copyBytes(in, out, conf);
        } finally {
            // Close in finally so neither stream leaks if the copy throws.
            IOUtils.closeStream(in);
            IOUtils.closeStream(out);
        }
    }

    /**
     * Deletes {@code /data/log/<fileName>} from HDFS (non-recursive, so a
     * non-empty directory of that name is not removed).
     *
     * @param fileName name of the remote file under {@code /data/log/}
     * @throws IOException if the delete operation fails
     */
    public static void deleteFile(String fileName) throws IOException {
        FileSystem hdfs = FileSystem.get(createConf());
        Path remoteFile = new Path(BASE_DIR + fileName);
        // Return value intentionally ignored, matching the original contract:
        // deleting a missing file is treated as success.
        hdfs.delete(remoteFile, false);
    }

    /**
     * Lists the contents of the {@code /data/log/} directory.
     *
     * @return the file statuses of the directory's direct children
     * @throws IOException if the directory cannot be read
     */
    public static FileStatus[] listFile() throws IOException {
        FileSystem hdfs = FileSystem.get(createConf());
        Path remotePath = new Path(BASE_DIR);
        return hdfs.listStatus(remotePath);
    }

    /**
     * Legacy stub retained for source compatibility: it connects to the
     * cluster but deletes nothing.
     *
     * @deprecated use {@link #deleteFile(String)} instead; this overload has
     *             no effect beyond establishing a connection.
     * @throws IOException if the connection cannot be established
     */
    @Deprecated
    public static void deleteFile() throws IOException {
        // Preserved side effect: connecting may surface an IOException to the
        // caller even though nothing is deleted.
        FileSystem.get(createConf());
    }
}
