package com.briup.hdfs;

import org.apache.commons.io.IOUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.*;
import org.apache.hadoop.fs.permission.FsAction;
import org.apache.hadoop.fs.permission.FsPermission;
import org.apache.hadoop.hdfs.client.HdfsDataInputStream;
import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
import org.apache.hadoop.hdfs.protocol.LocatedBlock;
import org.apache.hadoop.util.Progressable;

import java.awt.image.BufferedImage;
import java.io.*;
import java.util.Arrays;
import java.util.List;

/**
 * Exercises for operating on an HDFS file system: merging small files,
 * uploading and downloading, listing block locations, and managing
 * directories and permissions.
 *
 * @author 最美如初
 * @version 1.0
 * @date 2021/7/16
 */
public class SecondWHdfs {

    /**
     * Merges every regular file under the local "src/" directory into a single
     * HDFS file at /user/zhang/briup1.txt.
     *
     * @throws Exception if the local or HDFS file system cannot be accessed
     */
    public void mergeLocal_to_hdfs() throws Exception {
        // Use the dedicated local file system; with FileSystem.get() the local
        // paths would have to be written as file:// URIs instead.
        Configuration conf = new Configuration();
        try (LocalFileSystem lfs = FileSystem.getLocal(conf);
             FileSystem fs = getFileSystem()) {
            FileStatus[] localFiles = lfs.listStatus(new Path("src/"));
            try (FSDataOutputStream hdfsOut = fs.create(new Path("/user/zhang/briup1.txt"))) {
                for (FileStatus status : localFiles) {
                    // Skip sub-directories: open() on a directory would fail.
                    if (status.isDirectory()) continue;
                    try (FSDataInputStream localIn = lfs.open(status.getPath())) {
                        IOUtils.copy(localIn, hdfsOut);
                    }
                }
            }
        }
    }

    /**
     * Merges every regular file directly under /user/zhang on HDFS into one
     * local file, appending if the local file already exists.
     *
     * @throws Exception if HDFS cannot be reached or the local file cannot be written
     */
    public void mergeHdfs_to_local() throws Exception {
        try (FileSystem fs = getFileSystem();
             // BUG FIX: the original passed the directory "src/" to FileOutputStream,
             // which throws FileNotFoundException — a file path is required.
             FileOutputStream fos = new FileOutputStream("src/merged.txt", true)) {
            // Metadata for every entry directly under the source directory.
            for (FileStatus status : fs.listStatus(new Path("/user/zhang"))) {
                if (status.isDirectory()) continue; // only merge regular files
                try (FSDataInputStream fsdis = fs.open(status.getPath())) {
                    // Stream the HDFS bytes straight into the local output file.
                    IOUtils.copy(fsdis, fos);
                }
            }
        }
    }

    /**
     * Prints block-level metadata (pool id, size, name, id, datanode
     * locations) for /user/zhang/a.jpg.
     *
     * @throws Exception if the file cannot be opened on HDFS
     */
    public void list_block_hdfs() throws Exception {
        try (FileSystem fs = getFileSystem();
             // HdfsDataInputStream wraps a DFSInputStream, which exposes the block list.
             HdfsDataInputStream fis = (HdfsDataInputStream) fs.open(new Path("/user/zhang/a.jpg"))) {
            List<LocatedBlock> blocks = fis.getAllBlocks();
            for (LocatedBlock block : blocks) {
                System.out.println(block.getBlock().getBlockPoolId());
                System.out.println(block.getBlock().getNumBytes());
                System.out.println(block.getBlock().getBlockName());
                System.out.println(block.getBlock().getBlockId());
                DatanodeInfo[] locations = block.getLocations();
                System.out.println(Arrays.toString(locations));
            }
        }
    }

    /**
     * Recursively lists all files under /user/zhang together with the storage
     * ids and hosts of each of their blocks.
     *
     * @throws Exception if the listing cannot be retrieved
     */
    public void listFiles_hdfs() throws Exception {
        try (FileSystem fs = getFileSystem()) {
            // Second argument: recurse into sub-directories.
            RemoteIterator<LocatedFileStatus> iter = fs.listFiles(new Path("/user/zhang"), true);
            while (iter.hasNext()) {
                LocatedFileStatus file = iter.next();
                System.out.println(file.getPath().getName());
                // Per-block placement information.
                BlockLocation[] bl = file.getBlockLocations();
                for (BlockLocation n : bl) {
                    System.out.println(Arrays.toString(n.getStorageIds()) + "--" + Arrays.toString(n.getHosts()));
                }
                System.out.println("***********");
            }
        }
    }

    /**
     * Deletes /user/zhang/a.jpg, recursing if the path is a directory.
     *
     * @throws Exception if the delete request fails
     */
    public void delete_hdfs() throws Exception {
        try (FileSystem fs = getFileSystem()) {
            fs.delete(new Path("/user/zhang/a.jpg"), true);
        }
    }

    /**
     * Deletes /user/zhang/a.jpg non-recursively (fails on a non-empty directory).
     *
     * @throws Exception if the delete request fails
     */
    public void delete_hdfs1() throws Exception {
        try (FileSystem fs = getFileSystem()) {
            fs.delete(new Path("/user/zhang/a.jpg"), false);
        }
    }

    /**
     * Creates the flowcount/wordcount input directories under /user/zhanggs
     * with default permissions.
     *
     * @throws Exception if the directories cannot be created
     */
    public void mkdir_hdfs() throws Exception {
        try (FileSystem fs = getFileSystem()) {
            fs.mkdirs(new Path("/user/zhanggs/input/flowcount_input"));
            fs.mkdirs(new Path("/user/zhanggs/input/wordcount_input"));
        }
    }

    /**
     * Creates the flowcount/wordcount input directories under /user/zhang
     * with explicit rwxr-xr-x permissions.
     *
     * @throws Exception if the directories cannot be created
     */
    public void mkdir_hdfs1() throws Exception {
        try (FileSystem fs = getFileSystem()) {
            FsPermission permission =
                    new FsPermission(FsAction.ALL, FsAction.READ_EXECUTE, FsAction.READ_EXECUTE);
            fs.mkdirs(new Path("/user/zhang/input/flowcount_input"), permission);
            fs.mkdirs(new Path("/user/zhang/input/wordcount_input"), permission);
        }
    }

    /**
     * Prints the metadata (permission, owner, group, times, block size,
     * length, replication) of the path "a.txt" relative to the HDFS
     * working directory.
     *
     * @throws IOException if the status cannot be read
     */
    public void read_status() throws IOException {
        Configuration conf = new Configuration();
        conf.set("fs.defaultFS", "hdfs://172.16.0.4:9000");
        try (FileSystem fs = FileSystem.get(conf)) {
            // "a.txt" is resolved against the HDFS working directory.
            FileStatus[] fstatus = fs.listStatus(new Path("a.txt"));
            for (FileStatus st : fstatus) {
                System.out.println(st.getPermission());       // permission bits
                System.out.println(st.getOwner());            // file owner
                System.out.println(st.getGroup());            // owning group
                System.out.println(st.getAccessTime());       // last access time
                System.out.println(st.getBlockSize());        // HDFS block size
                System.out.println(st.getLen());              // file length in bytes
                System.out.println(st.getModificationTime()); // last modification time
                System.out.println(st.getReplication());      // replication factor
            }
        }
    }

    /**
     * Writes a single test line to /user/zhang/pa.txt on HDFS.
     *
     * @throws IOException if the file cannot be created or written
     */
    public void write_hdfs1() throws IOException {
        try (FileSystem fs = getFileSystem();
             FSDataOutputStream fos = fs.create(new Path("/user/zhang/pa.txt"));
             PrintWriter pw = new PrintWriter(fos)) {
            pw.println("test......briup");
            // Closing the PrintWriter (via try-with-resources) flushes it.
        }
    }

    /**
     * Writes two lines to /user/cw/wc.txt on HDFS, creating the file with
     * explicit permissions, buffer size, replication factor, and block size,
     * and reporting write progress to stdout.
     *
     * @throws IOException if the file cannot be created or written
     */
    public void write_hdfs() throws IOException {
        Configuration conf = new Configuration();
        conf.set("fs.defaultFS", "hdfs://172.16.0.4:9000");
        try (FileSystem fs = FileSystem.get(conf)) {
            Path path = new Path("/user/cw/wc.txt");
            FsPermission permission =
                    new FsPermission(FsAction.ALL, FsAction.READ_EXECUTE, FsAction.NONE);
            // overwrite=true, buffer=1024 bytes, replication=1, block size=1 GiB.
            try (FSDataOutputStream fos = fs.create(path, permission, true, 1024, (short) 1,
                    1024 * 1024 * 1024, new Progressable() {
                        public void progress() {
                            System.out.println("...");
                        }
                    });
                 PrintWriter pw = new PrintWriter(fos)) {
                pw.println("cw");
                pw.println("wc");
            }
        }
    }

    /**
     * Reads /user/zhang/input/flowcount_input/1.txt from HDFS and prints it
     * line by line.
     *
     * @throws Exception if the file cannot be opened or read
     */
    public void read_hdfs() throws Exception {
        Configuration conf = new Configuration();
        conf.set("fs.defaultFS", "hdfs://172.16.0.4:9000");
        try (FileSystem fs = FileSystem.get(conf);
             FSDataInputStream fis = fs.open(new Path("/user/zhang/input/flowcount_input/1.txt"));
             BufferedReader br = new BufferedReader(new InputStreamReader(fis))) {
            String str;
            while ((str = br.readLine()) != null) {
                System.out.println(str);
            }
        }
    }

    /**
     * Uploads a local image file to /user/zhang/a.jpg on HDFS.
     *
     * @throws Exception if the copy fails
     */
    public void upload_hdfs() throws Exception {
        Configuration conf = new Configuration();
        conf.set("fs.defaultFS", "hdfs://172.16.0.4:9000");
        try (FileSystem fs = FileSystem.get(conf)) {
            fs.copyFromLocalFile(
                    new Path("D:\\元气壁纸缓存\\img\\5057354AA3A37876D34EEFB6BD2FEC97.jpg"),
                    new Path("/user/zhang/a.jpg"));
        }
    }

    /**
     * Uploads two local text files into the flowcount/wordcount input
     * directories on HDFS without deleting the local sources.
     *
     * @throws Exception if either copy fails
     */
    public void upload_hdfs1() throws Exception {
        try (FileSystem fs = getFileSystem()) {
            // delSrc=false: keep the local source file.
            fs.copyFromLocalFile(false,
                    new Path("C:\\Users\\最美如初\\Desktop\\input\\flowcount_input\\2.txt"),
                    new Path("/user/zhang/input/flowcount_input/1.txt"));
            // delSrc=false, overwrite=false.
            fs.copyFromLocalFile(false, false,
                    new Path("C:\\Users\\最美如初\\Desktop\\input\\wordcount_input\\1.txt"),
                    new Path("/user/zhang/input/wordcount_input/1.txt"));
        }
    }

    /**
     * Builds a FileSystem handle for the cluster at hdfs://172.16.0.4:9000.
     * Callers are responsible for closing the returned instance.
     *
     * @return an open HDFS FileSystem
     * @throws IOException if the file system cannot be obtained
     */
    public FileSystem getFileSystem() throws IOException {
        Configuration conf = new Configuration();
        conf.set("fs.defaultFS", "hdfs://172.16.0.4:9000");
        return FileSystem.get(conf);
    }

    public static void main(String[] args) throws Exception {
        // new SecondWHdfs().write_hdfs();
        // new SecondWHdfs().write_hdfs1();
        // new SecondWHdfs().mkdir_hdfs();
        new SecondWHdfs().read_hdfs();
        // new SecondWHdfs().mkdir_hdfs1();
        // new SecondWHdfs().upload_hdfs();
        // new SecondWHdfs().delete_hdfs();
        // new SecondWHdfs().delete_hdfs1();
        // new SecondWHdfs().upload_hdfs1();
        // new SecondWHdfs().listFiles_hdfs();
        // new SecondWHdfs().list_block_hdfs();
    }
}
