package cn.hfuu;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.*;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.yarn.webapp.hamlet2.Hamlet;

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.net.URI;
import java.net.URISyntaxException;
import java.net.URL;
import java.nio.charset.StandardCharsets;

/**
 * Demonstrates basic HDFS client operations: creating directories, listing
 * directory contents, and reading a file line by line.
 * <p>
 * Usage note: run with {@code export LANG=C.UTF-8} (presumably so the Chinese
 * console output renders correctly).
 *
 * @author oldliu
 * @since 1.0
 */
public class CreateHdfs {

    static {
        // Register the HDFS stream handler so "hdfs://" URLs can be opened via java.net.URL.
        URL.setURLStreamHandlerFactory(new FsUrlStreamHandlerFactory());
    }

    /**
     * Entry point. Dispatches on the first argument:
     * "1" = createFile, "2" = showFolder, "3" = listFileInfor, "4" = fileRead.
     *
     * @param args command-line arguments; args[0] selects the demo operation
     * @throws Exception if the selected HDFS operation fails
     */
    public static void main(String[] args) throws Exception {
        if (args.length == 0) {
            // Original message only mentioned "1 or 2" although four operations exist.
            System.out.println("please use CreateHdfs 1, 2, 3 or 4");
            return;
        }
        switch (args[0]) {
            case "1":
                createFile();
                break;
            case "2":
                showFolder();
                break;
            case "3":
                listFileInfor();
                break;
            case "4":
                fileRead();
                break;
            default:
                System.out.println("没有这个功能");
                break;
        }
    }

    /**
     * Creates the directory {@code /heda/hadoop/hello} on HDFS if it does not
     * already exist, connecting as user "root".
     *
     * @throws IOException          on HDFS access failure
     * @throws URISyntaxException   if the filesystem URI is malformed
     * @throws InterruptedException if the connection attempt is interrupted
     */
    public static void showFolder() throws IOException, URISyntaxException, InterruptedException {
        Configuration conf = new Configuration();
        conf.set("fs.defaultFS", "hdfs://hadoopmaster:9000");
        // try-with-resources ensures the FileSystem handle is closed (the
        // original leaked it).
        try (FileSystem fs = FileSystem.get(new URI("hdfs://hadoopmaster:9000"), conf, "root")) {
            Path path = new Path("/heda/hadoop/hello");
            if (!fs.exists(path)) {
                fs.mkdirs(path);
            } else {
                System.out.println("路径已存在");
            }
        }
    }

    /**
     * Creates the directory {@code /laoliu3} on HDFS and prints whether the
     * call succeeded.
     *
     * @throws IOException on HDFS access failure
     */
    public static void createFile() throws IOException {
        String url = "hdfs://hadoopmaster:9000";
        Configuration conf = new Configuration();
        // Use HDFS as the default filesystem.
        conf.set("fs.defaultFS", url);
        // Set the client identity ("root" is the VM user; any valid Hadoop
        // cluster user works).
        System.setProperty("HADOOP_USER_NAME", "root");
        // Obtain the HDFS client via FileSystem.get(); close it even if an
        // operation throws.
        try (FileSystem fs = FileSystem.get(conf)) {
            System.out.println(fs.getClass() + "是具体的类");
            System.out.println(fs);

            boolean laoliu = fs.mkdirs(new Path("/laoliu3"));
            System.out.println("laoliu:" + laoliu);
        }
    }

    /**
     * Lists the entries directly under {@code /heda2022} on HDFS and prints
     * the count followed by each path.
     *
     * @throws Exception on HDFS access failure
     */
    public static void listFileInfor() throws Exception {
        String url = "hdfs://hadoopmaster:9000/heda2022";
        Configuration cfg = new Configuration();
        cfg.set("fs.defaultFS", url);
        // Close the FileSystem handle when done (the original leaked it).
        try (FileSystem fs = FileSystem.get(URI.create(url), cfg)) {
            Path[] paths = {new Path(url)};
            FileStatus[] ft = fs.listStatus(paths);
            Path[] p = FileUtil.stat2Paths(ft);
            System.out.println("p=" + p.length);
            for (Path t1 : p) {
                System.out.println(t1);
            }
        }
    }

    /**
     * Reads {@code /heda2022/pg74482.txt} from HDFS and prints it line by
     * line with a 1-based line-number prefix.
     *
     * @throws IOException on HDFS access failure
     */
    public static void fileRead() throws IOException {
        String url = "hdfs://hadoopmaster:9000";
        Configuration conf = new Configuration();
        // Use HDFS as the default filesystem.
        conf.set("fs.defaultFS", url);
        //>>conf.set("dfs.client.use.datanode.hostname", "true");
        // try-with-resources closes reader, stream and filesystem even on
        // failure (the original only closed on the happy path).
        try (FileSystem fs = FileSystem.get(conf);
             FSDataInputStream in = fs.open(new Path("/heda2022/pg74482.txt"));
             // Decode the byte stream as UTF-8 explicitly; the original
             // relied on the platform default charset.
             BufferedReader br = new BufferedReader(
                     new InputStreamReader(in, StandardCharsets.UTF_8))) {
            String str;
            int line = 0;
            while ((str = br.readLine()) != null) {
                System.out.println("第" + (++line) + "行:" + str);
            }
        }
    }
}
