package com.gsy.hadoop;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;

public class HdfsDemo {
    /**
     * Connects to a remote HDFS cluster as user {@code root} and uploads a
     * local file to the {@code /buaa/} directory.
     *
     * @param args unused command-line arguments
     */
    public static void main(String[] args) {
        // 1. Configure the HDFS connection.
        // Point the Hadoop client at a local Hadoop/winutils install (Windows-only requirement).
        System.setProperty("hadoop.home.dir","D:\\hadoop\\hadoop-2.6.0-cdh5.9.3");
        String hdfsStr = "hdfs://47.93.212.121";
        Configuration configuration = new Configuration();
        configuration.set("fs.defaultFS",hdfsStr);
        configuration.set("dfs.replication","1");
        // Resolve datanodes by hostname instead of their (cluster-internal) IPs —
        // needed when the client reaches the cluster over a public address.
        configuration.set("dfs.client.use.datanode.hostname","true");

        // 2. Open the file system and upload the file.
        // try-with-resources closes the FileSystem even on failure, replacing the
        // old finally block whose empty catch silently swallowed close() errors.
        try (FileSystem fileSystem = FileSystem.get(new URI(hdfsStr), configuration, "root")) {
            fileSystem.copyFromLocalFile(new Path("D:\\test\\example-input.txt"),new Path("/buaa/"));
        } catch (IOException | URISyntaxException e) {
            e.printStackTrace();
        } catch (InterruptedException e) {
            // Restore the interrupt flag so the thread's owner can observe it;
            // catching InterruptedException clears the flag otherwise.
            Thread.currentThread().interrupt();
            e.printStackTrace();
        }
    }
}
