import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

import java.io.IOException;
import java.net.URI;
import java.nio.charset.StandardCharsets;

/**
 * @author xyr
 * @date 2021/10/12 8:12
 */
/**
 * Demo: write a small text file to HDFS from a Windows client.
 *
 * <p>Connects to the NameNode at {@code hdfs://192.168.9.11:9000} as user
 * {@code root} and writes "Hello World 17" to {@code /test/17.txt},
 * overwriting the file if it already exists.
 */
public class DemoOne {
    public static void main(String[] args) {
        // Point the Hadoop client at a local winutils installation so it can
        // run on Windows while talking to a Linux cluster.
        System.setProperty("hadoop.home.dir", "D:\\code\\hadoop-2.7.3");
        // Impersonate 'root' so the write is not rejected by HDFS permissions.
        System.setProperty("HADOOP_USER_NAME", "root");

        // NameNode RPC address of the target cluster.
        String url = "hdfs://192.168.9.11:9000";
        // Default client configuration; the URI above supplies fs.defaultFS.
        Configuration conf = new Configuration();

        // try-with-resources guarantees both the FileSystem handle and the
        // output stream are flushed and closed — without the close, the
        // written bytes may never be committed to HDFS (original code leaked
        // both resources).
        try (FileSystem fileSystem = FileSystem.get(URI.create(url), conf);
             FSDataOutputStream out = fileSystem.create(new Path("/test/17.txt"))) {
            // Encode explicitly as UTF-8 instead of the platform default
            // charset, so the file content is the same on every client OS.
            byte[] bytes = "Hello World 17".getBytes(StandardCharsets.UTF_8);
            out.write(bytes);
        } catch (IOException e) {
            // Demo code with no logging framework configured; a real
            // application would log this instead.
            e.printStackTrace();
        }
    }
}

