import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

import java.net.URI;
import java.nio.charset.StandardCharsets;

/**
 * Demo 1: upload a small text file to HDFS.
 *
 * <p>Connects to the HDFS NameNode at the hard-coded URL below, creates
 * {@code /test/17.txt}, and writes a short UTF-8 string into it.
 */
public class DemoOne {

    public static void main(String[] args) {

        // Simulate Linux-style Hadoop access from Windows: point HADOOP_HOME
        // at a local Hadoop distribution and act as the "root" HDFS user.
        System.setProperty("hadoop.home.dir","D:\\code2\\hadoop-2.7.3");
        System.setProperty("HADOOP_USER_NAME","root");

        // HDFS NameNode URL — adjust the IP to your own cluster.
        String url = "hdfs://192.168.75.11:9000";
        // Load the default HDFS configuration.
        Configuration conf = new Configuration();

        // try-with-resources guarantees both the FileSystem connection and the
        // output stream are closed. Closing the stream is essential on HDFS:
        // an unclosed FSDataOutputStream may never flush, so the file content
        // could be lost even though no exception was thrown.
        try (FileSystem fileSystem = FileSystem.get(URI.create(url), conf);
             FSDataOutputStream fsDataOutputStream =
                     fileSystem.create(new Path("/test/17.txt"))) {

            // Encode explicitly as UTF-8; the no-arg getBytes() would use the
            // platform default charset and produce different bytes per machine.
            byte[] bytes = "Hello World 17".getBytes(StandardCharsets.UTF_8);
            // Write the payload to the HDFS file.
            fsDataOutputStream.write(bytes);

        } catch (Exception e) {
            // Demo-level handling: print the failure and exit normally.
            e.printStackTrace();
        }

    }
}
