package com.itcj.Hadoop;

import org.apache.commons.io.IOUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;



import java.io.FileInputStream;

/**
 * Demonstrates basic HDFS operations through the Hadoop {@code FileSystem} API:
 * creating a file, copying files between the client's local filesystem and HDFS,
 * and a stream-based upload.
 *
 * <p>Connects to {@code hdfs://node01:9000} as user {@code root}. Local paths are
 * hard-coded Windows paths, so this is a demo/test program, not reusable code.
 */
public class HadoopTest01 {

    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        conf.set("fs.defaultFS", "hdfs://node01:9000");
        // Impersonate "root" so HDFS permission checks pass on the test cluster.
        System.setProperty("HADOOP_USER_NAME", "root");

        // Alternative shortcut (URI + user in one call):
        // FileSystem fs = FileSystem.get(new URI("hdfs://node01:9000"), conf, "root");

        // FileSystem is AutoCloseable; try-with-resources guarantees close()
        // even when one of the operations below throws.
        try (FileSystem fs = FileSystem.get(conf)) {

            // FIX: create() returns an output stream that must be closed, otherwise
            // the stream leaks and the file may never be finalized on HDFS.
            fs.create(new Path("/hdfsbyjava"), true).close();

            // Direct download/upload convenience methods; "local" means the client machine.
            fs.copyToLocalFile(new Path("/wordcount/a.txt"),
                    new Path("D:\\JavaProject\\JavaEE\\idea_workspace\\zookeeper"));
            fs.copyFromLocalFile(new Path("D:\\JavaProject\\JavaEE\\idea_workspace\\zookeeper\\b.txt"),
                    new Path("/wordcount"));

            // Stream-based upload. FIX: both streams were previously never closed;
            // try-with-resources closes them in reverse order even if the copy fails.
            try (FileInputStream fis = new FileInputStream(
                         "D:\\JavaProject\\JavaEE\\idea_workspace\\zookeeper\\a.txt");
                 FSDataOutputStream fso = fs.create(new Path("/stream/c.txt"), true)) {
                IOUtils.copy(fis, fso);
            }
        }
    }
}
