package com.hdaccp.hadoop.hdfs;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

import java.net.URI;

public class Demo1 {
    /**
     * Copies a local file into HDFS.
     *
     * <p>Usage: {@code Demo1 [srcFile] [dstFile]}
     * <ul>
     *   <li>{@code srcFile} — local file path (default: {@code f:/wc.txt})</li>
     *   <li>{@code dstFile} — HDFS destination path (default: {@code /user/t170})</li>
     * </ul>
     *
     * <p>Connection settings are read from the Hadoop configuration on the
     * classpath (core-site.xml); uncomment the {@code fs.defaultFS} line below
     * to override the target cluster explicitly.
     *
     * @param args optional {@code [srcFile, dstFile]} overriding the defaults
     * @throws Exception if the HDFS client cannot be created or the copy fails
     */
    public static void main(String[] args) throws Exception {
        // Allow paths on the command line; fall back to the original
        // hard-coded values so existing invocations keep working.
        String srcFile = args.length > 0 ? args[0] : "f:/wc.txt";
        String dstFile = args.length > 1 ? args[1] : "/user/t170";

        Configuration conf = new Configuration();
//      conf.set("fs.defaultFS", "hdfs://192.168.15.9:9000");

        // try-with-resources guarantees the FileSystem handle is closed even
        // when the copy throws (the original leaked it on any exception
        // before the explicit fs.close() call).
        try (FileSystem fs = FileSystem.get(conf)) {
            fs.copyFromLocalFile(new Path(srcFile), new Path(dstFile));
        }

        System.out.println("over");
    }
}
