package com.atguigu.Utils;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.junit.Test;

import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;
/**
 * HDFS client demo: uploads a local file to HDFS.
 *
 * <p>NOTE(review): this is an integration test — it requires a reachable
 * NameNode at hdfs://192.168.1.129:9000 and a local file at the hard-coded
 * Windows path; it will fail in any other environment.
 */
public class HdfsCopyFromLocal {

    /**
     * Copies a local file to the HDFS directory {@code /hadoop/test}.
     *
     * @throws IOException          if the upload or connection fails
     * @throws InterruptedException if the proxy-user login is interrupted
     * @throws URISyntaxException   if the NameNode URI is malformed
     */
    @Test
    public void testCopyFromLocalFile() throws IOException, InterruptedException, URISyntaxException {

        // 1. Configure the client; request a replication factor of 2 for
        //    files written through this FileSystem instance.
        Configuration configuration = new Configuration();
        configuration.set("dfs.replication", "2");

        // 2. Obtain the file system as user "zmh" and upload the file.
        //    try-with-resources guarantees the FileSystem is closed even if
        //    copyFromLocalFile throws (the original leaked the handle on error).
        try (FileSystem fs = FileSystem.get(new URI("hdfs://192.168.1.129:9000"), configuration, "zmh")) {
            fs.copyFromLocalFile(new Path("D:\\SysProject\\HdfsClientDemo\\src\\main\\java\\com\\atguigu\\txt\\test4.txt"), new Path("/hadoop/test"));
        }

        System.out.println("over");
    }
}
