package com.atguigu.mapreduce.hdfs;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

import java.io.IOException;
import java.net.URI;

/**
 * HDFS client demo: connects to hdfs://master:9000 and performs basic
 * file-system operations (mkdir / delete / upload).
 *
 * @author laberinto
 * @version 1.0
 * @date 2024/10/6 18:52
 */
public class hdfsConf {

    /**
     * Entry point: connects to the HDFS cluster at {@code hdfs://master:9000}
     * as user "root" and uploads a local file.
     *
     * @param args unused command-line arguments
     * @throws IOException if the connection or the file transfer fails
     */
    public static void main(String[] args) throws IOException {
        // Impersonate "root" so HDFS permission checks allow the operations below.
        System.setProperty("HADOOP_USER_NAME", "root");

        Configuration configuration = new Configuration();
        configuration.set("fs.defaultFS", "hdfs://master:9000");
        //configuration.set("fs.hdfs.impl","org.apache.hadoop.hdfs.DistributedFileSystem");

        // FileSystem is Closeable: try-with-resources guarantees the client
        // connection is released even if an operation throws (the original
        // leaked the handle on every run).
        try (FileSystem fileSystem = FileSystem.get(configuration)) {
            // Create a directory:
            //fileSystem.mkdirs(new Path("/user/ECTest"));

            // Delete a directory (recursive):
            //fileSystem.delete(new Path("/user/hadoop/output3"), true);

            // Upload a local file to HDFS.
            fileSystem.copyFromLocalFile(new Path("F:\\stdout.log"), new Path("/user/hadoop/ECTest2"));

            //System.out.println(fileSystem.getClass().getName());
        }
    }
}
