package com.hadoop;

import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Service;

@Service
public class HadoopClient {

    /** Base HDFS URI (e.g. {@code hdfs://host:9000}), injected from application properties. */
    @Value("${hdfs.path}")
    private String hdfsPath;

    /**
     * Recreates the {@code /wudai} directory on HDFS and moves two local files into it.
     *
     * <p>Any pre-existing {@code /wudai} directory is deleted recursively first, so each
     * invocation starts from an empty target directory.
     *
     * @throws URISyntaxException   if the configured {@code hdfs.path} is not a valid URI
     * @throws IOException          if any HDFS operation fails
     * @throws InterruptedException declared for caller compatibility
     */
    public void mkdir() throws URISyntaxException, IOException, InterruptedException {
        // Act as the user that started HDFS (root on this Aliyun host); otherwise
        // the NameNode's permission checks reject the operations.
        System.setProperty("HADOOP_USER_NAME", "root");
        // Configuration picks up core-site.xml defaults from the classpath.
        Configuration configuration = new Configuration();
        // Fix: connect to the cluster named by the injected hdfs.path property.
        // The original called FileSystem.get(configuration), which ignored hdfsPath
        // (leaving the field unused and URISyntaxException undeclarable).
        // try-with-resources guarantees the FileSystem is closed even on failure.
        try (FileSystem fs = FileSystem.get(new URI(hdfsPath), configuration)) {
            Path path = new Path("/wudai");
            if (fs.exists(path)) {
                // Recursive delete: the directory may hold files from a previous run.
                // (The single-arg delete(Path) overload is deprecated.)
                fs.delete(path, true);
            }
            fs.mkdirs(path);
            // 'true' deletes the local source after a successful copy (move semantics).
            // NOTE(review): local source paths are hard-coded Windows paths — confirm intended.
            fs.copyFromLocalFile(true, new Path("E:\\啊啊.txt.ok"), path);
            fs.copyFromLocalFile(true, new Path("E:\\啊啊.txt"), path);
        }
    }
}
