package com.cjb.cloudhdfs.config;

import com.cjb.common.util.Common;
import lombok.extern.slf4j.Slf4j;
import org.apache.hadoop.fs.FileSystem;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.apache.hadoop.conf.Configuration;
import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;

/**
 * Spring configuration that exposes a singleton HDFS {@code FileSystem} client bean.
 *
 * @author chenjinbin
 * @since 2023-10-20
 */
@Slf4j
@org.springframework.context.annotation.Configuration
public class HadoopConfig {

    /** HDFS namenode URI (value of {@code fs.defaultFS}), e.g. {@code hdfs://host:9000}. */
    @Value("${hadoop.fs.defaultFS}")
    private String defaultFS;

    /** Local Hadoop installation directory, used only when the HADOOP_HOME env var is absent. */
    @Value("${hadoop.binSrc}")
    private String hadoopHome;

    /** Location of the hdfs-site.xml resource added to the Hadoop Configuration. */
    @Value("${hadoop.hdfsSite}")
    private String hdfsSite;

    /** Location of the core-site.xml resource added to the Hadoop Configuration. */
    @Value("${hadoop.coreSite}")
    private String coreSite;

    /**
     * Creates the shared HDFS {@link FileSystem} client bean connected to {@code defaultFS}.
     *
     * @return a {@link FileSystem} handle opened as remote user {@code "hatelys"}
     * @throws IOException          if the FileSystem cannot be created
     * @throws URISyntaxException   if {@code defaultFS} is not a valid URI
     * @throws InterruptedException if the connection attempt is interrupted
     */
    @Bean
    public FileSystem fileSystemInit() throws IOException, URISyntaxException, InterruptedException {
        // Prefer the HADOOP_HOME environment variable; fall back to the configured path
        // so the native bin/winutils lookup works on machines without the env var.
        String hadoop = System.getenv("HADOOP_HOME");
        if (Common.isNotEmpty(hadoop)) {
            System.setProperty("hadoop.home.dir", hadoop);
        } else {
            System.setProperty("hadoop.home.dir", hadoopHome);
        }

        // NOTE(review): this system property is only a fallback identity; the explicit
        // "hatelys" user passed to FileSystem.get() below takes precedence, so "root"
        // is effectively ignored here. Confirm which user is actually intended.
        System.setProperty("HADOOP_USER_NAME", "root");

        Configuration conf = new Configuration();
        // FIX: the original key "dfs.suppd" is not a recognized HDFS property, so
        // Hadoop silently ignored it; "dfs.support.append" was almost certainly the
        // intended (truncated) key — TODO confirm against the cluster config.
        conf.setBoolean("dfs.support.append", true);
        conf.set("dfs.replication", "1");
        conf.addResource(hdfsSite);
        conf.addResource(coreSite);
        conf.set("fs.defaultFS", defaultFS);

        URI uri = new URI(defaultFS);
        FileSystem fileSystem = FileSystem.get(uri, conf, "hatelys");
        log.info("fileSystem创建成功");
        return fileSystem;
    }

}
