package cn.cloud.mqtt2redis.configuration.hdfs;

import org.apache.hadoop.fs.FileSystem;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;

import java.io.IOException;
import java.net.URI;
import java.util.Properties;

@org.springframework.context.annotation.Configuration
public class HdfsConfig {

    /** SLF4J loggers are thread-safe and stateless; declare them static final. */
    private static final Logger logger = LoggerFactory.getLogger(HdfsConfig.class);

    /**
     * Creates the shared HDFS {@link FileSystem} client bean ("hdfs.fs.pool").
     *
     * <p>Note: {@code @Bean} method parameters are resolved by the container
     * automatically, so no {@code @Autowired} is needed on the method.
     *
     * @param url     the HDFS NameNode URI, e.g. {@code hdfs://host:8020},
     *                injected from {@code hdfs.url}
     * @param timeout injected from {@code hdfs.timeout}; currently unused —
     *                TODO(review): apply it (e.g. {@code ipc.client.rpc-timeout.ms})
     *                or drop the property
     * @param path    injected from {@code hdfs.path}; currently unused here —
     *                presumably consumed by callers of this bean; verify
     * @return a dedicated (non-cached) {@link FileSystem} instance
     * @throws IOException if the client cannot be initialized
     */
    @Bean(name = "hdfs.fs.pool")
    public FileSystem hdfsClientFactory(@Value("${hdfs.url}") String url,
                                        @Value("${hdfs.timeout}") String timeout,
                                        @Value("${hdfs.path}") String path)
            throws IOException {
        // Hadoop's login falls back to this system property when the
        // HADOOP_USER_NAME environment variable is not set, so all requests
        // run as the "hdfs" user.
        System.setProperty("HADOOP_USER_NAME", "hdfs");

        org.apache.hadoop.conf.Configuration conf = new org.apache.hadoop.conf.Configuration();
        conf.set("fs.defaultFS", url);

        // newInstance(...) returns a private FileSystem object instead of the
        // JVM-wide cached one, so closing this bean cannot affect other users.
        // Passing the URI explicitly makes the target cluster unambiguous.
        FileSystem hdfs = FileSystem.newInstance(URI.create(url), conf);
        logger.info("hdfs连接成功！");
        logger.info("hdfs地址：{}", url);
        return hdfs;
    }

}
