package com.woldier.filesystem.config;

import lombok.RequiredArgsConstructor;
import org.apache.hadoop.fs.FileSystem;
import org.springframework.boot.context.properties.EnableConfigurationProperties;
import org.springframework.context.annotation.Bean;

import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;

/**
 * Auto-configuration that exposes a Hadoop {@link FileSystem} bean and a
 * fixed-size {@link Pool} of independent {@code FileSystem} connections,
 * both driven by {@link HadoopConfigProperties}.
 *
 * @author woldier
 * @version 1.0
 * @date 2023/3/23 16:48
 **/
@org.springframework.context.annotation.Configuration
@EnableConfigurationProperties(value = {HadoopConfigProperties.class})
@RequiredArgsConstructor
public class HadoopAutoConfig {
    private final HadoopConfigProperties hadoopConfigProperties;

    /**
     * Primary Hadoop {@link FileSystem} bean.
     *
     * @return a freshly created, connected file system
     * @throws IllegalStateException if the connection cannot be established
     */
    @Bean
    public FileSystem fileSystem() {
        return createFileSystem();
    }

    /**
     * Registers the {@link FileSystem} connection pool.
     *
     * @return com.woldier.filesystem.config.Pool&lt;org.apache.hadoop.fs.FileSystem&gt;
     *         a pool holding {@code poolSize} independent connections
     * @author: woldier
     * @date: 2023/5/16 14:33
     */
    @Bean
    public Pool<FileSystem> pool() {
        int size = hadoopConfigProperties.getPool().getPoolSize();
        return new Pool<>(size, () -> {
            FileSystem[] fileSystems = new FileSystem[size];
            for (int i = 0; i < size; i++) {
                // Call the private factory, NOT the fileSystem() @Bean method:
                // inside a @Configuration class, bean-method calls are
                // CGLIB-intercepted and would return the same cached singleton
                // for every slot, leaving the pool with one shared connection.
                fileSystems[i] = createFileSystem();
            }
            return fileSystems;
        });
    }

    /**
     * Creates a new Hadoop {@link FileSystem} connection from the configured
     * URL, user, and replication factor.
     * <p>
     * Fails fast instead of swallowing the exception and returning
     * {@code null} (a {@code null} bean would only surface much later as an
     * obscure NPE at first use).
     *
     * @return a new connection; never {@code null}
     * @throws IllegalStateException wrapping the underlying failure
     */
    private FileSystem createFileSystem() {
        try {
            org.apache.hadoop.conf.Configuration configuration = new org.apache.hadoop.conf.Configuration();
            // Single-node Hadoop deployment here, so the replication factor is
            // taken from configuration (typically 1; the HDFS default is 3).
            configuration.set("dfs.replication", hadoopConfigProperties.getReplication());
            return FileSystem.get(new URI(hadoopConfigProperties.getUrl()), configuration, hadoopConfigProperties.getUser());
        } catch (InterruptedException e) {
            // Restore the interrupt flag before translating the exception.
            Thread.currentThread().interrupt();
            throw new IllegalStateException(
                    "Interrupted while connecting to HDFS at " + hadoopConfigProperties.getUrl(), e);
        } catch (IOException | URISyntaxException e) {
            throw new IllegalStateException(
                    "Failed to connect to HDFS at " + hadoopConfigProperties.getUrl(), e);
        }
    }

}
