package top.sxlai.config;

import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.kerby.config.Conf;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;

import javax.annotation.PreDestroy;
import java.io.IOException;

/**
 * Spring configuration that exposes a shared HDFS {@link FileSystem} bean
 * and closes it when the application context shuts down.
 *
 * @author : sxl
 * @date : 2024/4/11 16:43
 * @Version: 1.0
 */

@Configuration
public class HdfsFileSystem {

    /** HDFS namenode address (from the {@code hadoop.hdfsUrl} property). */
    @Value("${hadoop.hdfsUrl}")
    private String hdfsUrl;

    /** Cached reference to the created bean so {@link #closeFileSystem()} can close it. */
    private FileSystem fileSystem;

    /**
     * Builds the singleton {@link FileSystem} bean connected to {@code hdfsUrl},
     * with its working directory set to the root path {@code "/"}.
     *
     * @return an open {@link FileSystem} instance
     * @throws IOException declared for caller compatibility; in practice
     *         initialization failures are wrapped in a {@link RuntimeException}
     */
    @Bean
    public FileSystem getFileSystem() throws IOException {
        org.apache.hadoop.conf.Configuration conf = new org.apache.hadoop.conf.Configuration();
        conf.set("fs.defaultFS", hdfsUrl); // set HDFS address
        try {
            FileSystem fs = FileSystem.get(conf);
            // Resolve all relative paths against the root directory.
            fs.setWorkingDirectory(new Path("/"));
            // BUG FIX: remember the instance so the @PreDestroy hook can close it.
            // Previously this.fileSystem was never assigned, so closeFileSystem()
            // always saw null and the HDFS connection leaked on shutdown.
            this.fileSystem = fs;
            return fs;
        } catch (IOException e) {
            // Preserve the cause so the failure is diagnosable at startup.
            throw new RuntimeException("Failed to initialize FileSystem", e);
        }
    }

    /**
     * Closes the cached {@link FileSystem} when the Spring context is destroyed.
     * Close failures are reported but not propagated, so shutdown proceeds
     * regardless.
     */
    @PreDestroy
    public void closeFileSystem() {
        if (fileSystem != null) {
            try {
                fileSystem.close();
            } catch (IOException e) {
                // Best-effort close; do not fail application shutdown.
                e.printStackTrace();
            }
        }
    }
}