package com.yjhh.chdataservice.component;

import lombok.extern.log4j.Log4j;
import lombok.extern.log4j.Log4j2;
import org.apache.commons.io.IOUtils;
import org.apache.commons.lang.StringUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Component;

import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.net.URI;

/**
 * HDFS管理类
 */
@Component
@Log4j2
public class HDFSManager {

    /** HDFS namenode address, e.g. {@code hdfs://host:8020}, from application config. */
    @Value("${hadoop.hdfs.addr}")
    private String hdfsAddr;

    /** Lazily initialized via {@link #initFileSystem()}; {@code null} until then. */
    private FileSystem fileSystem;

    /**
     * Initializes the HDFS {@link FileSystem} handle for the configured address,
     * connecting as user "hadoop". On failure the error is logged and
     * {@link #fileSystem} remains {@code null}, so subsequent uploads return false.
     */
    public void initFileSystem() {
        try {
            fileSystem = FileSystem.get(URI.create(hdfsAddr), new Configuration(), "hadoop");
        } catch (IOException e) {
            log.error("initFileSystem", e);
        } catch (InterruptedException e) {
            // Restore the interrupt flag so callers/executors can still observe
            // the interruption; swallowing it silently loses the signal.
            Thread.currentThread().interrupt();
            log.error("initFileSystem", e);
        }
    }

    /**
     * Uploads a file to HDFS by streaming the given input stream.
     *
     * @param parentPath      target directory in HDFS
     * @param fileName        name of the file to create (overwritten if it already exists)
     * @param fileInputStream source data; closed by this method
     * @return {@code true} when the upload completed; {@code false} when any argument
     *         is blank/null or {@link #initFileSystem()} has not succeeded yet
     * @throws IOException if creating or writing the HDFS file fails
     */
    public boolean uploadFile(String parentPath, String fileName, InputStream fileInputStream) throws IOException {
        if (fileSystem == null
                || StringUtils.isBlank(parentPath)
                || StringUtils.isBlank(fileName)
                || fileInputStream == null) {
            return false;
        }

        // HDFS paths always use '/' regardless of the local OS; File.separator
        // (which is '\' on Windows) must not be used to join them. Path(parent, child)
        // handles the separator correctly.
        Path target = new Path(parentPath, fileName);

        // try-with-resources guarantees both streams are closed (and the HDFS output
        // stream flushed) even if the copy throws; the previous code leaked both and
        // risked never durably writing the data because the stream was never closed.
        try (InputStream in = fileInputStream;
             FSDataOutputStream out = fileSystem.create(target, true)) {
            IOUtils.copy(in, out);
        }
        return true;
    }

    /**
     * @deprecated misspelled name kept for backward compatibility;
     *             use {@link #uploadFile(String, String, InputStream)} instead.
     */
    @Deprecated
    public boolean uplaodFile(String parentPath, String fileName, InputStream fileInputStream) throws IOException {
        return uploadFile(parentPath, fileName, fileInputStream);
    }

}
