package com.acmedcare.framework.exchange.center.core.resolve;

import com.acmedcare.framework.exchange.center.entity.dto.Hdfs;
import com.acmedcare.framework.kits.StringUtils;
import java.io.File;
import java.net.URI;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import lombok.extern.slf4j.Slf4j;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileSystem;

/**
 * Resolves an HDFS URI to a local file path by downloading the remote file
 * into {@code rootPath} and returning the local path as a string.
 *
 * <p>NOTE(review): the class name keeps the original spelling "Reslove"
 * ("Resolve") for backward compatibility with existing callers.
 */
@Slf4j
public class HdfsReslove implements IPathResolve {

  // Source HDFS URI of the file to download.
  private final String uri;
  // Optional local file name; derived from the URI when empty.
  private final String name;
  // Local directory the file is downloaded into.
  private final String rootPath;
  // HDFS connection settings (service uri + user), validated before use.
  private final Hdfs hdfs;

  public HdfsReslove(String uri, String name, String rootPath,
      Hdfs hdfs) {
    this.name = name;
    this.uri = uri;
    this.rootPath = rootPath;
    this.hdfs = hdfs;
  }

  public HdfsReslove(String uri, String rootPath,
      Hdfs hdfs) {
    // Chain to the full constructor so all fields can stay final.
    this(uri, null, rootPath, hdfs);
  }

  /**
   * Downloads the configured HDFS file into {@code rootPath} (creating the
   * directory if needed) and returns the local target path.
   *
   * @return the local file system path of the downloaded file
   * @throws Exception when the HDFS configuration is incomplete or the
   *     remote file does not exist
   */
  @Override
  public String resolve() throws Exception {

    String fileName = name;
    if (StringUtils.isEmpty(fileName)) {
      // HDFS URIs always use '/' regardless of the local platform separator
      // (File.separator is '\' on Windows and would never match). The +1
      // drops the separator itself from the derived file name.
      fileName = uri.substring(uri.lastIndexOf('/') + 1);
    }

    Path targetPath = Paths.get(rootPath, fileName);

    // 不存在文件夹时直接创建文件夹 (create the local directory when missing)
    if (!Files.exists(Paths.get(rootPath))) {
      Files.createDirectories(Paths.get(rootPath));
    }

    downloadFromHdfs(uri, targetPath);

    return targetPath.toString();
  }

  /**
   * Downloads {@code orgUri} from HDFS into {@code targetUri}, replacing any
   * existing local file.
   *
   * @param orgUri source path on HDFS
   * @param targetUri local destination path
   * @throws Exception when the HDFS configuration is incomplete or the
   *     remote file does not exist
   */
  private void downloadFromHdfs(String orgUri, Path targetUri) throws Exception {

    // Validate the configuration BEFORE dereferencing hdfs — the original
    // logged hdfs.getUri() first, which NPE'd whenever hdfs was null and
    // made the null guard below unreachable.
    if (hdfs == null || StringUtils.isEmpty(hdfs.getUser()) || StringUtils
        .isEmpty(hdfs.getUri())) {

      throw new Exception("hdfs 配置文件不存在,无法解析正确的结构,文件路径:" + orgUri);
    }

    log.info("download path {}", hdfs.getUri());
    log.info("org url path {}", orgUri);

    // Replace any previously downloaded copy.
    Files.deleteIfExists(targetUri);

    Configuration config = new Configuration();
    config.set("fs.defaultFS", hdfs.getUri());
    // FileSystem.get(URI, Configuration, String user) is the supported way to
    // connect as a specific user. The original passed the user as the third
    // argument of Configuration.set(name, value, source), where it is only an
    // attribution label and was silently ignored.
    try (FileSystem fileSystem =
        FileSystem.get(new URI(hdfs.getUri()), config, hdfs.getUser())) {
      org.apache.hadoop.fs.Path path = new org.apache.hadoop.fs.Path(orgUri);
      if (!fileSystem.exists(path)) {

        throw new Exception("hdfs 文件不存在,请确认是否存在文件,文件地址:" + orgUri);
      }

      // Close the stream explicitly instead of relying on FileSystem.close()
      // to clean it up.
      try (FSDataInputStream inputStream = fileSystem.open(path)) {
        Files.copy(inputStream, targetUri);
      }
    }
  }
}