package com.niodata.dt.fs;

import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.base.Preconditions;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.Base64;
import java.util.List;
import java.util.Map;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.hdfs.DistributedFileSystem;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.log4j.Logger;
import org.apache.logging.log4j.util.Strings;


/**
 * Builds {@link FileSystem} instances for HDFS, supporting both simple and
 * Kerberos authentication as well as NameNode HA configuration.
 *
 * <p>Kerberos keytabs arrive base64-encoded in the configuration map and are
 * decoded to files cached under {@code <user.dir>/keytab}, keyed by the MD5 of
 * the payload so identical keytabs are written only once.
 */
public class HdfsFileSystemBuilder implements FileSystemBuilder {

  private static final Logger logger = Logger.getLogger(HdfsFileSystemBuilder.class);

  // ObjectMapper is thread-safe and expensive to create; share one instance.
  private static final ObjectMapper objectMapper = new ObjectMapper();

  // Cache directory for decoded keytab files, under the process working directory.
  String keytabDir = Paths.get(System.getProperty("user.dir"), "keytab").toUri().getRawPath();

  /**
   * Creates an HDFS {@link FileSystem} for the given proxy user from the
   * supplied configuration map.
   *
   * <p>Required keys: {@code hdfs.user}, {@code fs.defaultFS},
   * {@code hadoop.security.authentication}. Optional keys:
   * {@code hadoop.security.authorization}, {@code hdfs.ha.config},
   * {@code hadoop.security.auth_to_local}, and — when Kerberos is enabled —
   * {@code java.security.krb5.realm}, {@code java.security.krb5.kdc},
   * {@code keytab.file}.
   *
   * @param user the application-level user the returned proxy filesystem acts for
   * @param conf configuration key/value pairs, see above
   * @return a {@link ProxyHdfsFileSystem} wrapping the logged-in UGI
   * @throws IOException if the keytab cannot be written or the login/connect fails
   * @throws IllegalStateException if a required configuration key is missing
   */
  @Override
  public FileSystem createFileSystem(String user, Map<String, String> conf)
        throws IOException {
    String hdfsRealUser = conf.get("hdfs.user");
    Preconditions.checkState(Strings.isNotEmpty(hdfsRealUser), "'hdfs.user'是必填配置!");
    String defaultFs = conf.get("fs.defaultFS");
    Preconditions.checkState(Strings.isNotEmpty(defaultFs), "'fs.defaultFS'是必填配置!");
    // parseBoolean never returns null — a missing or malformed value simply
    // yields false, so no null check is needed (the old checkNotNull was dead code).
    boolean authorization = Boolean.parseBoolean(conf.get("hadoop.security.authorization"));
    String authentication = conf.get("hadoop.security.authentication");
    Preconditions.checkState(Strings.isNotEmpty(authentication),
          "'hadoop.security.authentication'是必填配置!");

    Configuration config = new Configuration();
    config.set("hadoop.security.authorization", Boolean.toString(authorization));
    config.set("hadoop.security.authentication", authentication);

    // HA settings arrive as a list of {"key": ..., "value": ...} entries. The
    // value is handled as Object and round-tripped through JSON because the
    // map's generic type is erased at runtime and the entry may not be a String.
    Object haConfig = conf.get("hdfs.ha.config");
    String namespace = null;
    boolean ha = false;
    if (haConfig != null) {
      String json = objectMapper.writeValueAsString(haConfig);
      List<Map<String, Object>> haConfigList = objectMapper.readValue(json,
            objectMapper.getTypeFactory()
                  .constructCollectionType(List.class, Map.class));
      ha = !haConfigList.isEmpty();
      for (Map<String, Object> entry : haConfigList) {
        String key = entry.get("key").toString();
        String value = entry.get("value").toString();
        config.set(key, value);
        if ("dfs.nameservices".equals(key)) {
          // Remember the nameservice id: the failover proxy provider key below
          // is suffixed with it.
          namespace = value;
        }
      }
    }
    if (ha) {
      Preconditions.checkState(Strings.isNotEmpty(namespace),
            "HA模式下'dfs.nameservices'是必填配置!");
      config.set("dfs.client.failover.proxy.provider." + namespace,
            "org.apache.hadoop.hdfs.server.namenode.ha.ConfiguredFailoverProxyProvider");
    }

    config.set("fs.defaultFS", defaultFs);
    config.set("fs.hdfs.impl", DistributedFileSystem.class.getName());

    UserGroupInformation ugi;
    if (authorization && authentication.equalsIgnoreCase(
          UserGroupInformation.AuthenticationMethod.KERBEROS.name())) {
      String krb5Realm = conf.get("java.security.krb5.realm");
      Preconditions.checkState(Strings.isNotEmpty(krb5Realm),
            "kerberos模式下'java.security.krb5.realm'是必填项!");
      String krb5Kdc = conf.get("java.security.krb5.kdc");
      Preconditions.checkState(Strings.isNotEmpty(krb5Kdc),
            "kerberos模式下'java.security.krb5.kdc'是必填项!");
      String keytabFileBase64 = conf.get("keytab.file");
      Preconditions.checkState(Strings.isNotEmpty(keytabFileBase64),
            "kerberos模式下'keytab.file'是必填项!");

      // NOTE(review): system properties are process-global — concurrent builds
      // against different realms/KDCs would race here; confirm single-cluster use.
      System.setProperty("java.security.krb5.realm", krb5Realm);
      System.setProperty("java.security.krb5.kdc", krb5Kdc);
      String authToLocal = conf.get("hadoop.security.auth_to_local");
      if (authToLocal != null) {
        // Ensure the rule chain ends with DEFAULT so principals not matched by
        // any custom rule still resolve to their short name.
        // e.g. "RULE:[2:$1@$0](hadoop@.*MLOGCN.INN)s/.*/hadoop/\nDEFAULT"
        if (!authToLocal.endsWith("DEFAULT")) {
          authToLocal = authToLocal.trim() + "\nDEFAULT";
        }
        // Fix: only set the key when a value was supplied. Configuration.set
        // throws IllegalArgumentException on a null value, which previously
        // broke every Kerberos login that omitted this optional setting.
        config.set("hadoop.security.auth_to_local", authToLocal);
      }
      String keytabFilePath = checkOrCreateKeytabFile(keytabFileBase64);
      UserGroupInformation.setConfiguration(config);
      // Log in as the real HDFS user; the proxy filesystem below impersonates
      // the application user on top of this UGI.
      UserGroupInformation.loginUserFromKeytab(hdfsRealUser, keytabFilePath);
      ugi = UserGroupInformation.getLoginUser();
    } else {
      UserGroupInformation.setConfiguration(config);
      ugi = UserGroupInformation.createRemoteUser(hdfsRealUser);
    }

    FileSystem fileSystem = FileSystem.get(config);
    ProxyHdfsFileSystem fs = new ProxyHdfsFileSystem(user, ugi);
    fs.setFileSystem(fileSystem);
    return fs;
  }

  /**
   * Decodes the base64 keytab payload to a file under {@link #keytabDir},
   * reusing an existing file when the same payload (by MD5) was decoded before.
   *
   * @param keytabFileBase64 either a bare base64 string or a data URI such as
   *     {@code data:application/octet-stream;base64,BQIAAABTA...}
   * @return the raw filesystem path of the decoded keytab file
   * @throws IOException if the directory or file cannot be written
   */
  private String checkOrCreateKeytabFile(String keytabFileBase64) throws IOException {
    String id = MD5Util.encode(keytabFileBase64);
    Files.createDirectories(Paths.get(keytabDir));

    Path keytabPath = Paths.get(keytabDir, id + ".keytab");
    if (!Files.exists(keytabPath)) {
      // Fix: strip the data-URI prefix only when it is actually present.
      // Previously indexOf() returning -1 led to substring(6), silently
      // corrupting bare base64 input.
      int marker = keytabFileBase64.indexOf("base64,");
      String payload = marker >= 0
            ? keytabFileBase64.substring(marker + "base64,".length())
            : keytabFileBase64;
      byte[] bytes = Base64.getDecoder().decode(payload);
      Files.write(keytabPath, bytes);
    }
    return keytabPath.toUri().getRawPath();
  }
}
