package com.niodata.dp.plugin.core;

import com.google.common.base.Strings;
import com.niodata.dp.api.common.DpTaskApi;
import com.niodata.dt.fs.DpRelativeLocalFileSystem;
import com.niodata.dt.fs.RelLocalFileSystemBuilder;
import java.io.File;
import java.io.FileInputStream;
import java.io.FilenameFilter;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.lang.reflect.ParameterizedType;
import java.net.MalformedURLException;
import java.net.URL;
import java.nio.charset.StandardCharsets;
import java.util.HashMap;
import java.util.Map;
import java.util.Properties;
import java.util.function.BiConsumer;
import org.apache.commons.lang3.exception.ExceptionUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.log4j.Logger;


/**
 * Factory for the process-wide {@link PluginContext}.
 *
 * <p>Two modes are supported: a local debug mode (active when {@code local-debug.properties}
 * is present on the classpath) and a formal/remote mode driven by {@code runtime.properties}
 * in the working directory plus Azkaban runtime arguments.
 */
public class PluginContextFactoryAdapter {

  private static final Logger logger = Logger.getLogger(PluginContextFactoryAdapter.class);

  // Process-wide singleton; guarded by the synchronized factory method below.
  private static PluginContext pluginContext;

  /**
   * get or create context.
   *
   * <p>Lazily builds the singleton context: a local-debug context when
   * {@link #isLocalDebug()} is true, otherwise a remote context.
   *
   * @param pluginClass pluginclass; its generic superclass supplies the config {@code Input} type
   * @param <T> plugin class (unused directly; kept for source compatibility with callers)
   * @return context
   * @throws IOException e
   */
  public static synchronized <T> PluginContext getOrCreateContext(
        Class<? extends AbstractDpTaskPlugin> pluginClass) throws IOException {
    if (pluginContext != null) {
      return pluginContext;
    }
    if (isLocalDebug()) {
      logger.info("local debug model, create LocalContext!");
      pluginContext = createLocalContext(pluginClass);
    } else {
      logger.info("formal running mode, create DefaultContext!");
      pluginContext = createRemoteContext(pluginClass);
    }
    return pluginContext;
  }

  /**
   * create remote context in formal environment.
   *
   * <p>Reads RPC endpoint and job type from {@code runtime.properties}, instantiates the
   * job-type-specific context reflectively, fetches the job config over RPC, resolves
   * {@code ${job.param}} references to upstream output params, prepares a temporary work
   * filesystem, and (when {@code hadoop.enabled}) builds the Hadoop configuration.
   *
   * @param pluginClass concrete plugin class; its generic superclass supplies the config type
   * @return initialized context
   * @throws IOException if the runtime properties or job config cannot be read
   */
  public static PluginContext createRemoteContext(Class<? extends AbstractDpTaskPlugin> pluginClass)
        throws IOException {
    Properties runtime = loadRuntimeProperties();
    String rpcServer = runtime.getProperty("api.server");
    String rpcPort = runtime.getProperty("api.server.port");
    DpTaskApi dpApi = new DpTaskApi(rpcServer, Integer.parseInt(rpcPort));
    DefaultPluginContext context = newRemoteContextInstance(runtime, dpApi);

    fillRuntimeArgs(context);

    Map<String, Object> confMap;
    if (Strings.isNullOrEmpty(context.getConfigId())) {
      confMap = dpApi
            .getJobConfig(context.getTaskName(), context.getJobName(), context.getFlowExecId());
    } else {
      confMap = dpApi.getJobConfig(context.getConfigId());
    }
    logger.info(confMap);
    context.init(confMap);
    // NOTE(review): throws NPE when "principal" is absent from runtime.properties —
    // assumed to always be provisioned by the launcher; confirm before hardening.
    context.principal = runtime.get("principal").toString();
    setKeyTabFile(context, context.getJobName());

    setUpTmpWorkFileSystem(context, runtime);

    System.setProperty("hive.grantor", "hadoop");
    System.setProperty("principal", context.principal);

    context.pluginService = new PluginServiceImpl(dpApi, context);
    try {
      Class<? extends Input> configClazz = resolveConfigClass(pluginClass);
      confMap = replaceReferenceJobOutparams(context, confMap, dpApi);
      context.config = PluginConfigParser.parseConfig(configClazz, confMap);
    } catch (Exception e) {
      throw new RuntimeException("parse job config error", e);
    }
    context.hadoopHome = System.getenv("HADOOP_HOME");
    if (context.hadoopHome == null) {
      context.hadoopHome = System.getProperty("HADOOP_HOME");
    }
    boolean hadoopEnable = Boolean.parseBoolean(runtime.getProperty("hadoop.enabled", "true"));
    if (hadoopEnable) {
      configureHadoop(context);
    }
    dpApi.saveJobRuntimeConfig(context.getTaskName(), context.getJobName(),
          context.getFlowExecId(), confMap);
    return context;
  }

  /** Loads {@code runtime.properties} from the working directory, always closing the stream. */
  private static Properties loadRuntimeProperties() throws IOException {
    Properties runtime = new Properties();
    // try-with-resources: the previous code leaked the stream when load() threw.
    try (FileInputStream input = new FileInputStream("runtime.properties")) {
      runtime.load(input);
    }
    return runtime;
  }

  /** Reflectively instantiates the spark or default context for the configured job type. */
  private static DefaultPluginContext newRemoteContextInstance(Properties runtime,
        DpTaskApi dpApi) {
    String className = "spark".equals(runtime.getProperty("job.type"))
          ? "com.niodata.dp.plugin.core.DefaultSparkPluginContext"
          : "com.niodata.dp.plugin.core.DefaultPluginContext";
    try {
      Class<?> clazz = Class.forName(className);
      return (DefaultPluginContext) clazz.getConstructor(DpTaskApi.class).newInstance(dpApi);
    } catch (Exception e) {
      throw new RuntimeException(e);
    }
  }

  /**
   * Creates {@code <az.executions.dir>/<flowExecId>/workTmp} and mounts it as the
   * context's temporary work filesystem.
   */
  private static void setUpTmpWorkFileSystem(DefaultPluginContext context, Properties runtime)
        throws IOException {
    String flowExecTmp =
          runtime.getProperty("az.executions.dir") + "/" + context.flowExecId + "/workTmp";
    File file = new File(flowExecTmp);
    file.mkdirs();
    RelLocalFileSystemBuilder builder = new RelLocalFileSystemBuilder();
    Map<String, String> map = new HashMap<>();
    map.put(DpRelativeLocalFileSystem.FS_DP_REL_LOCAL_ROOTDIR, flowExecTmp);
    FileSystem fs = builder.createFileSystem(context.principal, map);
    context.tmpFileSystem = fs;
    context.workDir = file;
  }

  /**
   * Builds the Hadoop {@link Configuration} from {@code HADOOP_HOME}'s site files and records
   * whether kerberos authentication is configured.
   */
  private static void configureHadoop(DefaultPluginContext context) {
    if (context.hadoopHome == null) {
      logger.warn("HADOOP_HOME not set");
      context.hadoopHome = "/usr/local/dp/hadoop";
    } else {
      logger.info("HADOOP_HOME checked:" + context.hadoopHome);
    }
    Configuration config = new Configuration();
    try {
      // krb5.conf resolution order: env KRB5_CONF, existing system property, /etc/krb5.conf.
      if (System.getenv("KRB5_CONF") != null) {
        System.setProperty("java.security.krb5.conf", System.getenv("KRB5_CONF"));
      }
      if (System.getProperty("java.security.krb5.conf") == null) {
        logger.debug("env KRB5_CONF not found,use default /etc/krb5.conf");
        System.setProperty("java.security.krb5.conf", "/etc/krb5.conf");
      }
      config.addResource(
            new File(context.getHadoopHome() + "/etc/hadoop/core-site.xml").toURI().toURL());
      config.addResource(
            new File(context.getHadoopHome() + "/etc/hadoop/hdfs-site.xml").toURI().toURL());
      config.set("fs.hdfs.impl",
            org.apache.hadoop.hdfs.DistributedFileSystem.class.getName()
      );
      config.set("fs.file.impl",
            org.apache.hadoop.fs.LocalFileSystem.class.getName()
      );
      context.kerberosEnabled = "kerberos"
            .equals(config.get("hadoop.security.authentication", "simple"));
    } catch (MalformedURLException e) {
      // Log through the class logger (was e.printStackTrace()); keep going with the
      // partially-built configuration, matching the previous best-effort behavior.
      logger.error("failed to resolve hadoop site files under " + context.getHadoopHome(), e);
    }
    context.hdfsConfiguration = config;
  }

  /** Resolves the plugin's Input config type: first type argument of the generic superclass. */
  @SuppressWarnings("unchecked")
  private static Class<? extends Input> resolveConfigClass(
        Class<? extends AbstractDpTaskPlugin> pluginClass) {
    return (Class<? extends Input>)
          ((ParameterizedType) pluginClass.getGenericSuperclass()).getActualTypeArguments()[0];
  }

  /**
   * create local context in debug environment.
   *
   * <p>Loads {@code local-debug.properties} (UTF-8) from the classpath and instantiates the
   * job-type-specific local context reflectively.
   *
   * @param pluginClass concrete plugin class; its generic superclass supplies the config type
   * @return initialized local-debug context
   * @throws IOException if closing the resource stream fails
   */
  public static PluginContext createLocalContext(Class<? extends AbstractDpTaskPlugin> pluginClass)
        throws IOException {
    Properties confMap = new Properties();
    // try-with-resources closes the stream on every path; the previous code also NPE'd
    // (then merely logged) when the resource was missing — handle that case explicitly.
    try (InputStream inputStream = PluginContextFactoryAdapter.class.getClassLoader()
          .getResourceAsStream("local-debug.properties")) {
      if (inputStream == null) {
        logger.error("read local-debug.properties failed! resource not found on classpath");
      } else {
        confMap.load(new InputStreamReader(inputStream, StandardCharsets.UTF_8));
      }
    } catch (Exception e) {
      logger.error("read local-debug.properties failed! " + ExceptionUtils.getStackTrace(e));
    }
    logger.info(confMap);

    DefaultPluginContext context;
    try {
      String className = "spark".equals(confMap.getProperty("job.type"))
            ? "com.niodata.dp.plugin.core.DefaultLocalSparkPluginContext"
            : "com.niodata.dp.plugin.core.DefaultLocalPluginContext";
      // getDeclaredConstructor().newInstance() replaces the deprecated Class.newInstance().
      context = (DefaultPluginContext)
            Class.forName(className).getDeclaredConstructor().newInstance();
      Map<String, Object> map = new HashMap<>();
      for (Object key : confMap.keySet()) {
        map.put(key.toString(), confMap.get(key));
      }
      context.init(map);
    } catch (Exception e) {
      throw new RuntimeException(e);
    }
    context.pluginService = new PluginServiceLocal(context);
    try {
      context.config = PluginConfigParser.parseConfig(resolveConfigClass(pluginClass), confMap);
    } catch (Exception e) {
      throw new RuntimeException("parse job config error", e);
    }

    return context;
  }

  /**
   * Determine whether is debug model or not by finding the existence of the local-debug.properties
   * file.
   */
  public static boolean isLocalDebug() {
    URL path = PluginContextFactoryAdapter.class.getClassLoader()
          .getResource("local-debug.properties");
    return path != null;
  }

  /**
   * Records the first file in the working directory whose name starts with
   * {@code <jobName>.keytab} as the context's keytab file, if any exists.
   */
  private static void setKeyTabFile(DefaultPluginContext context, String jobName) {
    String keytab = jobName + ".keytab";
    File[] files = new File(".").listFiles((dir, name) -> name.startsWith(keytab));
    if (files != null && files.length > 0) {
      context.keyTabFile = files[0].getName();
    }
  }

  /** Dumps every system property and environment variable to stdout (debugging aid only). */
  private static void debugPrintAllEnvAndSysProperties() {
    System.out.println("debugPrintAllEnvAndSysProperties");
    Properties sysProperties = System.getProperties();
    Map<String, String> propMap = new HashMap<>();
    for (Object key : sysProperties.keySet()) {
      propMap.put(key.toString(), sysProperties.getProperty(key.toString()));
    }
    System.out.println("sys props->");
    propMap.forEach((k, v) -> System.out.print(k + "=" + v + ","));
    System.out.println("\r\nenv props->");
    System.getenv().forEach((k, v) -> System.out.print(k + "=" + v + ","));
  }

  /** Looks up {@code key} first in system properties, then the environment; null when absent. */
  private static String getEnvOrProperty(String key) {
    Properties sysProperties = System.getProperties();
    if (sysProperties.containsKey(key)) {
      return sysProperties.getProperty(key);
    }
    Map<String, String> env = System.getenv();
    if (env != null && env.containsKey(key)) {
      return env.get(key);
    }
    return null;
  }

  /**
   * Fills the Azkaban runtime identifiers (flow, exec, job, task, config id) into the context
   * from system properties / environment variables.
   */
  private static void fillRuntimeArgs(DefaultPluginContext context) {
    //azkaban.flowid=%s -Dazkaban.execid=%s -Dazkaban.jobid=%s -Dazkaban.jobname
    setJavaOptionsToSysProperty();
    String flowName = getEnvOrProperty("azkaban.flowid");
    String runtTimeJobId = getEnvOrProperty("azkaban.jobid");
    context.flowName = flowName;
    String execId = getEnvOrProperty("azkaban.execid");
    context.flowExecId = execId;
    context.runTimeJobId = runtTimeJobId;
    String jobName = getEnvOrProperty("azkaban.jobname");
    String taskName = getEnvOrProperty("azkaban.projectname");
    context.jobName = jobName;
    context.taskName = taskName;
    String configId = getEnvOrProperty("azkaban.configid");
    context.configId = configId;
    System.out.println("plugin runtime info :flowName="
          + flowName + " execId=" + execId + "  taskName=" + taskName + " jobName=" + jobName
          + " configId=" + configId);
  }

  /**
   * Parses {@code spark.executor.extraJavaOptions} (space-separated {@code -Dkey=value} pairs)
   * and copies each pair into the system properties.
   */
  private static void setJavaOptionsToSysProperty() {
    //spark.executor.extraJavaOptions="-Dazkaban.flowid=spark_rpc
    // -Dazkaban.execid=12986 -Dazkaban.jobid=spark_rpc
    // -Dazkaban.jobname=spark_rpc -Dazkaban.projectname=spark_rpc_param_test3"
    String extraJavaOptions = getEnvOrProperty("spark.executor.extraJavaOptions");
    if (extraJavaOptions == null) {
      logger.info("spark.executor.extraJavaOptions not set");
      return;
    }
    for (String token : extraJavaOptions.split(" ")) {
      String pair = token.replace("\"", "");
      // Strip "-D" only as the flag prefix; the previous replaceAll("-D", "") also
      // corrupted keys/values that happened to contain "-D".
      if (pair.startsWith("-D")) {
        pair = pair.substring(2);
      }
      // Split on the first '=' only, so values containing '=' are preserved
      // (the previous split("=") silently dropped such pairs).
      String[] kv = pair.split("=", 2);
      if (kv.length == 2) {
        System.setProperty(kv[0], kv[1]);
      }
    }
  }

  /**
   * replace job referenced params.
   *
   * <p>Any config value of the form {@code ${jobname.paramname}} is replaced with the matching
   * output param of the referenced upstream job in the same flow execution.
   *
   * @param context context
   * @param confMap rawConfMap (mutated in place and also returned)
   * @param dpApi dpApi
   * @return map
   * @throws IOException ioe
   * @throws RuntimeException when a referenced job/param has no recorded output
   */
  private static Map<String, Object> replaceReferenceJobOutparams(PluginContext context,
        Map<String, Object> confMap, DpTaskApi dpApi) throws IOException {
    Map<String, Map<String, Object>> params =
          dpApi.getTaskOutputParams(context.getTaskName(), context.getFlowExecId());
    // put() on an existing key is not a structural modification, so iterating keySet is safe.
    for (String key : confMap.keySet()) {
      if (confMap.get(key) == null) {
        continue;
      }
      String value = confMap.get(key).toString().trim();
      if (value.matches("^\\$\\{.+\\}$")) {
        String p = value.substring(2, value.length() - 1);
        // Split on the first '.' only: param names may themselves contain dots.
        String[] ss = p.split("\\.", 2);
        String jobname = ss[0];
        String paramname = ss[1];
        if (params.get(jobname) == null
              || params.get(jobname).get(paramname) == null) {
          throw new RuntimeException("no output param found for job :" + jobname);
        }
        confMap.put(key, params.get(jobname).get(paramname));
      }
    }
    return confMap;
  }

}
