package com.apex.spark.sink;

import com.apex.spark.SparkEnvironment;
import com.apex.spark.batch.SparkBatchSink;
import com.apex.spark.utils.ConfigKeyName;
import com.typesafe.config.Config;
import org.apache.commons.lang3.StringUtils;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.RuntimeConfig;
//import org.elasticsearch.spark.rdd.api.java.JavaEsSpark;
import org.elasticsearch.spark.sql.api.java.JavaEsSparkSQL;

/**
 * Batch sink that writes a {@code Dataset<Row>} to Elasticsearch using the
 * elasticsearch-hadoop connector ({@link JavaEsSparkSQL#saveToEs}).
 *
 * <p>Connection settings (nodes, port, index, optional basic-auth credentials)
 * are read from the Typesafe {@link Config} supplied via {@link #setConfig(Config)}
 * and resolved in {@link #prepare(SparkEnvironment)}.
 */
public class ElasticSearchBatchSink implements SparkBatchSink {
    private Config config;
    private String nodes;
    private String port;
    private String user;
    private String password;
    private String index;

    /**
     * Resolves Elasticsearch connection settings from the sink config.
     *
     * <p>Fails fast with a clear message if the required nodes/port/index keys
     * are missing; previously they were left {@code null} and only surfaced
     * later as an opaque NPE inside {@code RuntimeConfig.set} in
     * {@link #outputBatch}. User and password remain optional.
     *
     * @param plugin the Spark environment (unused here; part of the sink contract)
     * @throws IllegalArgumentException if {@code ELASTIC_WRITER_NODES} is not configured
     */
    @Override
    public void prepare(SparkEnvironment plugin) {
        if (config.hasPath(ConfigKeyName.ELASTIC_WRITER_NODES)) {
            nodes = config.getString(ConfigKeyName.ELASTIC_WRITER_NODES);
            port = config.getString(ConfigKeyName.ELASTIC_WRITER_PORT);
            index = config.getString(ConfigKeyName.ELASTIC_WRITER_INDEX);
        } else {
            // Without nodes/port/index the sink cannot possibly write; report the
            // missing key here instead of NPE-ing deep inside outputBatch().
            throw new IllegalArgumentException(
                    "Elasticsearch sink requires config key: " + ConfigKeyName.ELASTIC_WRITER_NODES);
        }
        if (config.hasPath(ConfigKeyName.ELASTIC_WRITER_USER)) {
            user = config.getString(ConfigKeyName.ELASTIC_WRITER_USER);
        }
        if (config.hasPath(ConfigKeyName.ELASTIC_WRITER_PASSWORD)) {
            password = config.getString(ConfigKeyName.ELASTIC_WRITER_PASSWORD);
        }
    }

    @Override
    public Config getConfig() {
        return config;
    }

    @Override
    public void setConfig(Config config) {
        this.config = config;
    }

    /**
     * Writes {@code dataset} to the configured Elasticsearch index.
     *
     * <p>Pushes the connector options ({@code es.nodes}, {@code es.port}, auth, …)
     * into the session's {@link RuntimeConfig} before delegating to
     * {@link JavaEsSparkSQL#saveToEs}.
     *
     * @param env     Spark environment providing the active session
     * @param dataset rows to index
     */
    @Override
    public void outputBatch(SparkEnvironment env, Dataset<Row> dataset) {
        RuntimeConfig conf = env.getSparkSession().conf();
        conf.set("es.index.auto.create", "true");
        conf.set("pushdown", "true");
        conf.set("es.nodes", nodes);
        conf.set("es.port", port);
        // WAN-only mode: talk only to the declared nodes (no cluster discovery).
        conf.set("es.nodes.wan.only", "true");
        if (StringUtils.isNotBlank(user)) {
            conf.set("es.net.http.auth.user", user);
            // A user without a configured password previously passed null to
            // conf.set(), which throws NPE; fall back to an empty password so
            // the connector reports an auth failure instead.
            conf.set("es.net.http.auth.pass", password == null ? "" : password);
        }
        JavaEsSparkSQL.saveToEs(dataset, index);
    }
}
