package com.apex.spark.source;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.apex.spark.SparkEnvironment;
import com.apex.spark.batch.SparkBatchSource;
import com.apex.spark.utils.ConfigKeyName;
import com.typesafe.config.Config;
import org.apache.commons.lang3.StringUtils;
import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.function.Function;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.RuntimeConfig;
import org.elasticsearch.spark.rdd.api.java.JavaEsSpark;

import java.util.Map;

/**
 * Batch source that reads documents from an Elasticsearch index into a
 * {@link Dataset} of {@link Row}s via the elasticsearch-hadoop connector.
 *
 * <p>Connection settings (nodes, port, index, optional credentials) are taken
 * from the Typesafe {@link Config} injected through {@link #setConfig(Config)}
 * and resolved in {@link #prepare(SparkEnvironment)} before
 * {@link #getData(SparkEnvironment)} is called.
 */
public class ElasticBatchSource implements SparkBatchSource {
    private Config config;
    // Comma-separated ES node host list, e.g. "host1,host2".
    private String nodes;
    private String port;
    // Optional basic-auth credentials; may remain null when not configured.
    private String user;
    private String password;
    // Index to read, typically "indexName" or "indexName/type".
    private String index;

    /**
     * Reads the configured index from Elasticsearch and registers the result
     * as a temp view named after the index (the part before any '/').
     *
     * @param environment provides the shared SparkSession
     * @return the documents of {@code index} as a schema-inferred Dataset
     */
    @Override
    public Dataset<Row> getData(SparkEnvironment environment) {
        RuntimeConfig conf = environment.getSparkSession().conf();
        conf.set("es.nodes", nodes);
        // NOTE(review): connector options are normally "es."-prefixed
        // ("es.pushdown") or passed via .option("pushdown", ...) on the
        // DataFrame reader; a bare "pushdown" key on the session conf may be
        // ignored — confirm against the es-hadoop configuration docs.
        conf.set("pushdown", "true");
        conf.set("es.port", port);
        conf.set("es.nodes.wan.only", "true");
        if (!StringUtils.isBlank(user)) {
            conf.set("es.net.http.auth.user", user);
            // Guard against a configured user without a configured password:
            // RuntimeConfig.set rejects null values.
            if (password != null) {
                conf.set("es.net.http.auth.pass", password);
            }
        }
        JavaSparkContext jsc = new JavaSparkContext(environment.getSparkSession().sparkContext());
        // esRDD yields (documentId, sourceMap) pairs; only the documents are needed.
        JavaRDD<Map<String, Object>> esRDD = JavaEsSpark.esRDD(jsc, index).values();
        // Serialize each source map straight to a JSON string. This replaces a
        // manual entry-by-entry copy into a JSONObject with the equivalent
        // one-call serialization (and puts the JSON import to use).
        JavaRDD<String> javaRDD = esRDD.map(value -> JSON.toJSONString(value));
        Dataset<Row> dataset = environment.getSparkSession().read().json(javaRDD);
        // Use the bare index name (before any "/type" suffix) as the view name.
        dataset.createOrReplaceTempView(index.split("/")[0]);
        return dataset;
    }

    /**
     * Resolves connection settings from {@link #setConfig(Config)}'s config.
     *
     * <p>If {@code ELASTIC_READER_NODES} is absent, nodes/port/index stay null
     * and a later {@link #getData(SparkEnvironment)} call will fail; when it is
     * present, {@code getString} on a missing PORT/INDEX key throws
     * {@code ConfigException.Missing}. Credentials are optional.
     *
     * @param plugin unused; kept to satisfy the SparkBatchSource contract
     */
    @Override
    public void prepare(SparkEnvironment plugin) {
        if (config.hasPath(ConfigKeyName.ELASTIC_READER_NODES)) {
            nodes = config.getString(ConfigKeyName.ELASTIC_READER_NODES);
            port = config.getString(ConfigKeyName.ELASTIC_READER_PORT);
            index = config.getString(ConfigKeyName.ELASTIC_READER_INDEX);
        }
        if (config.hasPath(ConfigKeyName.ELASTIC_READER_USER)) {
            user = config.getString(ConfigKeyName.ELASTIC_READER_USER);
        }
        if (config.hasPath(ConfigKeyName.ELASTIC_READER_PASSWORD)) {
            password = config.getString(ConfigKeyName.ELASTIC_READER_PASSWORD);
        }
    }

    /** @return the raw plugin configuration previously set, or null */
    @Override
    public Config getConfig() {
        return config;
    }

    /** Injects the raw plugin configuration; call before {@link #prepare}. */
    @Override
    public void setConfig(Config config) {
        this.config = config;
    }
}
