package sc.love.boot.spark;

import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.sql.SparkSession;
import org.elasticsearch.spark.rdd.api.java.JavaEsSpark;
import org.spark_project.guava.collect.ImmutableList;
import org.spark_project.guava.collect.ImmutableMap;
import org.springframework.stereotype.Component;

import java.util.Map;

@Component
public class SparkUtil {

    // Connection settings shared by both jobs; kept in one place so the two
    // methods cannot drift apart (the original duplicated these literals).
    private static final String SPARK_MASTER = "spark://192.168.12.70:7077";
    private static final String ES_NODES = "192.168.12.70";
    private static final String ES_PORT = "9200";
    private static final String ES_INDEX = "es_book/book";

    /**
     * Builds a {@link SparkConf} pre-wired with the elasticsearch-hadoop connector settings.
     *
     * @param appName Spark application name shown in the cluster UI
     * @return a configured {@code SparkConf} pointing at the standalone master and the ES node
     */
    private SparkConf buildConf(String appName) {
        return new SparkConf().setAppName(appName).setMaster(SPARK_MASTER)
                .set("es.index.auto.create", "true")
                .set("es.nodes", ES_NODES)
                .set("es.port", ES_PORT)
                // WAN-only mode: talk only to the declared node, do not discover
                // cluster-internal data-node addresses (needed when ES is behind NAT).
                .set("es.nodes.wan.only", "true");
    }

    /**
     * Writes two sample book documents into the {@code es_book/book} index
     * through the elasticsearch-spark connector.
     */
    public void writeEs() {
        SparkConf sparkConf = buildConf("writeEs-yjl")
                .set("spark.executor.memory", "1g");
        SparkSession sparkSession = SparkSession.builder().config(sparkConf).getOrCreate();
        try {
            // Adapter: JavaEsSpark works on the RDD API, which needs a JavaSparkContext.
            JavaSparkContext jsc = new JavaSparkContext(sparkSession.sparkContext());
            Map<String, ?> book1 = ImmutableMap.of("bookId", 13, "bookName", "sprak-13", "bookDetail", "sprak1 study");
            Map<String, ?> book2 = ImmutableMap.of("bookId", 14, "bookName", "sprak-14", "bookDetail", "sprak2 study");
            JavaRDD<Map<String, ?>> javaRDD = jsc.parallelize(ImmutableList.of(book1, book2));
            JavaEsSpark.saveToEs(javaRDD, ES_INDEX);
        } finally {
            // FIX: the original never stopped the session here, leaking the Spark
            // application; release cluster resources even if the write fails.
            sparkSession.stop();
        }
    }

    /**
     * Reads all documents matching the URI query {@code ?q=13} from
     * {@code es_book/book} and prints every field of every hit to stdout.
     */
    public void readEs() {
        SparkSession sparkSession = SparkSession.builder().config(buildConf("readEs-yjl")).getOrCreate();
        try {
            // Adapter: JavaEsSpark works on the RDD API, which needs a JavaSparkContext.
            JavaSparkContext jsc = new JavaSparkContext(sparkSession.sparkContext());
            // FIX: removed an unused StringBuffer holding a match_all query body
            // that was built but never passed to the connector (dead code).
            JavaRDD<Map<String, Object>> searchRdd = JavaEsSpark.esRDD(jsc, ES_INDEX, "?q=13").values();
            for (Map<String, Object> item : searchRdd.collect()) {
                item.forEach((key, value) ->
                        System.out.println("search key:" + key + ", search value:" + value));
            }
        } finally {
            // Ensure the session is released even if collect() throws.
            sparkSession.stop();
        }
    }

    public static void main(String[] args) {
        new SparkUtil().readEs();
    }

}
