package com.guchenbo.bigdata.spark;

import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.elasticsearch.spark.rdd.api.java.JavaEsSpark;

import java.text.ParseException;
import java.util.List;
import java.util.Map;

/**
 * @author guchenbo
 */
/**
 * Demo that writes batches of model-log documents to Elasticsearch via Spark,
 * using one index per business date.
 *
 * @author guchenbo
 */
public class EsDemo {

    public static void main(String[] args) {
        // One sample document, repeated 100 times per date. The map is immutable,
        // so sharing the same instance across lists/dates is safe.
        Map<String, Object> log = ImmutableMap.of(
                "modelUuid", "test",
                "modelVersion", "V1.0.0",
                "label", 1,
                "timestamp", System.currentTimeMillis(),
                "bizDate", "20201029");
        List<Map<String, Object>> docs = Lists.newArrayList();
        for (int i = 0; i < 100; i++) {
            docs.add(log);
        }

        // Business date (yyyyMMdd) -> documents to index for that date.
        Map<String, List<Map<String, Object>>> byDate = Maps.newHashMap();
        byDate.put("20201104", docs);
        byDate.put("20201105", docs);
        byDate.put("20201106", docs);

        saveMap(byDate);
    }

    /**
     * Saves each date's document list to its own Elasticsearch index
     * ({@code paas-model-log-auto-<date>/paas_model_log}).
     *
     * @param map business date (yyyyMMdd) mapped to the documents to index for that date
     */
    public static void saveMap(Map<String, List<Map<String, Object>>> map) {
        SparkConf conf = new SparkConf().setAppName("gcb").setMaster("local");
        // NOTE(review): ES endpoint and batch sizes are hard-coded — consider
        // externalizing to configuration before reusing this beyond a demo.
        conf.set("es.nodes", "10.57.34.4")
                .set("es.port", "9200")
                .set("es.batch.size.bytes", "20mb")
                .set("es.batch.size.entries", "20000");

        // JavaSparkContext implements Closeable; try-with-resources guarantees the
        // context is stopped even if a save fails (the original leaked it).
        try (JavaSparkContext sc = new JavaSparkContext(conf)) {
            for (Map.Entry<String, List<Map<String, Object>>> entry : map.entrySet()) {
                String bizDate = entry.getKey();
                JavaRDD<Map<String, Object>> rdd = sc.parallelize(entry.getValue());
                System.out.println("add " + bizDate + " " + rdd);
                JavaEsSpark.saveToEs(rdd, "paas-model-log-auto-" + bizDate + "/paas_model_log");
            }
        }
    }
}
