package com.guchenbo.bigdata.spark;

import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import scala.Tuple2;
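// Assumption: the elasticsearch-spark (es-hadoop) artifact is on the classpath;
// JavaEsSpark is only needed for the saveToEs sketch at the end of main().
import org.elasticsearch.spark.rdd.api.java.JavaEsSpark;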

import java.util.List;
import java.util.Map;

/**
 * Groups an in-memory list of documents by business date with Spark,
 * configured with elasticsearch-hadoop bulk-write settings.
 *
 * @author guchenbo
 */
public class EsDemo3 {

    public static void main(String[] args) throws Exception {
        SparkConf conf = new SparkConf().setAppName("gcb").setMaster("local");
        // elasticsearch-hadoop settings: target cluster address plus bulk-write
        // batching (flush every 20mb or 20000 documents, whichever comes first).
        conf.set("es.nodes", "10.57.34.4").set("es.port", "9200")
                .set("es.batch.size.bytes", "20mb")
                .set("es.batch.size.entries", "20000");
        JavaSparkContext sc = new JavaSparkContext(conf);

        // Build the sample documents (see EsDemo2) and distribute them as an RDD.
        List<Map<String, Object>> list = EsDemo2.buildList();
        JavaRDD<Map<String, Object>> rdd = sc.parallelize(list);

        // Group the documents by business date and print each group on the driver.
        JavaPairRDD<String, Iterable<Map<String, Object>>> rdd1 = rdd.groupBy(v1 -> (String) v1.get("bizDate"));
        List<Tuple2<String, Iterable<Map<String, Object>>>> l = rdd1.collect();
        l.forEach(t -> System.out.println(t._1() + " -> " + t._2()));

        // flatMapValues must return an Iterable, not null (null would throw a
        // NullPointerException once an action runs); returning the group itself
        // flattens each (bizDate, group) back into individual (bizDate, doc) pairs.
        // Note: under Spark 2.4+ this lambda is a FlatMapFunction, so return f.iterator() instead.
        JavaPairRDD<String, Map<String, Object>> rdd2 = rdd1.flatMapValues(f -> f);
        // Transformations are lazy; without an action the flatMapValues never runs.
        rdd2.foreach(t -> System.out.println(t));
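
        // The es.* settings above only take effect on an actual write. A minimal
        // sketch, assuming elasticsearch-spark is on the classpath; "demo/docs"
        // is a hypothetical index/type name for the target resource.
        JavaEsSpark.saveToEs(rdd, "demo/docs");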

        sc.close();
    }
}
