package core.rdd.transform;

import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.function.FlatMapFunction;
import org.apache.spark.api.java.function.Function;

import java.util.Arrays;
import java.util.Iterator;
import java.util.List;

public class Spark05_GROUPBY {

    /**
     * Demonstrates {@code JavaRDD.groupBy}: flattens nested name lists with
     * {@code flatMap}, then groups the names two ways — once with an explicit
     * anonymous {@link Function}, once with the equivalent lambda form.
     *
     * @param args unused command-line arguments
     */
    public static void main(String[] args) {
        // Local master for demonstration; replace with your cluster URL when deploying.
        SparkConf conf = new SparkConf()
                .setAppName("Spark05_GROUPBY") // fixed: was mislabeled "Spark03_FLATMAP"
                .setMaster("local[*]");
        // JavaSparkContext is the main entry point for interacting with the cluster.
        JavaSparkContext sc = new JavaSparkContext(conf);
        try {
            List<List<String>> stringLists = Arrays.asList(
                    Arrays.asList("张三", "张四", "张5", "张6"),
                    Arrays.asList("王三", "王四", "王5", "王6"),
                    Arrays.asList("杨1", "杨12", "杨123", "杨1234"));
            JavaRDD<List<String>> parallelize = sc.parallelize(stringLists, 2);

            // flatMap flattens the nested lists into a single RDD of strings.
            JavaRDD<String> flatMap1 = parallelize.flatMap(new FlatMapFunction<List<String>, String>() {
                @Override
                public Iterator<String> call(List<String> list) throws Exception {
                    return list.iterator();
                }
            });

            // Verbose form: group names by whether they contain "张".
            JavaPairRDD<Object, Iterable<String>> groupByRdd1 = flatMap1.groupBy(new Function<String, Object>() {
                @Override
                public Object call(String v1) throws Exception {
                    return v1.contains("张");
                }
            });

            groupByRdd1.collect().forEach(System.out::println);

            System.out.println("##################################分隔符#############################################");

            // Lambda form: group names by whether they contain "杨".
            // (contains() already returns boolean — the former "== true" was redundant.)
            JavaPairRDD<Object, Iterable<String>> groupByRdd2 = flatMap1.groupBy(
                    string -> string.contains("杨")
            );

            groupByRdd2.collect().forEach(System.out::println);

        } finally {
            // Always stop the context so cluster resources are released even on failure.
            sc.close();
        }
    }
}
