package com.catmiao.rdd.operate.transform;

import com.google.common.collect.Lists;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.function.FlatMapFunction;
import org.apache.spark.api.java.function.Function;

import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;

/**
 * @author ChengMiao
 * @title: Transfer_06_GroupBy
 * @projectName spark_study
 * @description: Demonstrates the RDD {@code groupBy} transformation by grouping
 *               integers by value and counting the members of each group.
 * @date 2024/11/25 16:27
 */
public class Transfer_06_GroupBy {

    /**
     * Demonstrates the {@code groupBy} transformation: groups the elements of an
     * RDD by a key derived from each element, then counts the members of each
     * group with {@code mapValues} and prints the resulting (key, count) pairs.
     *
     * @param args unused command-line arguments
     */
    public static void main(String[] args) {

        final SparkConf conf = new SparkConf();
        conf.setAppName("appName");
        conf.setMaster("local[*]");

        final JavaSparkContext jsc = new JavaSparkContext(conf);

        ArrayList<Integer> list = Lists.newArrayList(1, 2, 3, 3, 4);

        JavaRDD<Integer> rdd = jsc.parallelize(list, 2);

        // The value returned by the key function names the group each element
        // joins; here each element is its own key, so equal values are grouped
        // together (e.g. both 3s end up under key 3).
        JavaPairRDD<Integer, Iterable<Integer>> rdd2 = rdd.groupBy(i -> i);

        // Count the members of each group by walking its Iterable once.
        JavaPairRDD<Integer, Integer> rdd3 = rdd2.mapValues(integers -> {
            int count = 0;
            for (Integer ignored : integers) {
                count++;
            }
            return count;
        });

        rdd3.collect().forEach(System.out::println);

        jsc.close();
    }
}
