package net.lzzy.kvrdd;

import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.function.Function;
import org.apache.spark.api.java.function.Function2;
import org.apache.spark.api.java.function.PairFunction;
import scala.Tuple2;

import java.util.Arrays;

/**
 * Spark pair-RDD demo: parses "key,value" lines into (key, value) pairs,
 * sums the numeric values per key with {@code reduceByKey}, multiplies each
 * sum by 10 via {@code mapValues}, and prints the resulting pairs.
 *
 * <p>Runs locally ({@code setMaster("local")}) over a small hard-coded dataset.
 */
public class MapOpenator3 {
    public static void main(String[] args) {
        SparkConf conf = new SparkConf().setAppName("MapToPair").setMaster("local");
        // JavaSparkContext is Closeable; the original never stopped it (resource leak).
        // try-with-resources guarantees the context is shut down even on failure.
        try (JavaSparkContext jsc = new JavaSparkContext(conf)) {
            JavaRDD<String> lines =
                    jsc.parallelize(Arrays.asList("a,1", "a,2", "a,3", "b,2", "b,5"));

            // "k,v" -> (k, v). Split once with limit 2 instead of twice per record,
            // which also tolerates values that themselves contain commas.
            JavaPairRDD<String, String> pairs = lines.mapToPair(s -> {
                String[] kv = s.split(",", 2);
                return new Tuple2<>(kv[0], kv[1]);
            });

            // Sum the numeric string values per key (e.g. "a" -> "6", "b" -> "7").
            JavaPairRDD<String, String> sums = pairs.reduceByKey(
                    (v1, v2) -> String.valueOf(Integer.parseInt(v1) + Integer.parseInt(v2)));

            // Scale each per-key sum by 10, converting to Integer.
            JavaPairRDD<String, Integer> scaled = sums.mapValues(v -> Integer.parseInt(v) * 10);

            scaled.collect().forEach(System.out::println);
        }
    }
}
