package com.seeyii.springboot.bigdata;

import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.boot.logging.log4j2.Log4J2LoggingSystem;
import scala.Tuple2;

import java.util.Arrays;

/**
 * Spring Boot entry point that additionally runs a small local Spark job:
 * it sums a fixed list of integers and reports how many reduce combinations
 * were performed.
 */
@SpringBootApplication
public class SpringBigdataApplication {

    public static void main(String[] args) {
        SpringApplication.run(SpringBigdataApplication.class, args);
        execute();
    }

    /**
     * Runs the Spark job on an embedded local master. Each element is mapped to a
     * (value, 1) pair and the pairs are reduced component-wise, yielding
     * (sum of values, element count). Prints the sum and the number of reduce
     * steps performed (count - 1).
     */
    public static void execute() {
        // Initialize the Spark environment: embedded local master with 4 worker threads.
        SparkConf sparkConf = new SparkConf()
                .setAppName("xxx")
                .setMaster("local[4]");

        // try-with-resources stops the context even if the job throws
        // (the original leaked the JavaSparkContext).
        try (JavaSparkContext javaSparkContext = new JavaSparkContext(sparkConf)) {
            JavaRDD<Integer> parallelize = javaSparkContext.parallelize(Arrays.asList(1, 2, 3, 4), 3);
            Tuple2<Integer, Integer> reduce = parallelize.mapToPair(x -> new Tuple2<>(x, 1))
                    .reduce(SpringBigdataApplication::getReduce);
            System.out.println("数组sum值:" + reduce._1 + " 计算次数:" + (reduce._2 - 1));
        }
    }

    /**
     * Combines two (sum, count) accumulator pairs by adding them component-wise.
     *
     * @param x first accumulator pair (running sum, running count)
     * @param y second accumulator pair (running sum, running count)
     * @return a new pair {@code (x._1 + y._1, x._2 + y._2)}
     */
    public static Tuple2<Integer, Integer> getReduce(Tuple2<Integer, Integer> x, Tuple2<Integer, Integer> y) {
        // Parameterized return type replaces the original raw Tuple2 (erasure-compatible).
        return new Tuple2<>(x._1() + y._1(), x._2() + y._2());
    }
}
