package core.rdd.持久化;

import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.Optional;
import org.apache.spark.api.java.function.FlatMapFunction;
import org.apache.spark.api.java.function.Function;
import org.apache.spark.api.java.function.Function2;
import org.apache.spark.api.java.function.PairFunction;
import org.apache.spark.storage.StorageLevel;
import scala.Tuple2;

import java.util.Arrays;
import java.util.Iterator;
import java.util.List;

/**
 * Demonstrates the difference between checkpoint and cache/persist:
 *
 * 1. checkpoint() truncates the RDD lineage; cache() only adds a dependency
 *    to the lineage graph.
 *
 * 2. cache() and persist() keep the result of an earlier computation; by
 *    default the data is stored (serialized) in the JVM heap. The data is NOT
 *    cached at the point of the call — it is materialized only when an action
 *    operator is triggered.
 *
 * 3. Once the cached data is no longer needed, release it with unpersist().
 */
public class Spark03_CheckPoint_B {
    public static void main(String[] args) {

        // SparkConf points at the Spark master; "local[*]" runs locally with all cores.
        SparkConf conf = new SparkConf()
                .setAppName("Spark03_CheckPoint_B") // application name
                .setMaster("local[*]"); // replace with your master address
        // JavaSparkContext is the main entry point for interacting with the cluster.
        JavaSparkContext jsc = new JavaSparkContext(conf);
        // Must be set before any checkpoint() call. In production this should be a
        // fault-tolerant file system path (e.g. HDFS), not a local relative path.
        jsc.setCheckpointDir("checkpoint");
        try {
            List<Tuple2<String, Integer>> tuple2s = Arrays.asList(
                    new Tuple2<>("a", 123),
                    new Tuple2<>("b", 456),
                    new Tuple2<>("c", 5),
                    new Tuple2<>("e", 2),
                    new Tuple2<>("d", 6),
                    new Tuple2<>("f", 4)
            );

            JavaRDD<Tuple2<String, Integer>> rdd = jsc.parallelize(tuple2s);

            // Identity conversion from JavaRDD<Tuple2> to JavaPairRDD so that
            // key-based operators such as reduceByKey become available.
            JavaPairRDD<String, Integer> mapToPairRdd = rdd.mapToPair(
                    new PairFunction<Tuple2<String, Integer>, String, Integer>() {
                        @Override
                        public Tuple2<String, Integer> call(Tuple2<String, Integer> tuple) throws Exception {
                            return tuple;
                        }
                    });

            // Cache BEFORE checkpointing: the checkpoint job re-runs the RDD, and a
            // cached copy lets it reuse the data instead of recomputing the lineage.
            mapToPairRdd.cache();
            // FIX: the checkpoint directory was configured but checkpoint() was never
            // invoked, so the original example never actually truncated the lineage.
            mapToPairRdd.checkpoint();

            JavaPairRDD<String, Integer> wordCountRdd = mapToPairRdd.reduceByKey(
                    new Function2<Integer, Integer, Integer>() {
                        @Override
                        public Integer call(Integer v1, Integer v2) throws Exception {
                            return v1 + v2;
                        }
                    });

            // Lineage BEFORE the action: cache/checkpoint are not yet materialized.
            System.out.println(wordCountRdd.toDebugString());
            System.out.println("*****************************************");
            // collect() is an action: it triggers the computation, materializes the
            // cache, and writes the checkpoint data.
            wordCountRdd.collect();
            // Lineage AFTER the action: now rooted at the checkpointed data.
            System.out.println(wordCountRdd.toDebugString());
            // Release the cached partitions once they are no longer needed.
            mapToPairRdd.unpersist();

        } finally {
            jsc.close();
        }
    }
}
