package spark_core.operate_transform.singlevalue;

import java.util.Arrays;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;

/**
 * Demonstrates {@code coalesce}, which reduces the number of partitions.
 * By default it simply merges partitions without a shuffle;
 * pass {@code true} as the second argument to force a shuffle.
 *
 * @author shihb
 * @since 2020/1/7
 */
public class CoalesceDemo {

  /**
   * Entry point: builds a 4-partition RDD, coalesces it to 3 partitions
   * (with shuffle) and prints the partition count before and after.
   *
   * @param args unused command-line arguments
   */
  public static void main(String[] args) {
    // Local mode: configure the Spark deployment environment.
    SparkConf sparkConf = new SparkConf().setMaster("local[*]").setAppName("mark rdd");
    // Create the Spark context (Java API wrapper). Closed in finally so the
    // context is always stopped, even if the job throws.
    JavaSparkContext sc = new JavaSparkContext(sparkConf);
    try {
      JavaRDD<Integer> arrayRdd = sc.parallelize(Arrays.asList(1, 2, 3, 4, 1, 2), 4);
      System.out.println(arrayRdd.partitions().size());

      // coalesce(3) alone would merge partitions without a shuffle (the default);
      // passing true as the second argument forces a shuffle, which also allows
      // increasing the partition count.
      JavaRDD<Integer> coalesceRdd = arrayRdd.coalesce(3, true);
      System.out.println(coalesceRdd.partitions().size());
    } finally {
      // Always release the Spark context.
      sc.stop();
    }
  }
}
