package com.catmiao.rdd.instance;


import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;

import java.util.Arrays;
import java.util.List;

/**
 * Demonstrates how Spark distributes an in-memory collection across partitions
 * when calling {@code parallelize(data, numSlices)}.
 *
 * <p>For a collection of length {@code len} split into {@code numSlices} partitions,
 * partition {@code i} receives the elements in the half-open index range:
 * <pre>
 *   start = (i * len) / numSlices
 *   end   = ((i + 1) * len) / numSlices
 * </pre>
 *
 * <p>Example: len = 6, numSlices = 4:
 * <pre>
 *   partition 0: (0*6)/4 .. (1*6)/4  =&gt; [0, 1)  =&gt; {1}
 *   partition 1: (1*6)/4 .. (2*6)/4  =&gt; [1, 3)  =&gt; {2, 3}
 *   partition 2: (2*6)/4 .. (3*6)/4  =&gt; [3, 4)  =&gt; {4}
 *   partition 3: (3*6)/4 .. (4*6)/4  =&gt; [4, 6)  =&gt; {5, 6}
 * </pre>
 *
 * The result is written as one text file per partition under {@code output/test1},
 * so the partition boundaries can be inspected on disk.
 */
public class Spark04_Memory_Partition_Data {

    public static void main(String[] args) {

        final SparkConf sparkConf = new SparkConf();
        sparkConf.setMaster("local[*]");
        sparkConf.setAppName("spark");

        // JavaSparkContext is Closeable; try-with-resources guarantees it is
        // closed even if saveAsTextFile throws (e.g. when the output directory
        // "output/test1" already exists, which Spark rejects).
        try (JavaSparkContext sc = new JavaSparkContext(sparkConf)) {

            final List<Integer> data = Arrays.asList(1, 2, 3, 4, 5, 6);

            // 6 elements into 4 slices — see the class Javadoc for the
            // per-partition index ranges produced by Spark's slice formula.
            final JavaRDD<Integer> rdd = sc.parallelize(data, 4);

            // One output file per partition (part-00000 .. part-00003).
            rdd.saveAsTextFile("output/test1");
        }
    }
}
