package org.example.com.atguigu.day02;

import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.junit.Test;

import java.util.Arrays;

/**
 * Demonstrates how Spark decides the number of RDD partitions for
 * different creation paths (collection, text file, derived RDD).
 *
 * <p>Each test builds its own local SparkContext. The context is opened in a
 * try-with-resources block ({@code JavaSparkContext} implements
 * {@code Closeable}) so it is always stopped — the original code leaked the
 * context, which can make subsequent tests in the same JVM fail with
 * "Only one SparkContext may be running in this JVM".
 */
public class $01_testPartitions {

    /**
     * Partition count when parallelizing an in-memory collection with no
     * explicit partition count: defaults to the scheduler's parallelism
     * (here the 4 local cores from {@code local[4]}).
     */
    @Test
    public void createRddByCollections(){
        SparkConf conf = new SparkConf().setMaster("local[4]").setAppName("com.atguigu.day01.$01_RddCreate");
        // try-with-resources guarantees sc.stop() even if an assertion throws
        try (JavaSparkContext sc = new JavaSparkContext(conf)) {
            JavaRDD<Integer> rdd1 = sc.parallelize(Arrays.asList(1, 7, 2, 1, 5));
            System.out.println(rdd1.getNumPartitions());
        }
    }

    /**
     * Partition count when reading a text file with an explicit minimum of
     * 4 partitions; the actual count may exceed the minimum depending on
     * Hadoop input-split sizing.
     */
    @Test
    public void createRddByFile(){
        SparkConf conf = new SparkConf().setMaster("local[4]").setAppName("com.atguigu.day01.$01_RddCreate");
        try (JavaSparkContext sc = new JavaSparkContext(conf)) {
            JavaRDD<String> rdd = sc.textFile("datas/wc.txt", 4);
            System.out.println(rdd.getNumPartitions());
        }
    }

    /**
     * Partition count of a derived RDD: a narrow transformation such as
     * {@code map} keeps the parent's partition count (1 here).
     */
    @Test
    public void createRddByRdd(){
        SparkConf conf = new SparkConf().setMaster("local[4]").setAppName("com.atguigu.day01.$01_RddCreate");
        try (JavaSparkContext sc = new JavaSparkContext(conf)) {
            JavaRDD<String> rdd = sc.textFile("datas/wc.txt", 1);

            // identity map — only here to show partition inheritance
            JavaRDD<String> rdd1 = rdd.map(line -> line);
            System.out.println(rdd1.getNumPartitions());
        }
    }
}
