package com.demo.spark.rdd;

import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.function.Function;

import java.util.ArrayList;
import java.util.Comparator;
import java.util.List;
import java.util.Random;

/**
 * Demonstrates {@link org.apache.spark.api.java.JavaRDD#takeOrdered(int)}:
 * generates 100 pseudo-random ints (seeded for reproducibility), parallelizes
 * them across 3 partitions, and prints the 10 smallest in ascending order.
 */
public class RddTakeOrdered {
    /** Number of random values to generate. */
    private static final int SAMPLE_SIZE = 100;
    /** Exclusive upper bound for the generated values. */
    private static final int VALUE_BOUND = 100;
    /** How many of the smallest elements to fetch with takeOrdered. */
    private static final int TOP_N = 10;

    public static void main(String[] args) {
        // App name now matches what this example actually demonstrates.
        SparkConf config = new SparkConf().setAppName("rdd take ordered").setMaster("local");
        // JavaSparkContext is Closeable — try-with-resources guarantees the
        // context (and its resources/UI ports) is released even on failure.
        try (JavaSparkContext javaSparkContext = new JavaSparkContext(config)) {
            javaSparkContext.setLogLevel("error");

            // Fixed seed (1) keeps the output deterministic across runs.
            List<Integer> data = new ArrayList<>(SAMPLE_SIZE);
            Random random = new Random(1);
            for (int i = 0; i < SAMPLE_SIZE; i++) {
                data.add(random.nextInt(VALUE_BOUND));
            }

            JavaRDD<Integer> rdd = javaSparkContext.parallelize(data, 3);

            // takeOrdered(n) returns the n smallest elements (natural order),
            // collected to the driver as an ordered list.
            List<Integer> smallest = rdd.takeOrdered(TOP_N);
            for (Integer value : smallest) {
                System.out.println(value);
            }
        }
    }
}
