package com.demo.spark.rdd;

import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.function.Function;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Random;

public class RddSortBy {

    /** Number of random integers generated for the demo. */
    private static final int SAMPLE_COUNT = 100;

    /**
     * Demonstrates {@code JavaRDD#sortBy}: generates {@link #SAMPLE_COUNT} random
     * integers, maps each to a {@code "value_hash"} string, sorts the strings
     * ascending by the numeric component after the underscore, and prints the result.
     *
     * @param args unused command-line arguments
     */
    public static void main(String[] args) {
        // App name now reflects this class; the original said "rdd map partition
        // with index", a copy-paste leftover from a different demo.
        SparkConf config = new SparkConf().setAppName("rdd sort by").setMaster("local");
        JavaSparkContext javaSparkContext = new JavaSparkContext(config);
        // Log levels are conventionally upper-case; Spark parses them
        // case-insensitively, so behavior is unchanged.
        javaSparkContext.setLogLevel("ERROR");
        try {
            // Fixed seed keeps the demo output reproducible across runs.
            Random random = new Random(1);
            // Size the list for exactly what it holds (the original reserved
            // 1024*1024 slots but only ever added 100 elements).
            List<Integer> data = new ArrayList<>(SAMPLE_COUNT);
            for (int i = 0; i < SAMPLE_COUNT; i++) {
                data.add(random.nextInt(100));
            }
            // Distribute the data across 3 partitions.
            JavaRDD<Integer> rdd = javaSparkContext.parallelize(data, 3);
            // Integer#hashCode() is the int value itself, so each element
            // becomes the string "n_n".
            JavaRDD<String> mapped = rdd.map(v -> v + "_" + v.hashCode());
            // Sort ascending by the numeric part after '_', into 2 partitions.
            JavaRDD<String> sorted = mapped.sortBy(
                    (Function<String, Integer>) s -> Integer.valueOf(s.split("_")[1]),
                    true, 2);
            for (String s : sorted.collect()) {
                System.out.println(s);
            }
        } finally {
            // Always release the SparkContext so the local "cluster" shuts
            // down cleanly even if collect() throws.
            javaSparkContext.stop();
        }
    }
}
