package net.lzzy.spark;

import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.function.Function;
import org.apache.spark.sql.sources.In;

import java.util.Arrays;
import java.util.List;

/**
 * Demo of {@code JavaRDD.sortBy}: builds a tiny RDD of {@link User} objects
 * and prints them sorted by age in ascending order.
 */
public class SparkSortBy_Demo {
    // NOTE(review): unused field, kept only for interface compatibility —
    // remove once confirmed nothing else references it.
    public static String test;

    /**
     * Entry point.
     *
     * @param args args[0] is the Spark master URL (e.g. {@code local[2]})
     * @throws IllegalArgumentException if no master URL is supplied
     */
    public static void main(String[] args) {
        // Fail fast with a clear message instead of an opaque
        // ArrayIndexOutOfBoundsException when the master URL is missing.
        if (args.length < 1) {
            throw new IllegalArgumentException("Usage: SparkSortBy_Demo <spark-master-url>");
        }
        SparkConf conf = new SparkConf().setAppName("SparkTextFile").setMaster(args[0]);
        // try-with-resources: JavaSparkContext implements Closeable, so the
        // context is stopped even if the job throws (original leaked it).
        try (JavaSparkContext javaSparkContext = new JavaSparkContext(conf)) {
            User user1 = new User("张三", '男', 18);
            User user2 = new User("李四", '男', 17);
            User user3 = new User("王五", '男', 20);
            List<User> list = Arrays.asList(user1, user2, user3);
            JavaRDD<User> javaRDD = javaSparkContext.parallelize(list, 2);
            // Sort ascending (second arg = true) by age, preserving the
            // input RDD's partition count. Method reference replaces the
            // anonymous Function class — same serialized behavior.
            JavaRDD<User> javaRDD1 = javaRDD.sortBy(
                    (Function<User, Integer>) User::getOlder,
                    true,
                    javaRDD.getNumPartitions());
            List<User> collect = javaRDD1.collect();
            System.out.println(collect.toString());
        }
    }
}
