package com.sub.spark.core.rdd.operate;

import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.JavaSparkContext;
import scala.*;

import java.util.Arrays;
import java.util.List;

/**
 * @ClassName RDDKVMethod
 * @Description: Demonstrations of key/value (pair) RDD operations
 * @Author Submerge.
 * @Since 2025/5/17 23:20
 * @Version 1.0
 */
public class RDDKVMethod {

    /** Shared context, initialized in main() and used by every demo method. */
    private static JavaSparkContext javaSparkContext;

    public static void main(String[] args) {

        // Spark configuration: run locally using all available CPU cores.
        SparkConf conf = new SparkConf();
        conf.setMaster("local[*]");
        conf.setAppName("sparkCore");
        // Spark context
        javaSparkContext = new JavaSparkContext(conf);

        try {
            // mapValues
            //mapValues();

            // groupByKey
            //groupByKey();

            // reduceByKey
            //reduceByKey();

            // sortByKey
            //sortByKey();
            sortByKeyByDefine();

            // join (not implemented yet)
        } finally {
            // Release the SparkContext's resources even if a demo throws;
            // the original code never stopped the context.
            javaSparkContext.stop();
        }
    }

    /**
     * Scratch reference showing how the scala Tuple1..Tuple4 types used as
     * RDD elements are constructed. Not invoked anywhere.
     */
    private static void data() {
        Tuple1<String> tuple1 = new Tuple1<>("a");
        Tuple2<String,Integer> tuple2 = new Tuple2<>("b",1);
        Tuple3<String,Integer,String> tuple3 = new Tuple3<>("c",1,"d");
        Tuple4<String,Integer,String,String> tuple4 = new Tuple4<>("e",1,"f","g");
    }


    /**
     * mapValues:
     * For a KV-typed RDD, mapValues transforms only the value of each pair,
     * leaving the key (and the partitioning) untouched.
     * Here every value is doubled, then the result is collected and printed.
     */
    public static void mapValues() {
        Tuple2<String,Integer> tuple21 = new Tuple2<>("a",1);
        Tuple2<String,Integer> tuple22 = new Tuple2<>("b",2);
        Tuple2<String,Integer> tuple23 = new Tuple2<>("c",3);
        Tuple2<String,Integer> tuple24 = new Tuple2<>("d",4);
        Tuple2<String,Integer> tuple25 = new Tuple2<>("e",5);

        List<Tuple2<String, Integer>> tuple2s = Arrays.asList(tuple21, tuple22, tuple23, tuple24, tuple25);

        JavaPairRDD<String, Integer> pairRDD = javaSparkContext.parallelizePairs(tuple2s)
                .mapValues(data -> data * 2);

        pairRDD.collect().forEach(System.out::println);
    }

    /**
     * groupByKey:
     * For a KV-typed RDD, groupByKey gathers all values that share the same
     * key into a single Iterable per key; further aggregation can then be
     * applied to each group.
     */
    public static void groupByKey() {
        Tuple2<String,Integer> tuple21 = new Tuple2<>("a",1);
        Tuple2<String,Integer> tuple22 = new Tuple2<>("b",2);
        Tuple2<String,Integer> tuple23 = new Tuple2<>("a",3);
        Tuple2<String,Integer> tuple24 = new Tuple2<>("b",4);
        Tuple2<String,Integer> tuple25 = new Tuple2<>("b",5);

        List<Tuple2<String, Integer>> tuple2s = Arrays.asList(tuple21, tuple22, tuple23, tuple24, tuple25);

        // Fixed: the original chained a stray mapValues(data -> data * 2)
        // copy-pasted from the mapValues() demo, which doubled every value
        // before grouping and obscured what groupByKey itself does.
        JavaPairRDD<String, Integer> pairRDD = javaSparkContext.parallelizePairs(tuple2s);

        pairRDD.groupByKey().collect().forEach(System.out::println);
    }


    /**
     * reduceByKey:
     * For a KV-typed RDD, reduceByKey merges the values of each key with the
     * given associative function. Unlike groupByKey, it combines values
     * locally on each partition (map-side combine) before shuffling, which
     * moves far less data across the network.
     */
    public static void reduceByKey() {
        Tuple2<String,Integer> tuple21 = new Tuple2<>("a",1);
        Tuple2<String,Integer> tuple22 = new Tuple2<>("b",2);
        Tuple2<String,Integer> tuple23 = new Tuple2<>("a",3);
        Tuple2<String,Integer> tuple24 = new Tuple2<>("b",4);
        Tuple2<String,Integer> tuple25 = new Tuple2<>("a",5);

        List<Tuple2<String, Integer>> tuple2s = Arrays.asList(tuple21, tuple22, tuple23, tuple24, tuple25);

        // 2 partitions so the per-partition combine step is actually exercised.
        JavaPairRDD<String, Integer> pairRDD = javaSparkContext.parallelizePairs(tuple2s, 2);
        pairRDD.reduceByKey(Integer::sum).collect().forEach(System.out::println);
    }

    /**
     * sortByKey:
     * For a KV-typed RDD, sortByKey orders the records by key. The boolean
     * argument selects the direction: true = ascending, false = descending.
     * A custom ordering can be supplied by making the key type implement
     * Comparable (see sortByKeyByDefine() and the User class below).
     */
    public static void sortByKey() {
        Tuple2<String,Integer> tuple21 = new Tuple2<>("b",1);
        Tuple2<String,Integer> tuple22 = new Tuple2<>("a",2);
        Tuple2<String,Integer> tuple23 = new Tuple2<>("e",3);
        Tuple2<String,Integer> tuple24 = new Tuple2<>("c",4);
        Tuple2<String,Integer> tuple25 = new Tuple2<>("d",5);

        List<Tuple2<String, Integer>> tuple2s = Arrays.asList(tuple21, tuple22, tuple23, tuple24, tuple25);

        JavaPairRDD<String, Integer> pairRDD = javaSparkContext.parallelizePairs(tuple2s);

        // false => descending order by key
        pairRDD.sortByKey(false).collect().forEach(System.out::println);
    }


    /**
     * Sorts pairs keyed by a user-defined class: the key type (User) supplies
     * the ordering through its Comparable implementation, so no comparator
     * needs to be passed to sortByKey.
     */
    public static void sortByKeyByDefine() {

        Tuple2<User, Integer> tuple21 = new Tuple2<>(new User("张三",29),1);
        Tuple2<User, Integer> tuple22 = new Tuple2<>(new User("李四",10),2);
        Tuple2<User, Integer> tuple23 = new Tuple2<>(new User("王五",31),3);
        Tuple2<User, Integer> tuple24 = new Tuple2<>(new User("赵六",22),4);
        Tuple2<User, Integer> tuple25 = new Tuple2<>(new User("田七",23),5);

        List<Tuple2<User, Integer>> tuple2s = Arrays.asList(tuple21, tuple22, tuple23, tuple24, tuple25);

        JavaPairRDD<User, Integer> pairRDD = javaSparkContext.parallelizePairs(tuple2s);

        // false => descending by User's natural order (age), so oldest first.
        pairRDD.sortByKey(false).collect().forEach(System.out::println);
    }




    /**
     * User key class for the custom-sort demo. Implements Comparable so that
     * sortByKey can use its natural ordering, and Serializable so instances
     * can be shipped between Spark executors.
     */
    public static class User implements Comparable<User>, Serializable {
        public String name;
        public Integer age;

        public User(String name, Integer age) {
            this.name = name;
            this.age = age;
        }

        /**
         * Natural order: ascending by age. Uses Integer.compare rather than
         * the original subtraction (this.age - o.age), which can overflow for
         * extreme int values and return a wrong sign.
         * NOTE(review): ordering looks only at age, so it is not consistent
         * with equals (identity equality inherited from Object).
         */
        @Override
        public int compareTo(User o) {
            return Integer.compare(this.age, o.age);
        }

        @Override
        public String toString() {
            return "User{" +
                    "name='" + name + '\'' +
                    ", age=" + age +
                    '}';
        }
    }



}
