package org.example.com.atguigu.day02;

import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.function.FlatMapFunction;
import org.apache.spark.api.java.function.Function;
import org.apache.spark.api.java.function.PairFunction;
import org.junit.Test;
import scala.Tuple2;

import java.io.Serializable;
import java.util.Arrays;
import java.util.Iterator;

/**
 * Demonstrates common Spark transformations through the Java API:
 * map, flatMap, filter, distinct, groupBy, sortBy, mapToPair,
 * mapValues, groupByKey and sortByKey.
 *
 * <p>Implements {@link Serializable} because the anonymous function
 * instances capture the enclosing class and must be shipped to executors.
 */
public class $02_testTransformation implements Serializable {

    /**
     * Builds a local-mode JavaSparkContext with the given number of worker
     * threads. The context is {@code Closeable}, so callers use
     * try-with-resources to guarantee it is stopped — leaving contexts
     * running leaks local executor threads between tests.
     *
     * @param threads number of local worker threads ("local[threads]")
     * @return a started JavaSparkContext; caller must close/stop it
     */
    private JavaSparkContext createContext(int threads) {
        SparkConf conf = new SparkConf()
                .setMaster("local[" + threads + "]")
                .setAppName("com.atguigu.day01.$01_RddCreate");
        return new JavaSparkContext(conf);
    }

    /**
     * map + filter: maps even numbers to null placeholders, then filters
     * the nulls out, keeping only the odd numbers.
     */
    @Test
    public void mapTest() {
        // 1. Create the JavaSparkContext (stopped automatically on exit).
        try (JavaSparkContext sc = createContext(3)) {
            JavaRDD<Integer> rdd1 = sc.parallelize(Arrays.asList(1, 6, 8, 9, 12, 3));
            JavaRDD<Integer> rdd2 = rdd1.map(new Function<Integer, Integer>() {
                @Override
                public Integer call(Integer v1) throws Exception {
                    // Show which executor thread processes each element.
                    System.out.println(Thread.currentThread().getName());
                    // Drop even numbers (mapped to null), keep odd ones.
                    if (v1 % 2 == 0) {
                        return null;
                    }
                    return v1;
                }
            }).filter(new Function<Integer, Boolean>() {
                @Override
                public Boolean call(Integer v1) throws Exception {
                    // Remove the null placeholders produced by the map above.
                    return v1 != null;
                }
            });
            System.out.println(rdd2.collect());
        }
    }

    /**
     * map is a 1-to-1 transformation: splitting a line inside map can only
     * emit the first word. To emit every word, use flatMap (see flatMapTest).
     */
    @Test
    public void mapTest1() {
        // 1. Create the JavaSparkContext (stopped automatically on exit).
        try (JavaSparkContext sc = createContext(3)) {
            JavaRDD<String> rdd1 = sc.parallelize(Arrays.asList("hello java", "hello map", "where"));
            JavaRDD<String> rdd2 = rdd1.map(new Function<String, String>() {
                @Override
                public String call(String v1) throws Exception {
                    // Only the first token of each line survives; map cannot
                    // produce more than one output element per input.
                    String[] words = v1.split(" ");
                    return words.length > 0 ? words[0] : null;
                }
            });
            System.out.println(rdd2.collect());
        }
    }

    /**
     * flatMap: splits each line into words and truncates every word to at
     * most its first three characters.
     */
    @Test
    public void flatMapTest() {
        // 1. Create the JavaSparkContext (stopped automatically on exit).
        try (JavaSparkContext sc = createContext(3)) {
            JavaRDD<String> rdd1 = sc.parallelize(
                    Arrays.asList("hello java", "hadoop flume kafka hello", "where hadoop"));
            JavaRDD<String> rdd2 = rdd1.flatMap(new FlatMapFunction<String, String>() {
                @Override
                public Iterator<String> call(String s) throws Exception {
                    String[] words = s.split(" ");
                    for (int i = 0; i < words.length; i++) {
                        // Math.min guards against words shorter than 3 chars,
                        // which would otherwise throw StringIndexOutOfBoundsException.
                        words[i] = words[i].substring(0, Math.min(3, words[i].length()));
                    }
                    return Arrays.asList(words).iterator();
                }
            });
            System.out.println(rdd2.collect());
        }
    }

    /**
     * groupBy: groups "name id" records by the id field (the second token),
     * producing (id, Iterable&lt;record&gt;) pairs.
     */
    @Test
    public void groupByTest() {
        // 1. Create the JavaSparkContext (stopped automatically on exit).
        try (JavaSparkContext sc = createContext(3)) {
            JavaRDD<String> rdd1 = sc.parallelize(
                    Arrays.asList("Bob 1", "Alice 3", "Tom 2", "Jack 2", "Rose 1", "Kite 2"));
            JavaPairRDD<String, Iterable<String>> rdd2 = rdd1.groupBy(new Function<String, String>() {
                @Override
                public String call(String v1) throws Exception {
                    // The grouping key is the second field of "name id".
                    return v1.split(" ")[1];
                }
            });
            System.out.println(rdd2.collect());
        }
    }

    /**
     * filter: drops even numbers, keeps odd ones.
     */
    @Test
    public void filterTest() {
        // 1. Create the JavaSparkContext (stopped automatically on exit).
        try (JavaSparkContext sc = createContext(3)) {
            JavaRDD<Integer> rdd1 = sc.parallelize(Arrays.asList(1, 4, 5, 6, 7, 8, 89, 11, 24));
            // Keep an element only when the predicate returns true.
            JavaRDD<Integer> rdd2 = rdd1.filter(new Function<Integer, Boolean>() {
                @Override
                public Boolean call(Integer v1) throws Exception {
                    return v1 % 2 != 0;
                }
            });
            System.out.println(rdd2.collect());
        }
    }

    /**
     * distinct: removes duplicate elements (the sample data has none, so the
     * output matches the input up to ordering).
     */
    @Test
    public void distinctTest() {
        // 1. Create the JavaSparkContext (stopped automatically on exit).
        try (JavaSparkContext sc = createContext(3)) {
            JavaRDD<Integer> rdd1 = sc.parallelize(Arrays.asList(1, 4, 5, 6, 7, 8, 89, 11, 24));
            JavaRDD<Integer> rdd2 = rdd1.distinct();
            System.out.println(rdd2.collect());
        }
    }

    /**
     * sortBy: sorts "name id" records by the numeric id field in descending
     * order (ascending = false) across 4 partitions.
     */
    @Test
    public void sortByTest() {
        // 1. Create the JavaSparkContext (stopped automatically on exit).
        try (JavaSparkContext sc = createContext(3)) {
            JavaRDD<String> rdd1 = sc.parallelize(
                    Arrays.asList("Bob 1", "Alice 3", "Tom 2", "Jack 2", "Rose 1", "Kite 2"));
            JavaRDD<String> rdd2 = rdd1.sortBy(new Function<String, Integer>() {
                @Override
                public Integer call(String v1) throws Exception {
                    // Sort key: the numeric second field of "name id".
                    return Integer.parseInt(v1.split(" ")[1]);
                }
            }, false, 4);
            System.out.println(rdd2.collect());
        }
    }

    /**
     * Transforms "name age" strings into (name, age) pairs, then scales every
     * value by 100. mapToPair is used here; mapValues would achieve the same
     * value-only transformation without re-stating the key.
     */
    @Test
    public void mapValuesTest() {
        // 1. Create the JavaSparkContext (stopped automatically on exit).
        try (JavaSparkContext sc = createContext(4)) {
            JavaRDD<String> rdd1 = sc.parallelize(Arrays.asList("Bob 19", "Alice 23", "Tom 22"));
            JavaPairRDD<String, Integer> rdd2 = rdd1.mapToPair(new PairFunction<String, String, Integer>() {
                @Override
                public Tuple2<String, Integer> call(String s) throws Exception {
                    String[] fields = s.split(" ");
                    return new Tuple2<>(fields[0], Integer.parseInt(fields[1]));
                }
            });
            JavaPairRDD<String, Integer> rdd3 = rdd2.mapToPair(new PairFunction<Tuple2<String, Integer>, String, Integer>() {
                @Override
                public Tuple2<String, Integer> call(Tuple2<String, Integer> v1) throws Exception {
                    // Keep the key, multiply the value by 100.
                    return new Tuple2<>(v1._1, v1._2 * 100);
                }
            });
            System.out.println(rdd3.collect());
        }
    }

    /**
     * Groups names by gender. Instead of the direct groupByKey(), this uses
     * groupBy on the pair's key to show the equivalence, then mapValues to
     * join the grouped names into a single string per gender.
     */
    @Test
    public void groupByKeyTest() {
        // 1. Create the JavaSparkContext (stopped automatically on exit).
        try (JavaSparkContext sc = createContext(4)) {
            JavaRDD<String> rdd1 = sc.parallelize(
                    Arrays.asList("Alice woman", "Bob man", "Kite woman", "Tao none", "Tom man", "Jim man"));
            // Key by gender, value is the name.
            JavaPairRDD<String, String> rdd2 = rdd1.mapToPair(new PairFunction<String, String, String>() {
                @Override
                public Tuple2<String, String> call(String s) throws Exception {
                    String[] fields = s.split(" ");
                    return new Tuple2<>(fields[1], fields[0]);
                }
            });
            System.out.println(rdd2.collect());

            // groupBy on the tuple's key also groups — equivalent in effect to
            // rdd2.groupByKey(), but each group holds whole tuples.
            JavaPairRDD<String, Iterable<Tuple2<String, String>>> rdd3 = rdd2.groupBy(new Function<Tuple2<String, String>, String>() {
                @Override
                public String call(Tuple2<String, String> v1) throws Exception {
                    return v1._1();
                }
            });
            JavaPairRDD<String, String> rdd4 = rdd3.mapValues(new Function<Iterable<Tuple2<String, String>>, String>() {
                @Override
                public String call(Iterable<Tuple2<String, String>> v1) throws Exception {
                    // e.g. v1 = [(man,Bob), (man,Tom), (man,Jim)] -> "Bob Tom Jim "
                    StringBuilder names = new StringBuilder();
                    for (Tuple2<String, String> pair : v1) {
                        names.append(pair._2).append(" ");
                    }
                    return names.toString();
                }
            });
            System.out.println(rdd4.collect());
        }
    }

    /**
     * sortByKey: sorts (name, age) pairs by name in ascending order
     * (String's natural ordering).
     */
    @Test
    public void sortByKeyTest() {
        // 1. Create the JavaSparkContext (stopped automatically on exit).
        try (JavaSparkContext sc = createContext(4)) {
            JavaRDD<String> rdd1 = sc.parallelize(Arrays.asList("Bob 19", "Alice 23", "Tom 22"));
            JavaPairRDD<String, Integer> rdd2 = rdd1.mapToPair(new PairFunction<String, String, Integer>() {
                @Override
                public Tuple2<String, Integer> call(String s) throws Exception {
                    String[] fields = s.split(" ");
                    return new Tuple2<>(fields[0], Integer.parseInt(fields[1]));
                }
            });
            System.out.println(rdd2.collect());
            JavaPairRDD<String, Integer> rdd3 = rdd2.sortByKey(true);
            System.out.println(rdd3.collect());
        }
    }

    /**
     * sortByKey with the age as the key: sorts (age, name) pairs numerically
     * by age. Duplicate keys (the two 22s) are both kept.
     */
    @Test
    public void sortByKeyTest1() {
        // 1. Create the JavaSparkContext (stopped automatically on exit).
        try (JavaSparkContext sc = createContext(4)) {
            JavaRDD<String> rdd1 = sc.parallelize(Arrays.asList("Bob 19", "Alice 23", "Tom 22", "Cate 22"));
            JavaPairRDD<Integer, String> rdd2 = rdd1.mapToPair(new PairFunction<String, Integer, String>() {
                @Override
                public Tuple2<Integer, String> call(String s) throws Exception {
                    String[] fields = s.split(" ");
                    return new Tuple2<>(Integer.parseInt(fields[1]), fields[0]);
                }
            });
            System.out.println(rdd2.sortByKey().collect());
        }
    }
}
