package com.etc.java;

import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.function.*;
import scala.Tuple2;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Iterator;
import java.util.List;

/**
 * Demonstrates common Spark RDD transformation operators via the Java API.
 *
 * @author 杰哥
 * @Title: TransformationOperation
 * @ProjectName scalaworldcount
 * @Description: examples of map/filter/flatMap/groupByKey/reduceByKey/sortByKey/
 *               join/cogroup/mapPartitions/union/cartesian/groupBy/distinct/
 *               subtract/sample
 * @date 2019/7/15 17:22
 */
public class TransformationOperation {

    /**
     * map: applies a function to every element of the RDD.
     * Here: doubles each integer and prints the results.
     */
    private static void map() {
        SparkConf conf = new SparkConf().setAppName("map").setMaster("local");
        JavaSparkContext jsc = new JavaSparkContext(conf);
        List<Integer> list = Arrays.asList(1, 2, 3, 4, 5, 6, 7);
        JavaRDD<Integer> parallelize = jsc.parallelize(list);
        JavaRDD<Integer> doubled = parallelize.map(new Function<Integer, Integer>() {
            @Override
            public Integer call(Integer it) throws Exception {
                return it * 2;
            }
        });
        doubled.foreach(new VoidFunction<Integer>() {
            @Override
            public void call(Integer integer) throws Exception {
                System.out.println(integer);
            }
        });
        // Release the SparkContext (was leaked in the original version).
        jsc.close();
    }

    /**
     * filter: keeps only elements matching a predicate.
     * Here: keeps the even numbers.
     */
    private static void filter() {
        SparkConf conf = new SparkConf().setAppName("filter").setMaster("local");
        JavaSparkContext jsc = new JavaSparkContext(conf);
        List<Integer> list = Arrays.asList(1, 2, 3, 4, 5, 6, 7, 8, 9, 10);
        JavaRDD<Integer> parallelize = jsc.parallelize(list);
        JavaRDD<Integer> evens = parallelize.filter(new Function<Integer, Boolean>() {
            @Override
            public Boolean call(Integer integer) throws Exception {
                return integer % 2 == 0;
            }
        });
        evens.foreach(new VoidFunction<Integer>() {
            @Override
            public void call(Integer integer) throws Exception {
                System.out.println(integer);
            }
        });
        jsc.close();
    }

    /**
     * flatMap: maps each element to zero or more output elements.
     * Here: splits each line into words.
     */
    private static void flatMap() {
        SparkConf conf = new SparkConf().setAppName("flatMap").setMaster("local");
        JavaSparkContext jsc = new JavaSparkContext(conf);
        List<String> list = Arrays.asList("hello jieGe", "hello jieSao", "hello Jie");
        JavaRDD<String> parallelize = jsc.parallelize(list);
        JavaRDD<String> words = parallelize.flatMap(new FlatMapFunction<String, String>() {
            @Override
            public Iterator<String> call(String s) throws Exception {
                return Arrays.asList(s.split(" ")).iterator();
            }
        });
        words.foreach(new VoidFunction<String>() {
            @Override
            public void call(String s) throws Exception {
                System.out.println(s);
            }
        });
        jsc.close();
    }

    /**
     * groupByKey: groups all values sharing a key into one Iterable.
     * Here: groups scores by class name and prints each group.
     */
    private static void groupByKey() {
        SparkConf conf = new SparkConf().setAppName("groupByKey").setMaster("local");
        JavaSparkContext jsc = new JavaSparkContext(conf);
        List<Tuple2<String, Integer>> tuple2s = Arrays.asList(
                new Tuple2<String, Integer>("class1", 50),
                new Tuple2<String, Integer>("class2", 45),
                new Tuple2<String, Integer>("class1", 60),
                new Tuple2<String, Integer>("class2", 65)
        );
        JavaPairRDD<String, Integer> pairRDD = jsc.parallelizePairs(tuple2s);
        // Renamed local (was "JavaPairRDD", which shadowed the type name).
        JavaPairRDD<String, Iterable<Integer>> grouped = pairRDD.groupByKey();
        grouped.foreach(new VoidFunction<Tuple2<String, Iterable<Integer>>>() {
            @Override
            public void call(Tuple2<String, Iterable<Integer>> t) throws Exception {
                System.out.println("class" + "\t" + t._1);
                Iterator<Integer> iterator = t._2.iterator();
                while (iterator.hasNext()) {
                    System.out.println(iterator.next());
                }
                System.out.println("====================");
            }
        });
        jsc.close();
    }

    /**
     * reduceByKey: merges all values for a key with an associative function.
     * Here: sums the scores of each class.
     */
    private static void reduceByKey() {
        SparkConf conf = new SparkConf().setAppName("reduceByKey").setMaster("local");
        JavaSparkContext jsc = new JavaSparkContext(conf);
        List<Tuple2<String, Integer>> tuple2s = Arrays.asList(
                new Tuple2<String, Integer>("class1", 80),
                new Tuple2<String, Integer>("class2", 75),
                new Tuple2<String, Integer>("class1", 90),
                new Tuple2<String, Integer>("class2", 65)
        );
        JavaPairRDD<String, Integer> pairRDD = jsc.parallelizePairs(tuple2s);
        JavaPairRDD<String, Integer> summed = pairRDD.reduceByKey(new Function2<Integer, Integer, Integer>() {
            @Override
            public Integer call(Integer t1, Integer t2) throws Exception {
                return t1 + t2;
            }
        });
        summed.foreach(new VoidFunction<Tuple2<String, Integer>>() {
            @Override
            public void call(Tuple2<String, Integer> s) throws Exception {
                System.out.println(s._1 + "\t" + s._2);
            }
        });
        jsc.close();
    }

    /**
     * sortByKey: returns the RDD sorted by key.
     * Here: sorts scores in descending order (argument false).
     */
    private static void sortByKey() {
        SparkConf conf = new SparkConf().setAppName("sortByKey").setMaster("local");
        JavaSparkContext jsc = new JavaSparkContext(conf);
        List<Tuple2<Integer, String>> list = Arrays.asList(
                new Tuple2<Integer, String>(65, "leo"),
                new Tuple2<Integer, String>(50, "tom"),
                new Tuple2<Integer, String>(100, "marry"),
                new Tuple2<Integer, String>(80, "jack"));
        // Renamed locals (originals shadowed the JavaPairRDD type name and
        // forced a fully-qualified type reference).
        JavaPairRDD<Integer, String> pairRDD = jsc.parallelizePairs(list);
        JavaPairRDD<Integer, String> sorted = pairRDD.sortByKey(false);
        sorted.foreach(new VoidFunction<Tuple2<Integer, String>>() {
            @Override
            public void call(Tuple2<Integer, String> i) throws Exception {
                System.out.println(i._1 + "\t" + i._2);
            }
        });
        jsc.close();
    }

    /**
     * join: inner-joins two pair RDDs on their keys.
     * Here: joins student names with their scores by student id.
     */
    private static void join() {
        SparkConf conf = new SparkConf().setAppName("join").setMaster("local");
        JavaSparkContext jsc = new JavaSparkContext(conf);
        List<Tuple2<Integer, String>> studentList = Arrays.asList(
                new Tuple2<Integer, String>(1, "leo"),
                new Tuple2<Integer, String>(2, "jack"),
                new Tuple2<Integer, String>(3, "tom"));

        List<Tuple2<Integer, Integer>> scoreList = Arrays.asList(
                new Tuple2<Integer, Integer>(1, 100),
                new Tuple2<Integer, Integer>(2, 90),
                new Tuple2<Integer, Integer>(3, 60));
        JavaPairRDD<Integer, String> student = jsc.parallelizePairs(studentList);
        JavaPairRDD<Integer, Integer> score = jsc.parallelizePairs(scoreList);
        JavaPairRDD<Integer, Tuple2<String, Integer>> join = student.join(score);
        join.foreach(new VoidFunction<Tuple2<Integer, Tuple2<String, Integer>>>() {
            @Override
            public void call(Tuple2<Integer, Tuple2<String, Integer>> a) throws Exception {
                System.out.println(a._1);
                System.out.println(a._2._1);
                System.out.println(a._2._2);
                System.out.println("=================");
            }
        });
        jsc.close();
    }

    /**
     * cogroup: for each key, groups ALL values from both RDDs into a pair
     * of Iterables (unlike join, which emits one pair per value combination).
     */
    private static void coGroup() {
        SparkConf conf = new SparkConf().setAppName("coGroup").setMaster("local");
        JavaSparkContext jsc = new JavaSparkContext(conf);
        // Mock data sets.
        List<Tuple2<Integer, String>> studentList = Arrays.asList(
                new Tuple2<Integer, String>(1, "jie"),
                new Tuple2<Integer, String>(2, "jack"),
                new Tuple2<Integer, String>(3, "tom"));

        List<Tuple2<Integer, Integer>> scoreList = Arrays.asList(
                new Tuple2<Integer, Integer>(1, 100),
                new Tuple2<Integer, Integer>(2, 90),
                new Tuple2<Integer, Integer>(3, 60),
                new Tuple2<Integer, Integer>(1, 70),
                new Tuple2<Integer, Integer>(2, 80),
                new Tuple2<Integer, Integer>(3, 50));
        JavaPairRDD<Integer, String> student = jsc.parallelizePairs(studentList);
        JavaPairRDD<Integer, Integer> score = jsc.parallelizePairs(scoreList);
        JavaPairRDD<Integer, Tuple2<Iterable<String>, Iterable<Integer>>> cogroup = student.cogroup(score);
        cogroup.foreach(new VoidFunction<Tuple2<Integer, Tuple2<Iterable<String>, Iterable<Integer>>>>() {
            @Override
            public void call(Tuple2<Integer, Tuple2<Iterable<String>, Iterable<Integer>>> t) throws Exception {
                System.out.println("====================");
                System.out.println(t._1);
                // Consistent field-style accessors (original mixed t._2 and t._2()).
                Iterator<String> names = t._2._1.iterator();
                Iterator<Integer> scores = t._2._2.iterator();
                while (names.hasNext()) {
                    System.out.println(names.next());
                }
                while (scores.hasNext()) {
                    System.out.println(scores.next());
                }
            }
        });
        jsc.close();
    }

    /**
     * mapPartitions: like map, but the function is invoked once per partition
     * with an iterator over that partition's elements — useful for amortising
     * per-partition setup cost. (The original version never actually called
     * mapPartitions; it only ran a plain foreach.)
     */
    private static void mapPartitions() {
        SparkConf conf = new SparkConf().setAppName("mapPartitions").setMaster("local");
        JavaSparkContext jsc = new JavaSparkContext(conf);
        List<Integer> list = Arrays.asList(1, 2, 3, 4, 5, 6, 7, 8, 9, 10);
        JavaRDD<Integer> parallelize = jsc.parallelize(list, 2);
        JavaRDD<Integer> doubled = parallelize.mapPartitions(new FlatMapFunction<Iterator<Integer>, Integer>() {
            @Override
            public Iterator<Integer> call(Iterator<Integer> partition) throws Exception {
                // One invocation per partition: transform the whole partition at once.
                List<Integer> out = new ArrayList<Integer>();
                while (partition.hasNext()) {
                    out.add(partition.next() * 2);
                }
                return out.iterator();
            }
        });
        doubled.foreach(new VoidFunction<Integer>() {
            @Override
            public void call(Integer integer) throws Exception {
                System.out.println(integer);
            }
        });
        jsc.close();
    }

    /**
     * union: concatenates two RDDs (duplicates are kept).
     */
    private static void union() {
        // App name fixed: was copy-pasted as "mapPartitions".
        SparkConf conf = new SparkConf().setAppName("union").setMaster("local");
        JavaSparkContext jsc = new JavaSparkContext(conf);
        List<Integer> list = Arrays.asList(1, 2, 3, 4, 5);
        List<Integer> list1 = Arrays.asList(6, 7, 8, 9, 10);
        JavaRDD<Integer> union = jsc.parallelize(list).union(jsc.parallelize(list1));
        union.foreach(new VoidFunction<Integer>() {
            @Override
            public void call(Integer integer) throws Exception {
                System.out.println(integer);
            }
        });
        jsc.close();
    }

    /**
     * cartesian: produces every (a, b) pair across the two RDDs.
     */
    private static void cartesian() {
        SparkConf conf = new SparkConf().setAppName("cartesian").setMaster("local");
        JavaSparkContext jsc = new JavaSparkContext(conf);
        List<String> list1 = Arrays.asList("A", "B", "C");
        List<Integer> list2 = Arrays.asList(1, 2, 3, 4, 5);
        JavaPairRDD<String, Integer> cartesian = jsc.parallelize(list1).cartesian(jsc.parallelize(list2));
        cartesian.foreach(new VoidFunction<Tuple2<String, Integer>>() {
            @Override
            public void call(Tuple2<String, Integer> v) throws Exception {
                System.out.println(v._1 + "\t" + v._2);
            }
        });
        jsc.close();
    }

    /**
     * groupBy: groups elements by the result of a classifier function.
     * Here: partitions the numbers into even (true) and odd (false) groups.
     * (The original version was an empty stub.)
     */
    private static void groupBy() {
        SparkConf conf = new SparkConf().setAppName("groupBy").setMaster("local");
        JavaSparkContext jsc = new JavaSparkContext(conf);
        List<Integer> list = Arrays.asList(1, 2, 3, 4, 5, 6, 7, 8, 9, 10);
        JavaRDD<Integer> parallelize = jsc.parallelize(list);
        JavaPairRDD<Boolean, Iterable<Integer>> grouped = parallelize.groupBy(new Function<Integer, Boolean>() {
            @Override
            public Boolean call(Integer integer) throws Exception {
                return integer % 2 == 0;
            }
        });
        grouped.foreach(new VoidFunction<Tuple2<Boolean, Iterable<Integer>>>() {
            @Override
            public void call(Tuple2<Boolean, Iterable<Integer>> t) throws Exception {
                System.out.println(t._1 + "\t" + t._2);
            }
        });
        jsc.close();
    }

    /**
     * distinct: removes duplicate elements.
     */
    private static void distinct() {
        // App name fixed: was copy-pasted as "groupBy".
        SparkConf conf = new SparkConf().setAppName("distinct").setMaster("local");
        JavaSparkContext jsc = new JavaSparkContext(conf);
        List<Integer> list = Arrays.asList(1, 1, 3, 3, 5, 5, 8, 8, 9, 10);
        JavaRDD<Integer> distinct = jsc.parallelize(list).distinct();
        distinct.foreach(new VoidFunction<Integer>() {
            @Override
            public void call(Integer integer) throws Exception {
                System.out.println(integer);
            }
        });
        jsc.close();
    }

    /**
     * subtract: removes from the first RDD every element present in the second.
     */
    private static void subtract() {
        // App name fixed: was copy-pasted as "groupBy".
        SparkConf conf = new SparkConf().setAppName("subtract").setMaster("local");
        JavaSparkContext jsc = new JavaSparkContext(conf);
        List<Integer> list1 = Arrays.asList(1, 2, 3, 4, 5, 6, 7, 8, 9);
        List<Integer> list2 = Arrays.asList(1, 3, 5, 7, 9);
        JavaRDD<Integer> subtract = jsc.parallelize(list1).subtract(jsc.parallelize(list2));
        subtract.foreach(new VoidFunction<Integer>() {
            @Override
            public void call(Integer integer) throws Exception {
                System.out.println(integer);
            }
        });
        jsc.close();
    }

    /**
     * sample: draws a random sample of the RDD.
     * Here: samples roughly half the elements without replacement; the fixed
     * seed makes the result reproducible.
     * (Method name kept as-is for compatibility; note it violates the
     * lowerCamelCase convention. The original version was an empty stub.)
     */
    private static void Sample() {
        SparkConf conf = new SparkConf().setAppName("sample").setMaster("local");
        JavaSparkContext jsc = new JavaSparkContext(conf);
        List<Integer> list = Arrays.asList(1, 2, 3, 4, 5, 6, 7, 8, 9, 10);
        JavaRDD<Integer> sampled = jsc.parallelize(list).sample(false, 0.5, 42L);
        sampled.foreach(new VoidFunction<Integer>() {
            @Override
            public void call(Integer integer) throws Exception {
                System.out.println(integer);
            }
        });
        jsc.close();
    }

    /**
     * Entry point — uncomment the operator demo to run.
     */
    public static void main(String[] args) throws IOException {

//        map();
//        filter();
//        flatMap();
//        groupByKey();
//        reduceByKey();
//        sortByKey();
//        join();
//        coGroup();
//        mapPartitions();
//        union();
//        cartesian();
//        distinct();
        subtract();

    }
}
