package com.scala;

import com.google.common.base.Optional;
import org.apache.avro.generic.GenericData;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.function.FlatMapFunction;
import org.apache.spark.api.java.function.Function;
import org.apache.spark.api.java.function.VoidFunction;
import scala.Tuple2;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Iterator;
import java.util.List;
import java.util.function.Consumer;

/**
 * Spark Java RDD lesson: small, self-contained demos of the core pair-RDD
 * transformations (distinct, join family, union, intersection, subtract,
 * cogroup, mapPartitions). Each {@code testN} method receives a shared
 * {@link JavaSparkContext} and prints its result via actions.
 */
public class Java_Lesson1 {

    /**
     * Entry point: builds a local SparkContext, de-duplicates a small string
     * list with {@code distinct()} and prints each surviving element.
     */
    public static void main(String[] args) {
        SparkConf conf = new SparkConf();
        conf.setMaster("local");
        conf.setAppName("test");
        JavaSparkContext sc = new JavaSparkContext(conf);
        try {
            List<String> datas = Arrays.asList("a", "b", "c", "d", "a", "b", "c", "d", "b", "c", "d");

            JavaRDD<String> rdd = sc.parallelize(datas);
            JavaRDD<String> distinct = rdd.distinct();
            distinct.foreach(new VoidFunction<String>() {
                @Override
                public void call(String s) throws Exception {
                    System.out.println("s = " + s);
                }
            });
        } finally {
            // Release the context even if a job above throws.
            sc.close();
        }
    }

    /**
     * mapPartitions demo: the function is invoked once per partition with an
     * iterator over that partition's records — the right place to open a
     * per-partition resource such as a database connection (contrast with the
     * commented-out per-record {@code map} below).
     * @param sc shared Spark context
     */
    void test10(JavaSparkContext sc) {
        List<Tuple2<String, String>> datas1 = Arrays.asList(new Tuple2<String, String>("zhangsan", "124")
                , new Tuple2<String, String>("lisi", "124")
                , new Tuple2<String, String>("zhaosi", "234")
        );
        // Build a K,V-format RDD.
        JavaPairRDD<String, String> rdd1 = sc.parallelizePairs(datas1);

        JavaRDD<Object> javaRDD = rdd1.mapPartitions(new FlatMapFunction<Iterator<Tuple2<String, String>>, Object>() {
            @Override
            public Iterable<Object> call(Iterator<Tuple2<String, String>> tuple) throws Exception {
                // The buffer MUST be local to call(). As a field of the
                // function object (as originally written) it is reused across
                // every partition the same instance processes, so values from
                // earlier partitions leak into — and are re-emitted with —
                // later ones.
                final List<Object> values = new ArrayList<>();
                tuple.forEachRemaining(new Consumer<Tuple2<String, String>>() {
                    @Override
                    public void accept(Tuple2<String, String> elems) {
                        values.add(elems._2);
                    }
                });
                return values;
            }
        });
        javaRDD.collect();

//        JavaRDD<Object> map = rdd1.map(new Function<Tuple2<String, String>, Object>() {
//            @Override
//            public Object call(Tuple2<String, String> s) throws Exception {
//                // Opens one connection per record — not advisable.
//                System.out.println("创建数据库连接"+s);
//                return s;
//            }
//        });
//        map.collect();
    }

    /**
     * cogroup demo: for each key, gathers the values from both RDDs into a
     * pair of collections (Iterable per side).
     * @param sc shared Spark context
     */
    void test9(JavaSparkContext sc) {
        List<Tuple2<String, String>> datas1 = Arrays.asList(new Tuple2<String, String>("zhangsan", "124")
                , new Tuple2<String, String>("lisi", "124")
                , new Tuple2<String, String>("zhaosi", "234")
        );
        List<Tuple2<String, String>> datas2 = Arrays.asList(new Tuple2<String, String>("zhangsan", "12")
                , new Tuple2<String, String>("lisi", "124")
                , new Tuple2<String, String>("wangwu", "14")
        );
        // Build K,V-format RDDs.
        JavaPairRDD<String, String> rdd1 = sc.parallelizePairs(datas1, 2);
        JavaPairRDD<String, String> rdd2 = sc.parallelizePairs(datas2);

        // Both sides share the same <String,String> element type.
        JavaPairRDD<String, Tuple2<Iterable<String>, Iterable<String>>> cogroup = rdd1.cogroup(rdd2);

        System.out.println("length = " + cogroup.collect().size());
        cogroup.foreach(new VoidFunction<Tuple2<String, Tuple2<Iterable<String>, Iterable<String>>>>() {
            @Override
            public void call(Tuple2<String, Tuple2<Iterable<String>, Iterable<String>>> tuple) throws Exception {
                System.out.println(tuple);
            }
        });

    }

    /**
     * Set difference demo: {@code A.subtract(B)} keeps the pairs of A that do
     * not appear (key AND value equal) in B.
     * @param sc shared Spark context
     */
    void test8(JavaSparkContext sc) {
        List<Tuple2<String, String>> datas1 = Arrays.asList(new Tuple2<String, String>("zhangsan", "124")
                , new Tuple2<String, String>("lisi", "124")
                , new Tuple2<String, String>("zhaosi", "234")
        );
        List<Tuple2<String, String>> datas2 = Arrays.asList(new Tuple2<String, String>("zhangsan", "12")
                , new Tuple2<String, String>("lisi", "124")
                , new Tuple2<String, String>("wangwu", "14")
        );
        // Build K,V-format RDDs.
        JavaPairRDD<String, String> rdd1 = sc.parallelizePairs(datas1, 2);
        JavaPairRDD<String, String> rdd2 = sc.parallelizePairs(datas2);

        // Both sides share the same <String,String> element type.
        JavaPairRDD<String, String> subtract = rdd1.subtract(rdd2);

        System.out.println("length = " + subtract.collect().size());
        subtract.foreach(new VoidFunction<Tuple2<String, String>>() {
            @Override
            public void call(Tuple2<String, String> tuple) throws Exception {
                System.out.println(tuple);
            }
        });
    }

    /**
     * intersection demo: keeps only the pairs present in both RDDs, i.e.
     * where key AND value are equal.
     * @param sc shared Spark context
     */
    void test7(JavaSparkContext sc) {
        List<Tuple2<String, String>> datas1 = Arrays.asList(new Tuple2<String, String>("zhangsan", "124")
                , new Tuple2<String, String>("lisi", "234")
                , new Tuple2<String, String>("zhaosi", "234")
        );
        List<Tuple2<String, String>> datas2 = Arrays.asList(new Tuple2<String, String>("zhangsan", "12")
                , new Tuple2<String, String>("lisi", "124")
                , new Tuple2<String, String>("wangwu", "14")
        );
        // Build K,V-format RDDs.
        JavaPairRDD<String, String> rdd1 = sc.parallelizePairs(datas1, 2);
        JavaPairRDD<String, String> rdd2 = sc.parallelizePairs(datas2);

        // Both sides share the same <String,String> element type.
        JavaPairRDD<String, String> intersection = rdd2.intersection(rdd1);

        System.out.println("length = " + intersection.collect().size());
        intersection.foreach(new VoidFunction<Tuple2<String, String>>() {
            @Override
            public void call(Tuple2<String, String> tuple) throws Exception {
                System.out.println(tuple);
            }
        });
    }

    /**
     * union demo: concatenates both RDDs without de-duplication, so the
     * result size is the sum of both input sizes.
     * @param sc shared Spark context
     */
    void test6(JavaSparkContext sc) {
        List<Tuple2<String, String>> datas1 = Arrays.asList(new Tuple2<String, String>("zhangsan", "124")
                , new Tuple2<String, String>("lisi", "234")
                , new Tuple2<String, String>("zhaosi", "234")
        );
        List<Tuple2<String, String>> datas2 = Arrays.asList(new Tuple2<String, String>("zhangsan", "12")
                , new Tuple2<String, String>("lisi", "12")
                , new Tuple2<String, String>("wangwu", "14")
        );
        // Build K,V-format RDDs.
        JavaPairRDD<String, String> rdd1 = sc.parallelizePairs(datas1, 2);
        JavaPairRDD<String, String> rdd2 = sc.parallelizePairs(datas2);

        // Both sides share the same <String,String> element type.
        JavaPairRDD<String, String> union = rdd2.union(rdd1);

        System.out.println("length = " + union.collect().size());
    }

    /**
     * full outer join demo: every key from either side appears once; the
     * missing side's value is an absent {@link Optional}.
     * NOTE: this method also closes the shared context, so it must be the
     * last lesson invoked on this {@code sc}.
     * @param sc shared Spark context (closed by this method)
     */
    void test5(JavaSparkContext sc) {
        List<Tuple2<String, String>> datas1 = Arrays.asList(new Tuple2<String, String>("zhangsan", "124")
                , new Tuple2<String, String>("lisi", "234")
                , new Tuple2<String, String>("zhaosi", "234")
        );
        List<Tuple2<String, Integer>> datas2 = Arrays.asList(new Tuple2<String, Integer>("zhangsan", 12)
                , new Tuple2<String, Integer>("lisi", 12)
                , new Tuple2<String, Integer>("wangwu", 14)
        );
        // Build K,V-format RDDs.
        JavaPairRDD<String, String> rdd1 = sc.parallelizePairs(datas1, 2);
        JavaPairRDD<String, Integer> rdd2 = sc.parallelizePairs(datas2);

        // Only matching keys pair up; unmatched keys get Optional.absent().
        JavaPairRDD<String, Tuple2<Optional<Integer>, Optional<String>>> join = rdd2.fullOuterJoin(rdd1);

        System.out.println("length = " + join.collect().size());


//        System.out.println("rdd partition size="+rdd.partitions().size());
//        rdd.collect();

        sc.close();
    }


    /**
     * right outer join demo (kept for reference).
     * @param sc shared Spark context
     */
//    void test4(JavaSparkContext sc){
//        List<Tuple2<String, String>> datas1 = Arrays.asList(new Tuple2<String, String>("zhangsan", "124")
//                , new Tuple2<String, String>("lisi", "234")
//                , new Tuple2<String, String>("zhaosi", "234")
//        );
//        List<Tuple2<String, Integer>> datas2 = Arrays.asList(new Tuple2<String, Integer>("zhangsan", 12)
//                , new Tuple2<String, Integer>("lisi", 12)
//                , new Tuple2<String, Integer>("wangwu", 14)
//        );
//        // Build K,V-format RDDs.
//        JavaPairRDD<String, String> rdd1 = sc.parallelizePairs(datas1,2);
//        JavaPairRDD<String, Integer> rdd2 = sc.parallelizePairs(datas2);
//
//        // Only matching keys pair up; left side may be absent.
//        JavaPairRDD<String, Tuple2<Optional<String>, Integer>> join = rdd1.rightOuterJoin(rdd2);
//
//        join.foreach(new VoidFunction<Tuple2<String, Tuple2<Optional<String>, Integer>>>() {
//                         @Override
//                         public void call(Tuple2<String, Tuple2<Optional<String>, Integer>> tuple) throws Exception {
//                             String key=tuple._1;
//                             // The nested tuple.
//                             Tuple2<Optional<String>, Integer> tupleAround = tuple._2;
//                             Integer rightV = tupleAround._2;
//                             String leftV =tupleAround._1.isPresent()? tupleAround._1.get():"is null this key";
//                             System.out.println("key ="+key+" leftV ="+leftV+" rightV =" + rightV);
//                         }
//                     }
//        );
//    }

    /**
     * left outer join demo (kept for reference).
     * @param sc shared Spark context
     */
//    void test3(JavaSparkContext sc){
//        List<Tuple2<String, String>> datas1 = Arrays.asList(new Tuple2<String, String>("zhangsan", "124")
//                , new Tuple2<String, String>("lisi", "234")
//                , new Tuple2<String, String>("zhaosi", "234")
//        );
//        List<Tuple2<String, Integer>> datas2 = Arrays.asList(new Tuple2<String, Integer>("zhangsan", 12)
//                , new Tuple2<String, Integer>("lisi", 12)
//                , new Tuple2<String, Integer>("wangwu", 14)
//        );
//        // Build K,V-format RDDs.
//        JavaPairRDD<String, String> rdd1 = sc.parallelizePairs(datas1,2);
//        JavaPairRDD<String, Integer> rdd2 = sc.parallelizePairs(datas2);
//
//        // Only matching keys pair up; right side may be absent.
//        JavaPairRDD<String, Tuple2<String, Optional<Integer>>> join = rdd1.leftOuterJoin(rdd2);
//
//        join.foreach(new VoidFunction<Tuple2<String, Tuple2<String, Optional<Integer>>>>() {
//            @Override
//            public void call(Tuple2<String, Tuple2<String, Optional<Integer>>> tuple) throws Exception {
//                String key=tuple._1;
//                // The nested tuple.
//                Tuple2<String, Optional<Integer>> tupleAround = tuple._2;
//                String leftV = tupleAround._1;
//                Integer rightV =tupleAround._2.isPresent()? tupleAround._2.get():-1;
//                System.out.println("key ="+key+" leftV ="+leftV+" rightV =" + rightV);
//            }
//        });
//    }

    /**
     * inner join demo: join is only available on K,V-format RDDs, and only
     * keys present on both sides produce output pairs.
     * @param sc shared Spark context
     */
    void test2(JavaSparkContext sc) {
        List<Tuple2<String, String>> datas1 = Arrays.asList(new Tuple2<String, String>("zhangsan", "124")
                , new Tuple2<String, String>("lisi", "234")
        );
        List<Tuple2<String, Integer>> datas2 = Arrays.asList(new Tuple2<String, Integer>("zhangsan", 12)
                , new Tuple2<String, Integer>("lisi", 12)
                , new Tuple2<String, Integer>("wangwu", 14)
        );
        // Build K,V-format RDDs.
        JavaPairRDD<String, String> rdd1 = sc.parallelizePairs(datas1, 2);
        JavaPairRDD<String, Integer> rdd2 = sc.parallelizePairs(datas2);

        // Only matching keys produce joined records.
        JavaPairRDD<String, Tuple2<String, Integer>> join = rdd1.join(rdd2);

        join.foreach(new VoidFunction<Tuple2<String, Tuple2<String, Integer>>>() {
            @Override
            public void call(Tuple2<String, Tuple2<String, Integer>> stringTuple2Tuple2) throws Exception {
                System.out.println(stringTuple2Tuple2._1 + stringTuple2Tuple2._2);
            }
        });
    }

    /**
     * parallelize demo: distributes a local list across an explicit number of
     * partitions and materializes it with collect().
     * @param sc shared Spark context
     */
    void test1(JavaSparkContext sc) {
        List<String> datas = Arrays.asList("a", "b", "c", "d", "e", "a", "b", "c", "d", "e");
        // Request 4 partitions explicitly.
        JavaRDD<String> rdd = sc.parallelize(datas, 4);

        System.out.println("rdd partitons length =" + rdd.partitions().size());
        List<String> collect = rdd.collect();
    }


}
