package com.etc.java;


import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.function.Function;
import org.apache.spark.api.java.function.Function2;
import scala.Tuple2;

import java.util.Arrays;
import java.util.List;
import java.util.Map;

/**
 * @author 杰哥
 * @Title: ActionOperation
 * @ProjectName scalaworldcount
 * @Description: Demonstrates common Spark RDD actions (reduce, collect, count, take,
 *               saveAsTextFile, countByKey, collectAsMap, lookup, top) via the Java API.
 * @date 2019/7/16 9:31
 */
public class ActionOperation {

    /**
     * {@code reduce} action: folds all elements of the RDD into a single value
     * (here, the sum) and prints it on the driver.
     */
    private static void reduce() {
        SparkConf conf = new SparkConf().setAppName("reduce").setMaster("local");
        // JavaSparkContext implements Closeable, so try-with-resources guarantees
        // the context is stopped even when an action throws (the original closed
        // it only on the happy path).
        try (JavaSparkContext jsc = new JavaSparkContext(conf)) {
            List<Integer> list = Arrays.asList(1, 3, 5, 7, 9, 11, 13, 15, 17);
            JavaRDD<Integer> rdd = jsc.parallelize(list);
            // Associative, commutative combine function: partial sums per partition
            // are merged into a global sum.
            Integer sum = rdd.reduce((i1, i2) -> i1 + i2);
            System.out.println(sum);
        }
    }

    /**
     * {@code collect} action: doubles every element, pulls the whole result back
     * to the driver, and prints each value.
     */
    private static void collect() {
        SparkConf conf = new SparkConf().setAppName("collect").setMaster("local");
        try (JavaSparkContext jsc = new JavaSparkContext(conf)) {
            List<Integer> list = Arrays.asList(1, 2, 3, 4, 5, 6, 7, 8, 9, 10);
            JavaRDD<Integer> doubled = jsc.parallelize(list).map(it -> it * 2);
            // collect() copies the entire RDD into driver memory — fine for demo
            // data, dangerous for large datasets.
            for (Integer value : doubled.collect()) {
                System.out.println(value);
            }
        }
    }

    /** {@code count} action: prints the number of elements in the RDD. */
    private static void count() {
        SparkConf conf = new SparkConf().setAppName("count").setMaster("local");
        try (JavaSparkContext jsc = new JavaSparkContext(conf)) {
            List<Integer> list = Arrays.asList(1, 2, 3, 4, 5, 6, 7, 8, 9, 10);
            long count = jsc.parallelize(list).count();
            System.out.println(count);
        }
    }

    /** {@code take} action: fetches the first 5 elements to the driver and prints them. */
    private static void take() {
        SparkConf conf = new SparkConf().setAppName("take").setMaster("local");
        try (JavaSparkContext jsc = new JavaSparkContext(conf)) {
            List<Integer> list = Arrays.asList(1, 2, 3, 4, 5, 6, 7, 8, 9, 10);
            for (Integer value : jsc.parallelize(list).take(5)) {
                System.out.println(value);
            }
        }
    }

    /**
     * {@code saveAsTextFile} action: doubles every element and writes the result
     * as text files under the relative directory {@code hehe}.
     * NOTE(review): the output path is hard-coded and Spark fails if it already
     * exists — delete the directory between runs.
     */
    private static void saveAsTextFile() {
        SparkConf conf = new SparkConf().setAppName("saveAsTextFile").setMaster("local");
        try (JavaSparkContext jsc = new JavaSparkContext(conf)) {
            List<Integer> list = Arrays.asList(1, 2, 3, 4, 5, 6, 7, 8, 9);
            JavaRDD<Integer> doubled = jsc.parallelize(list).map(v -> v * 2);
            doubled.saveAsTextFile("hehe");
        }
    }

    /** {@code countByKey} action: counts how many pairs share each key and prints the map. */
    private static void countByKey() {
        SparkConf conf = new SparkConf().setAppName("countByKey").setMaster("local");
        try (JavaSparkContext jsc = new JavaSparkContext(conf)) {
            List<Tuple2<String, String>> scoreList = Arrays.asList(
                    new Tuple2<>("class1", "leo"),
                    new Tuple2<>("class2", "jack"),
                    new Tuple2<>("class1", "marry"),
                    new Tuple2<>("class2", "tom"),
                    new Tuple2<>("class2", "david"));
            JavaPairRDD<String, String> pairRDD = jsc.parallelizePairs(scoreList);
            Map<String, Long> countsPerKey = pairRDD.countByKey();
            for (Map.Entry<String, Long> entry : countsPerKey.entrySet()) {
                System.out.println(entry.getKey() + "\t" + entry.getValue());
            }
        }
    }

    /**
     * {@code collectAsMap} action: materializes the pair RDD as a driver-side map.
     * Duplicate keys collapse to a single entry (later values win), so the demo
     * data with repeated keys 1 and 2 yields only three entries.
     */
    private static void collectAsMap() {
        SparkConf conf = new SparkConf().setAppName("collectAsMap").setMaster("local");
        try (JavaSparkContext jsc = new JavaSparkContext(conf)) {
            List<Tuple2<Integer, Integer>> tuple2s = Arrays.asList(
                    new Tuple2<>(1, 11),
                    new Tuple2<>(2, 12),
                    new Tuple2<>(1, 11),
                    new Tuple2<>(2, 12),
                    new Tuple2<>(3, 13)
            );
            Map<Integer, Integer> asMap = jsc.parallelizePairs(tuple2s).collectAsMap();
            for (Map.Entry<Integer, Integer> entry : asMap.entrySet()) {
                System.out.println(entry.getKey() + "\t" + entry.getValue());
            }
        }
    }

    /** {@code lookup} action: prints every value whose key equals 2. */
    private static void lookup() {
        SparkConf conf = new SparkConf().setAppName("lookup").setMaster("local");
        // BUG FIX: the original never closed this context, leaking it on every call.
        try (JavaSparkContext jsc = new JavaSparkContext(conf)) {
            List<Tuple2<Integer, Integer>> tuple2s = Arrays.asList(
                    new Tuple2<>(1, 12),
                    new Tuple2<>(2, 13),
                    new Tuple2<>(1, 14),
                    new Tuple2<>(2, 15),
                    new Tuple2<>(3, 16)
            );
            for (Integer value : jsc.parallelizePairs(tuple2s).lookup(2)) {
                System.out.println(value);
            }
        }
    }

    /** {@code top} action: prints the 6 largest elements in descending order. */
    private static void top() {
        // BUG FIX: app name was "lookup" (copy-paste error); it now matches the action.
        SparkConf conf = new SparkConf().setAppName("top").setMaster("local");
        try (JavaSparkContext jsc = new JavaSparkContext(conf)) {
            List<Integer> list = Arrays.asList(1, 2, 3, 4, 5, 6, 7, 8, 9);
            for (Integer value : jsc.parallelize(list).top(6)) {
                System.out.println(value);
            }
        }
    }

    /** Demo entry point — uncomment exactly one action to run it. */
    public static void main(String[] args) {
//        reduce();
//        collect();
//        count();
//        take();
//        saveAsTextFile();
//        countByKey();
//        collectAsMap();
//        lookup();
        top();
    }
}
