package spark.core.java;

import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.function.Function;
import org.apache.spark.api.java.function.Function2;
import org.apache.spark.api.java.function.VoidFunction;
import scala.Tuple2;

import java.util.Arrays;
import java.util.List;
import java.util.Map;

/**
 * Created by Administrator on 2018/2/10/010.
 */
/**
 * Demonstrates common Spark <em>action</em> operators — reduce, collect, count,
 * take, saveAsTextFile and countByKey — using the Java API.
 *
 * Each demo method creates its own local-mode {@code JavaSparkContext}, runs a
 * single action over a parallelized in-memory collection, prints the result,
 * and closes the context in a {@code finally} block so it is released even if
 * the job fails.
 */
public class ActionOperation {

    /** Entry point: runs the countByKey demo. Swap the call to try other actions. */
    public static void main(String[] args) {
        countByKey();
    }

    /**
     * Sums the numbers 1..10 with the {@code reduce} action and prints the total.
     */
    public static void reduce() {
        SparkConf conf = new SparkConf().setMaster("local").setAppName("reduce");
        JavaSparkContext sc = new JavaSparkContext(conf);
        try {
            List<Integer> numbers = Arrays.asList(1, 2, 3, 4, 5, 6, 7, 8, 9, 10);
            // Parallelize the local collection into the initial RDD.
            JavaRDD<Integer> numbersRdd = sc.parallelize(numbers);
            Integer sum = numbersRdd.reduce(new Function2<Integer, Integer, Integer>() {
                public Integer call(Integer v1, Integer v2) throws Exception {
                    return v1 + v2;
                }
            });
            // Fix: the original literal was "result is +", leaving a stray '+'
            // inside the printed message.
            System.out.println("result is " + sum);
        } finally {
            sc.close();
        }
    }

    /**
     * Doubles each number 1..10, pulls the results back to the driver with
     * {@code collect}, and prints them. Note: collect materializes the whole
     * RDD on the driver — fine here, dangerous for large data sets.
     */
    public static void collect() {
        SparkConf conf = new SparkConf().setMaster("local").setAppName("collect");
        JavaSparkContext sc = new JavaSparkContext(conf);
        try {
            List<Integer> numbers = Arrays.asList(1, 2, 3, 4, 5, 6, 7, 8, 9, 10);
            // Parallelize the local collection into the initial RDD.
            JavaRDD<Integer> numbersRdd = sc.parallelize(numbers);
            JavaRDD<Integer> doubled = numbersRdd.map(new Function<Integer, Integer>() {
                public Integer call(Integer v1) throws Exception {
                    return v1 * 2;
                }
            });
            List<Integer> result = doubled.collect();
            for (Integer value : result) {
                System.out.println("result is " + value);
            }
        } finally {
            sc.close();
        }
    }

    /**
     * Counts the elements of a mapped RDD with the {@code count} action.
     */
    public static void count() {
        // Fix: app name was copy-pasted as "collect"; use the actual action name.
        SparkConf conf = new SparkConf().setMaster("local").setAppName("count");
        JavaSparkContext sc = new JavaSparkContext(conf);
        try {
            List<Integer> numbers = Arrays.asList(1, 2, 3, 4, 5, 6, 7, 8, 9, 10);
            // Parallelize the local collection into the initial RDD.
            JavaRDD<Integer> numbersRdd = sc.parallelize(numbers);
            JavaRDD<Integer> doubled = numbersRdd.map(new Function<Integer, Integer>() {
                public Integer call(Integer v1) throws Exception {
                    return v1 * 2;
                }
            });
            long result = doubled.count();
            System.out.println("result is " + result);
        } finally {
            sc.close();
        }
    }

    /**
     * Fetches the first three elements of the RDD with {@code take} and prints them.
     */
    public static void take() {
        // Fix: app name was copy-pasted as "reduce"; use the actual action name.
        SparkConf conf = new SparkConf().setMaster("local").setAppName("take");
        JavaSparkContext sc = new JavaSparkContext(conf);
        try {
            List<Integer> numbers = Arrays.asList(1, 2, 3, 4, 5, 6, 7, 8, 9, 10);
            // Parallelize the local collection into the initial RDD.
            JavaRDD<Integer> numbersRdd = sc.parallelize(numbers);
            List<Integer> firstThree = numbersRdd.take(3);
            for (Integer value : firstThree) {
                System.out.println("" + value);
            }
        } finally {
            sc.close();
        }
    }

    /**
     * Doubles each number and writes the RDD to disk with {@code saveAsTextFile}.
     * NOTE: the path names a directory — Spark creates "datas/saveAs.txt/" and
     * writes part-files inside it; the job fails if the directory already exists.
     */
    public static void saveAsTextFile() {
        // Fix: app name was copy-pasted as "reduce"; use the actual action name.
        SparkConf conf = new SparkConf().setMaster("local").setAppName("saveAsTextFile");
        JavaSparkContext sc = new JavaSparkContext(conf);
        try {
            List<Integer> numbers = Arrays.asList(1, 2, 3, 4, 5, 6, 7, 8, 9, 10);
            // Parallelize the local collection into the initial RDD.
            JavaRDD<Integer> numbersRdd = sc.parallelize(numbers);

            JavaRDD<Integer> doubled = numbersRdd.map(new Function<Integer, Integer>() {
                public Integer call(Integer v1) throws Exception {
                    return v1 * 2;
                }
            });
            doubled.saveAsTextFile("datas/saveAs.txt");
        } finally {
            sc.close();
        }
    }

    /**
     * Counts how many pairs share each key with {@code countByKey} and prints
     * one "key/count" line per class.
     */
    private static void countByKey() {
        // Fix: app name was copy-pasted as "sortByKey"; use the actual action name.
        SparkConf conf = new SparkConf().setMaster("local").setAppName("countByKey");
        JavaSparkContext sc = new JavaSparkContext(conf);
        try {
            List<Tuple2<String, String>> students = Arrays.asList(
                    new Tuple2<String, String>("class1", "lili"),
                    new Tuple2<String, String>("class1", "lee"),
                    new Tuple2<String, String>("class2", "tom"),
                    new Tuple2<String, String>("class2", "zheng"));
            // Parallelize the local collection into the initial pair RDD.
            JavaPairRDD<String, String> scores = sc.parallelizePairs(students);
            // Map<String, Object> matches this Spark version's Java signature for
            // countByKey; values are the per-key counts.
            // NOTE(review): newer Spark declares Map<K, Long> — confirm before upgrading.
            Map<String, Object> counts = scores.countByKey();
            for (Map.Entry<String, Object> entry : counts.entrySet()) {
                // Fix: original concatenated key and count with no separator.
                System.out.println("key:" + entry.getKey() + " count:" + entry.getValue());
            }
        } finally {
            sc.close();
        }
    }
}
