package com.isunimp.sample.spark;

import org.apache.spark.SparkContext;
import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.JavaPairRDD$;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.sql.SparkSession;
import scala.Int;
import scala.Tuple2;

import java.lang.reflect.Array;
import java.util.Arrays;
import java.util.Random;

/**
 * Main class
 *
 * @author renguiquan
 * @date 2019/2/1
 */
public class Main {

    /** Number of random samples generated for the demo. */
    private static final int SAMPLE_SIZE = 100;

    /**
     * Runs a small local word-count-style Spark job: generates random ints,
     * counts occurrences of each value, and prints the counts sorted by frequency.
     *
     * @param args unused
     */
    public static void main(String[] args) {
        SparkSession session = SparkSession.builder()
                .appName("test")
                .master("local")
                .getOrCreate();

        try {
            // Plain array allocation — the reflective Array.newInstance + cast
            // in the original was unnecessary.
            Integer[] nums = new Integer[SAMPLE_SIZE];
            Random random = new Random();
            for (int idx = 0; idx < nums.length; ++idx) {
                nums[idx] = random.nextInt(100);
            }
            System.out.println(Arrays.asList(nums));

            JavaSparkContext javaSparkContext = JavaSparkContext.fromSparkContext(session.sparkContext());
            JavaRDD<Integer> rdd = javaSparkContext.parallelize(Arrays.asList(nums), 1);
            // Diamond operator keeps the pair fully typed; raw Tuple2 produced
            // unchecked-conversion warnings.
            JavaPairRDD<Integer, Integer> pairRDD = rdd.mapToPair(item -> new Tuple2<>(item, 1));

            // Count occurrences per value, swap (value, count) -> (count, value),
            // then sort by count so the output is ordered by frequency.
            pairRDD.reduceByKey(Integer::sum)
                    .mapToPair(item -> new Tuple2<>(item._2, item._1))
                    .sortByKey()
                    .foreach(item -> System.out.println(item));

            long count = pairRDD.count();
            System.out.println(count);
        } finally {
            // Release Spark resources; the original leaked the session.
            session.stop();
        }
    }

    /**
     * Demo of wrapping an existing {@link SparkContext} in a
     * {@link JavaSparkContext} and filtering purely numeric strings.
     *
     * NOTE(review): {@code new SparkContext()} without a SparkConf (no master /
     * app name) fails at runtime, and the method name shadows the class name —
     * this looks like sample code only; confirm before relying on it.
     */
    void SparkContext() {
        SparkContext sc = new SparkContext();
        JavaSparkContext javaSparkContext = JavaSparkContext.fromSparkContext(sc);
        JavaRDD<String> rdd = javaSparkContext.parallelize(Arrays.asList("121", "sasa", "sasas"));
        rdd.filter(item -> item.matches("^[0-9]*$"))
                .mapToPair(item -> new Tuple2<>("1", item))
                .foreach(System.out::println);
    }

}
