package com.example.service;

import com.example.entity.User;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.function.FlatMapFunction;
import org.apache.spark.api.java.function.PairFlatMapFunction;
import org.apache.spark.storage.StorageLevel;
import scala.Tuple2;

import java.util.Arrays;
import java.util.Iterator;
import java.util.List;
import java.util.Map;

/**
 * Spark local-mode demo: builds a shared {@link JavaSparkContext} configured with
 * Kryo serialization and exercises basic RDD operations (map, flatMap, reduceByKey,
 * countByKey) over a local text file.
 *
 * @author wangjinlong
 * @version 1.0
 * @date 2021/5/18 19:29
 */
public class SparkDemo2 {
    static JavaSparkContext jsc;
    static {
        // Single-threaded local master; switch to "local[*]" to use all cores.
        SparkConf conf = new SparkConf().setAppName("spark-demo").setMaster("local");
        // Kryo is faster and more compact than default Java serialization; registering
        // classes up front avoids writing fully-qualified class names into each record.
        conf.set("spark.serializer", "org.apache.spark.serializer.KryoSerializer");
        conf.registerKryoClasses(new Class<?>[]{User.class});
        conf.set("spark.rdd.compress", "true");
        // Event log lets the Spark history server reconstruct the UI after the app exits.
        conf.set("spark.eventLog.enabled", "true");
        conf.set("spark.eventLog.dir", "/tmp/spark-events");
        jsc = new JavaSparkContext(conf);
    }

    public static void main(String[] args) {
        test2();
    }

    /**
     * Loads a text file and marks it for MEMORY_AND_DISK caching.
     * NOTE: persist() is lazy — no action is triggered here, so nothing is
     * actually read or cached until an action (collect, count, ...) runs.
     */
    public static void test3() {
        JavaRDD<String> distFile = jsc.textFile("data/file.txt");
        distFile.persist(StorageLevel.MEMORY_AND_DISK());
    }

    /**
     * End-to-end word-count-style demo over {@code data/file.txt}: prints the lines,
     * sums line lengths, builds (line, 1) and (word, 1) pairs, then aggregates with
     * reduceByKey and countByKey. Stops the shared context when done.
     */
    public static void test2() {
        // Optional is empty unless SPARK_HOME was configured for this context.
        String sname = jsc.getSparkHome().orElse("not exist");
        System.out.println(sname);

        JavaRDD<String> distFile = jsc.textFile("data/file.txt");
        distFile.persist(StorageLevel.MEMORY_AND_DISK());

        distFile.collect().forEach(System.out::println);

        // Total number of characters across all lines.
        Integer totalLength = distFile.map(String::length).reduce(Integer::sum);
        System.out.println(totalLength);

        // One (line, 1) pair per line; Tuple2 is parameterized instead of raw.
        JavaRDD<Tuple2<String, Integer>> linePairs = distFile.map(s -> new Tuple2<>(s, 1));
        linePairs.collect().forEach(System.out::println);

        // Split every line into whitespace-separated words.
        JavaRDD<String> words = distFile.flatMap(
                (FlatMapFunction<String, String>) x -> Arrays.asList(x.split(" ")).iterator());
        words.collect().forEach(System.out::println);

        // One (word, 1) pair per word. flatMapToPair over a singleton list is kept to
        // preserve the original behavior; mapToPair would be the simpler equivalent.
        JavaPairRDD<String, Integer> wordPairs = words.flatMapToPair(
                (PairFlatMapFunction<String, String, Integer>) x ->
                        Arrays.asList(new Tuple2<>(x, 1)).iterator());
        wordPairs.collect().forEach(System.out::println);

        // Classic word count: sum the 1s per distinct word.
        JavaPairRDD<String, Integer> reduceByKey = wordPairs.reduceByKey(Integer::sum);
        reduceByKey.collect().forEach(System.out::println);

        // countByKey returns results to the driver as a Map<word, occurrences>.
        Map<String, Long> longMap = wordPairs.countByKey();
        System.out.println(longMap);

        // Pause so the Spark UI (http://localhost:4040) can be inspected briefly.
        sleepQuietly(5 * 1000);

        jsc.stop();
    }

    /**
     * Parallelizes a small in-memory list and prints it, then sleeps so the Spark UI
     * can be inspected before the JVM exits. NOTE(review): unlike test2, this method
     * never calls jsc.stop() — intentional for UI inspection, but the context leaks.
     */
    public static void test1() {
        List<Integer> data = Arrays.asList(1, 2, 3, 4, 5);
        JavaRDD<Integer> distData = jsc.parallelize(data);
        distData.collect().forEach(System.out::println);
        System.out.println("sleeping");
        sleepQuietly(50000);
    }

    /**
     * Sleeps for the given number of milliseconds. If interrupted, restores the
     * thread's interrupt status instead of swallowing it with printStackTrace().
     *
     * @param millis sleep duration in milliseconds
     */
    private static void sleepQuietly(long millis) {
        try {
            Thread.sleep(millis);
        } catch (InterruptedException e) {
            // Re-assert the interrupt so callers up the stack can observe it.
            Thread.currentThread().interrupt();
        }
    }
}
