package com.example.stream;

import cn.hutool.json.JSONUtil;
import com.example.entity.User;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.common.serialization.StringDeserializer;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.streaming.Durations;
import org.apache.spark.streaming.api.java.JavaDStream;
import org.apache.spark.streaming.api.java.JavaInputDStream;
import org.apache.spark.streaming.api.java.JavaPairDStream;
import org.apache.spark.streaming.api.java.JavaStreamingContext;
import org.apache.spark.streaming.kafka010.ConsumerStrategies;
import org.apache.spark.streaming.kafka010.KafkaUtils;
import org.apache.spark.streaming.kafka010.LocationStrategies;
import scala.Tuple2;

import java.util.*;

/**
 * @author wangjinlong
 * @version 1.0
 * @date 2021/5/18 16:47
 */

/**
 * Demo of Spark Streaming consuming JSON-encoded {@code User} records from Kafka.
 *
 * <p>Each {@code runN} method builds a local streaming context, subscribes to the
 * {@code kafka-3} topic and demonstrates one streaming transformation
 * (map, windowed reduce, join, cogroup). The shared setup/teardown boilerplate
 * lives in the private helpers below so each demo shows only its transformation.
 */
public class SparkKafkaDemo {
    /** Kafka topics consumed by every demo. Fixed-size list; never reassigned. */
    static final List<String> topics = Arrays.asList("kafka-3");

    public static void main(String[] args) {
        SparkKafkaDemo demo = new SparkKafkaDemo();
        demo.run4();
    }

    /**
     * Creates a local streaming context with a 2-second batch interval and Kryo serialization.
     *
     * @param logLevel Spark log level to set (e.g. {@code "WARN"}), or {@code null} to keep the default
     * @return a new streaming context ready for stream definitions
     */
    private JavaStreamingContext newStreamingContext(String logLevel) {
        SparkConf sparkConf = new SparkConf().setAppName("demo").setMaster("local");
        sparkConf.set("spark.serializer", "org.apache.spark.serializer.KryoSerializer");
        JavaSparkContext javaSparkContext = new JavaSparkContext(sparkConf);
        if (logLevel != null) {
            javaSparkContext.setLogLevel(logLevel);
        }
        return new JavaStreamingContext(javaSparkContext, Durations.seconds(2));
    }

    /**
     * Subscribes a direct Kafka stream on {@link #topics} using {@link #properties()}.
     *
     * @param jsc the streaming context to attach the input stream to
     * @return the raw Kafka record stream
     */
    private JavaInputDStream<ConsumerRecord<String, String>> subscribe(JavaStreamingContext jsc) {
        return KafkaUtils.createDirectStream(
                jsc,
                LocationStrategies.PreferConsistent(),
                ConsumerStrategies.<String, String>Subscribe(topics, properties()));
    }

    /**
     * Starts the streaming job and blocks until termination.
     * Restores the thread's interrupt flag if interrupted instead of swallowing it.
     */
    private void startAndAwait(JavaStreamingContext jsc) {
        try {
            jsc.start();
            jsc.awaitTermination();
        } catch (InterruptedException e) {
            // Re-assert the interrupt so callers up the stack can observe it.
            Thread.currentThread().interrupt();
            e.printStackTrace();
        }
    }

    /** Deserializes each Kafka record value (JSON) into a {@code User} over a 6s/2s sliding window and prints it. */
    public void run5() {
        JavaStreamingContext jsc = newStreamingContext("WARN");
        JavaInputDStream<ConsumerRecord<String, String>> stream = subscribe(jsc);
        JavaDStream<ConsumerRecord<String, String>> window = stream.window(Durations.seconds(6), Durations.seconds(2));
        JavaDStream<User> userJavaDStream = window.map(record -> JSONUtil.toBean(record.value(), User.class));
        userJavaDStream.print();
        startAndAwait(jsc);
    }

    /**
     * Computes the per-name average age over a 6s/2s sliding window.
     * Demonstrates {@code mapToPair}, {@code reduceByKey}, {@code join} and {@code cogroup}.
     */
    public void run4() {
        JavaStreamingContext jsc = newStreamingContext("WARN");
        JavaInputDStream<ConsumerRecord<String, String>> stream = subscribe(jsc);
        stream.print();

        JavaDStream<ConsumerRecord<String, String>> window = stream.window(Durations.seconds(6), Durations.seconds(2));
        JavaDStream<User> userJavaDStream = window.map(record -> JSONUtil.toBean(record.value(), User.class));
        userJavaDStream.print();

        // (name -> age) and (name -> 1) pair streams feed the sum and count aggregations.
        JavaPairDStream<String, Integer> nameAgePair = userJavaDStream.mapToPair(user -> new Tuple2<>(user.getName(), user.getAge()));
        JavaPairDStream<String, Integer> namePair = userJavaDStream.mapToPair(user -> new Tuple2<>(user.getName(), 1));
        JavaPairDStream<String, Integer> ageSumKey = nameAgePair.reduceByKey(Integer::sum);
        JavaPairDStream<String, Integer> nameCount = namePair.reduceByKey(Integer::sum);

        // Join (name -> ageSum) with (name -> count), then divide for the average.
        JavaPairDStream<String, Tuple2<Integer, Integer>> joinDstream = ageSumKey.join(nameCount);
        joinDstream.print();
        // NOTE: integer division — the average age is truncated toward zero.
        JavaDStream<Tuple2<String, Integer>> userDs = joinDstream.map(t -> new Tuple2<>(t._1, t._2._1 / t._2._2));
        userDs.print();

        // cogroup groups both value iterables per key rather than pairing them element-wise.
        JavaPairDStream<String, Tuple2<Iterable<Integer>, Iterable<Integer>>> cogroup = nameAgePair.cogroup(namePair);
        cogroup.print();

        startAndAwait(jsc);
    }

    /** Sums all ages in a 4s/2s sliding window under the constant key {@code 1}. */
    public void run3() {
        JavaStreamingContext jsc = newStreamingContext(null);
        JavaInputDStream<ConsumerRecord<String, String>> stream = subscribe(jsc);
        stream.print();
        JavaDStream<ConsumerRecord<String, String>> window = stream.window(Durations.seconds(4), Durations.seconds(2));
        window.mapToPair(record -> {
            User user = JSONUtil.toBean(record.value(), User.class);
            // Constant key 1 -> reduceByKey folds every age in the window into one sum.
            return new Tuple2<>(1, user.getAge());
        }).reduceByKey(Integer::sum).print();
        startAndAwait(jsc);
    }

    /**
     * Identical demo to {@link #run3()}; the original body was a byte-for-byte duplicate,
     * so it now delegates to keep a single implementation.
     */
    public void run2() {
        run3();
    }

    /** Splits each record value on spaces and suffixes every token with {@code "_1"}. */
    public void run1() {
        JavaStreamingContext jsc = newStreamingContext(null);
        JavaInputDStream<ConsumerRecord<String, String>> stream = subscribe(jsc);
        stream.print();
        JavaDStream<List<String>> maps = stream.map(x -> {
            List<String> kList = new ArrayList<>();
            for (String k : x.value().split(" ")) {
                kList.add(k + "_1");
            }
            return kList;
        });
        maps.print();
        startAndAwait(jsc);
    }

    /**
     * Kafka consumer configuration shared by every demo.
     *
     * @return consumer params: local broker, String (de)serialization, manual offset commits,
     *         reading from the latest offset in consumer group {@code group-1}
     */
    public static Map<String, Object> properties() {
        Map<String, Object> kafkaParams = new HashMap<>();
        kafkaParams.put("bootstrap.servers", "localhost:9092");
        kafkaParams.put("key.deserializer", StringDeserializer.class);
        kafkaParams.put("value.deserializer", StringDeserializer.class);
        kafkaParams.put("group.id", "group-1");
        kafkaParams.put("auto.offset.reset", "latest");
        kafkaParams.put("enable.auto.commit", false);
        return kafkaParams;
    }

    /** Batch (non-streaming) demo: sums and averages the ages of {@link #getUser()} via an RDD reduce. */
    public void test() {
        SparkConf conf = new SparkConf()
                .setMaster("local")
                .setAppName("myReduce");
        conf.set("spark.serializer", "org.apache.spark.serializer.KryoSerializer");

        JavaSparkContext sc = new JavaSparkContext(conf);
        try {
            List<User> numberList = getUser();
            JavaRDD<User> numbers = sc.parallelize(numberList, 2);
            JavaRDD<Integer> ageRDD = numbers.map(User::getAge);
            Integer sum = ageRDD.reduce(Integer::sum);
            System.out.println("sum:" + sum);
            // NOTE: integer (long) division — the average is truncated toward zero.
            System.out.println("avg_age = " + (sum / ageRDD.count()));
        } finally {
            // Always release the context, even if a job above throws.
            sc.close();
        }
    }

    /** @return a fixed sample of four users used by {@link #test()} */
    public List<User> getUser() {
        List<User> userList = new ArrayList<>();
        userList.add(new User(1, "zhangsan", 23, "23@qq.com"));
        userList.add(new User(2, "lisi", 24, "24@qq.com"));
        userList.add(new User(3, "wangwu", 25, "25@qq.com"));
        userList.add(new User(4, "zhaoliu", 26, "26@qq.com"));
        return userList;
    }

}
