package com.atguigu.bigdata.spark.streaming;

import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.function.Function;
import org.apache.spark.api.java.function.Function2;
import org.apache.spark.api.java.function.PairFunction;
import org.apache.spark.streaming.Durations;
import org.apache.spark.streaming.Seconds;
import org.apache.spark.streaming.api.java.JavaDStream;
import org.apache.spark.streaming.api.java.JavaInputDStream;
import org.apache.spark.streaming.api.java.JavaPairDStream;
import org.apache.spark.streaming.api.java.JavaStreamingContext;
import org.apache.spark.streaming.kafka010.ConsumerStrategies;
import org.apache.spark.streaming.kafka010.KafkaUtils;
import org.apache.spark.streaming.kafka010.LocationStrategies;
import scala.Tuple2;

import java.util.Collections;
import java.util.HashMap;
import java.util.Map;

/*
    Requirement 3: ad click counts over the last minute, recomputed every 10 seconds
    (window = 60s, slide = 10s), with individual clicks bucketed into 10-second intervals.
 */
public class SparkStreaming13_Req3_JAVA {
    public static void main(String[] args) throws InterruptedException {
        // Local mode with one worker thread per core; 5-second micro-batch interval.
        SparkConf conf = new SparkConf().setMaster("local[*]").setAppName("SparkStreaming");
        JavaStreamingContext jssc = new JavaStreamingContext(conf, Durations.seconds(5));

        // Kafka consumer configuration for the direct stream.
        Map<String, Object> kafkaPara = new HashMap<>();
        kafkaPara.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "127.0.0.1:9092");
        kafkaPara.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringDeserializer");
        kafkaPara.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringDeserializer");
        kafkaPara.put(ConsumerConfig.GROUP_ID_CONFIG, "atguigu1");
        String topic = "test111";

        JavaInputDStream<ConsumerRecord<String, String>> kafkaDataDS = KafkaUtils.createDirectStream(
                jssc,
                LocationStrategies.PreferConsistent(),
                ConsumerStrategies.Subscribe(Collections.singleton(topic), kafkaPara));

        // Parse each record value into an AdClickData bean.
        // NOTE(review): assumes every message has exactly 5 space-separated fields —
        // a malformed message would throw ArrayIndexOutOfBoundsException and kill the
        // batch; confirm the producer guarantees this format.
        JavaDStream<AdClickData> adClickData = kafkaDataDS.map(record -> {
            String[] fields = record.value().split(" ");
            return new AdClickData(fields[0], fields[1], fields[2], fields[3], fields[4]);
        });

        // Key each click by its 10-second bucket: truncate the millisecond timestamp
        // down to a multiple of 10_000 ms. Primitive long avoids the autoboxing that
        // the original boxed-Long arithmetic incurred; only the Tuple2 key is boxed,
        // as the pair API requires.
        JavaPairDStream<Long, Integer> pairDS = adClickData.mapToPair(data -> {
            long ts = Long.parseLong(data.getTs());
            long bucket = ts / 10000 * 10000;
            return new Tuple2<>(bucket, 1);
        });

        // Sum counts per bucket over a 60-second window, sliding every 10 seconds.
        // No inverse-reduce function is supplied, so no checkpoint directory is required.
        JavaPairDStream<Long, Integer> reduceDS =
                pairDS.reduceByKeyAndWindow(Integer::sum, Durations.seconds(60), Durations.seconds(10));

        reduceDS.print();

        // 1. Start the streaming computation.
        jssc.start();
        // 2. Block until the streaming context is stopped.
        jssc.awaitTermination();
    }
}
