package service;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.common.serialization.StringDeserializer;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.function.*;
import org.apache.spark.streaming.Durations;
import org.apache.spark.streaming.api.java.JavaInputDStream;
import org.apache.spark.streaming.api.java.JavaPairDStream;
import org.apache.spark.streaming.api.java.JavaStreamingContext;
import org.apache.spark.streaming.kafka010.ConsumerStrategies;
import org.apache.spark.streaming.kafka010.KafkaUtils;
import org.apache.spark.streaming.kafka010.LocationStrategies;
import scala.Tuple2;
import util.PropertiesUtils;

import java.util.*;

/**
 * @Auther: wy
 * @Date: 2018/9/12 11:20
 * @Description: Spark Streaming job: sums Kafka alarm metric values per
 *               (groupId, metric) over a 60s tumbling window and writes
 *               the aggregates back to Kafka as JSON.
 */

/**
 * Spark Streaming job that consumes alarm metric events from Kafka, sums the
 * {@code value} field per (groupId, metric) pair over a 60-second tumbling
 * window, and publishes the aggregated result back to Kafka as JSON.
 *
 * <p>Expected input record (JSON): {@code {"groupId": ..., "metric": ..., "value": <int>}}.
 * Output record adds {@code time} (epoch millis) and a fixed {@code windowid} of "1".
 */
public class SparkStremAlarmKafkaService {

    /**
     * Entry point: wires the Kafka direct stream, the windowed aggregation,
     * and the output sink, then blocks until the streaming context terminates.
     *
     * @param args unused
     * @throws InterruptedException if awaiting termination is interrupted
     */
    public static void main(String[] args) throws InterruptedException {
        SparkConf conf = new SparkConf().setAppName("SparkStremAlarmKafkaService");
        // 1-second micro-batches; the window below aggregates 60 of them.
        JavaStreamingContext jssc = new JavaStreamingContext(conf, Durations.seconds(1));

        Set<String> topicsSet =
                new HashSet<>(Arrays.asList(PropertiesUtils.getProperty("kafka_source_topics").split(",")));
        Map<String, Object> kafkaParams = new HashMap<>();
        kafkaParams.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, PropertiesUtils.getProperty("kafka_brokers"));
        kafkaParams.put(ConsumerConfig.GROUP_ID_CONFIG, PropertiesUtils.getProperty("kafka_kafka_group"));
        kafkaParams.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
        kafkaParams.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);

        JavaInputDStream<ConsumerRecord<String, String>> stream =
                KafkaUtils.createDirectStream(
                        jssc,
                        LocationStrategies.PreferConsistent(),
                        ConsumerStrategies.Subscribe(topicsSet, kafkaParams));

        // Key each event by "groupId_metric" so the window reduce sums per pair.
        // NOTE(review): this assumes groupId itself contains no "_" — verify
        // against the producers; if it can, a structured key type is needed.
        JavaPairDStream<String, Integer> keyedValues = stream.mapToPair(record -> {
            JSONObject jsonObject = JSON.parseObject(record.value());
            String groupId = jsonObject.getString("groupId");
            String metric = jsonObject.getString("metric");
            Integer value = jsonObject.getInteger("value");
            // A missing/null "value" would NPE inside the reduce and kill the
            // whole streaming job; treat it as 0 instead.
            int safeValue = (value != null) ? value : 0;
            return new Tuple2<>(groupId + "_" + metric, safeValue);
        });

        // Window length == slide interval (60s): a tumbling, non-overlapping window.
        JavaPairDStream<String, Integer> windowedSums = keyedValues.reduceByKeyAndWindow(
                (v1, v2) -> v1 + v2,
                Durations.seconds(60),
                Durations.seconds(60));

        // For each window, re-expand the composite key and publish one JSON
        // message per (groupId, metric) aggregate.
        windowedSums.foreachRDD(rdd ->
                rdd.foreach(tuple -> {
                    // limit=2 keeps any "_" inside the metric name intact;
                    // an unbounded split would truncate such metrics.
                    String[] keyParts = tuple._1().split("_", 2);
                    JSONObject jsonObject = new JSONObject();
                    jsonObject.put("groupId", keyParts[0]);
                    jsonObject.put("metric", keyParts[1]);
                    jsonObject.put("value", tuple._2());
                    jsonObject.put("time", System.currentTimeMillis());
                    jsonObject.put("windowid", "1");
                    KafkaProducerService.kafkaProducer(jsonObject.toString());
                }));

        // Start the computation and block until the context is stopped.
        jssc.start();
        jssc.awaitTermination();
        jssc.close();
    }
}
