// NOTE(review): This entire file is commented-out dead code. Per standard practice,
// delete it and rely on version control history instead of keeping it inline.
// If it is ever revived, the defects flagged below (search for "BUG(review)" /
// "TODO(review)") must be fixed first.
//package service;
//
//import com.alibaba.fastjson.JSON;
//import com.alibaba.fastjson.JSONObject;
//import org.apache.kafka.clients.consumer.ConsumerConfig;
//import org.apache.kafka.clients.consumer.ConsumerRecord;
//import org.apache.kafka.common.serialization.StringDeserializer;
//import org.apache.spark.SparkConf;
//import org.apache.spark.api.java.JavaPairRDD;
//import org.apache.spark.api.java.function.Function2;
//import org.apache.spark.api.java.function.VoidFunction;
//import org.apache.spark.streaming.Durations;
//import org.apache.spark.streaming.api.java.JavaInputDStream;
//import org.apache.spark.streaming.api.java.JavaPairDStream;
//import org.apache.spark.streaming.api.java.JavaStreamingContext;
//import org.apache.spark.streaming.kafka010.ConsumerStrategies;
//import org.apache.spark.streaming.kafka010.KafkaUtils;
//import org.apache.spark.streaming.kafka010.LocationStrategies;
//import scala.Tuple2;
//import util.OpentsTest;
//import util.PropertiesUtils;
//
//import java.util.*;
//
// Test driver: consumes JSON alarm events from Kafka topics via Spark Streaming,
// sums "count" per (metricGroupId, metric) key each 1-second batch, and writes the
// totals to OpenTSDB through util.OpentsTest.
// TODO(review): class name typo — "Strem" should be "Stream" (also in the appName
// string on the SparkConf line below).
//public class SparkStremAlarmServiceTest {
//    public static void main(String[] args) throws InterruptedException {
//        SparkConf conf = new SparkConf().setAppName("SparkStremAlarmService").setMaster("local[7]");
//        JavaStreamingContext jssc = new JavaStreamingContext(conf, Durations.seconds(1));
//
          // Kafka consumer setup: topics and brokers come from the properties file.
          // TODO(review): the property key "kafak_group" is misspelled ("kafka") — it is a
          // runtime string, so renaming it requires updating the properties file in lockstep.
//        Set<String> topicsSet = new HashSet<>(Arrays.asList(PropertiesUtils.getProperty("kafka_source_topics").split(",")));
//        Map<String, Object> kafkaParams = new HashMap<>();
//        kafkaParams.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, PropertiesUtils.getProperty("kafka_brokers"));
//        kafkaParams.put(ConsumerConfig.GROUP_ID_CONFIG, PropertiesUtils.getProperty("kafak_group"));
//        kafkaParams.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
//        kafkaParams.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
//
//        JavaInputDStream<ConsumerRecord<String, String>> stream =
//                KafkaUtils.createDirectStream(jssc, LocationStrategies.PreferConsistent(), ConsumerStrategies.Subscribe(topicsSet, kafkaParams));
//
          // Parse each record's JSON value into a ("metricGroupId_metric", count) pair.
          // NOTE(review): assumes every message carries metricGroupId/metric/count keys —
          // a malformed message would throw inside the task; confirm upstream guarantees.
//        JavaPairDStream<String, Integer> stringIntegerJavaPairDStream = stream.mapToPair(line -> {
//            JSONObject jsonObject = JSON.parseObject(line.value());
//            String metricGroupId = jsonObject.getString("metricGroupId");
//            String metric = jsonObject.getString("metric");
//            Integer count = jsonObject.getInteger("count");
//            return new Tuple2<String, Integer>(metricGroupId + "_" + metric, count);
//        });
          // Sum counts per key within the batch.
          // NOTE(review): groupByKey + manual sum could be a single reduceByKey(Integer::sum),
          // avoiding the shuffle of full value lists.
//        JavaPairDStream<String, Iterable<Integer>> stringIterableJavaPairDStream = stringIntegerJavaPairDStream.groupByKey();
//        JavaPairDStream<String, Integer> stringIntegerJavaPairDStream1 = stringIterableJavaPairDStream.mapToPair(tuple -> {
//            int i = 0;
//            Iterator<Integer> it = tuple._2.iterator();
//            while (it.hasNext()) {
//                i += it.next();
//            }
//            return new Tuple2<String, Integer>(tuple._1, i);
//        });
          // BUG(review): the reduce function returns null instead of v1 + v2, so any key
          // reduced across partitions would yield null (NPE downstream). Must be
          // "return v1 + v2;" if this code is ever revived.
          // NOTE(review): window length == slide == batch interval (1s each), so this
          // windowing step is effectively a no-op over the 1s batches.
//        JavaPairDStream<String, Integer> stringIntegerJavaPairDStream2 = stringIntegerJavaPairDStream1.reduceByKeyAndWindow(new Function2<Integer, Integer, Integer>() {
//            @Override
//            public Integer call(Integer v1, Integer v2) throws Exception {
//                return null;
//            }
//        }, Durations.seconds(1), Durations.seconds(1));
//
          // Sink: split the composite key back into (metricGroupId, metric) and push the
          // aggregated count to OpenTSDB.
          // NOTE(review): key splitting by "_" breaks if metricGroupId or metric themselves
          // contain underscores — confirm the id/metric character set, or use a Tuple2 key.
//        stringIntegerJavaPairDStream2.foreachRDD(new VoidFunction<JavaPairRDD<String, Integer>>() {
//            @Override
//            public void call(JavaPairRDD<String, Integer> stringIntegerJavaPairRDD) throws Exception {
//                stringIntegerJavaPairRDD.foreach(new VoidFunction<Tuple2<String, Integer>>() {
//                    @Override
//                    public void call(Tuple2<String, Integer> stringIntegerTuple2) throws Exception {
//                        String[] s = stringIntegerTuple2._1().split("_");
//                        OpentsTest.opentsPut(s[0], stringIntegerTuple2._2(), s[1]);
//                    }
//                });
//            }
//        });
//
//        // Start the computation
//        jssc.start();
//        jssc.awaitTermination();
//        jssc.close();
//    }
//}
