package com.atguigu.bigdata.spark.streaming;

import com.atguigu.bigdata.spark.util.JDBCUtil;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.function.Function;
import org.apache.spark.api.java.function.Function2;
import org.apache.spark.api.java.function.PairFunction;
import org.apache.spark.api.java.function.VoidFunction;
import org.apache.spark.streaming.Durations;
import org.apache.spark.streaming.api.java.JavaDStream;
import org.apache.spark.streaming.api.java.JavaInputDStream;
import org.apache.spark.streaming.api.java.JavaPairDStream;
import org.apache.spark.streaming.api.java.JavaStreamingContext;
import org.apache.spark.streaming.kafka010.ConsumerStrategies;
import org.apache.spark.streaming.kafka010.KafkaUtils;
import org.apache.spark.streaming.kafka010.LocationStrategies;
import scala.Tuple2;

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.text.SimpleDateFormat;
import java.util.Collections;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
/*
    Requirement: in real time, count the total ad-click traffic per day, per area,
    per city and per ad, and persist the results to MySQL.
 */
public class SparkStreaming12_Req2_JAVA {
    /**
     * Entry point: consumes ad-click events from Kafka in 3-second micro-batches,
     * counts clicks per (day, area, city, ad) key within each batch, and upserts
     * the counts into the MySQL table {@code area_city_ad_count}.
     *
     * <p>Expected record format (space-separated): {@code ts area city user ad}.
     *
     * @param args unused
     * @throws InterruptedException if the streaming context is interrupted while waiting
     */
    public static void main(String[] args) throws InterruptedException {
        SparkConf conf = new SparkConf().setMaster("local[*]").setAppName("SparkStreaming");
        JavaStreamingContext jssc = new JavaStreamingContext(conf, Durations.seconds(3));

        // Kafka consumer configuration.
        Map<String, Object> kafkaPara = new HashMap<>();
        kafkaPara.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "127.0.0.1:9092");
        kafkaPara.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringDeserializer");
        kafkaPara.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringDeserializer");
        kafkaPara.put(ConsumerConfig.GROUP_ID_CONFIG, "atguigu1");
        String topic = "test111";

        JavaInputDStream<ConsumerRecord<String, String>> kafkaDataDS = KafkaUtils.createDirectStream(
                jssc,
                LocationStrategies.PreferConsistent(),
                ConsumerStrategies.Subscribe(Collections.singleton(topic), kafkaPara));

        // Parse each raw line "ts area city user ad" into an AdClickData bean.
        JavaDStream<AdClickData> adClickData = kafkaDataDS.map(new Function<ConsumerRecord<String, String>, AdClickData>() {
            @Override
            public AdClickData call(ConsumerRecord<String, String> record) throws Exception {
                String[] fields = record.value().split(" ");
                return new AdClickData(fields[0], fields[1], fields[2], fields[3], fields[4]);
            }
        });

        // Pair every click with a count of 1 so clicks can be summed per key.
        JavaPairDStream<AdClickData, Integer> mapDS = adClickData.mapToPair(new PairFunction<AdClickData, AdClickData, Integer>() {
            @Override
            public Tuple2<AdClickData, Integer> call(AdClickData data) throws Exception {
                return new Tuple2<>(data, 1);
            }
        });

        // Per-batch click count for each distinct AdClickData key.
        JavaPairDStream<AdClickData, Integer> reduceDS = mapDS.reduceByKey(new Function2<Integer, Integer, Integer>() {
            @Override
            public Integer call(Integer v1, Integer v2) throws Exception {
                return v1 + v2;
            }
        });

        // Upsert each batch's counts into MySQL, one connection per partition.
        reduceDS.foreachRDD(new VoidFunction<JavaPairRDD<AdClickData, Integer>>() {
            @Override
            public void call(JavaPairRDD<AdClickData, Integer> rdd) throws Exception {
                rdd.foreachPartition(new VoidFunction<Iterator<Tuple2<AdClickData, Integer>>>() {
                    @Override
                    public void call(Iterator<Tuple2<AdClickData, Integer>> it) throws Exception {
                        // try-with-resources: previously the connection and statement were
                        // never closed if executeUpdate threw, leaking JDBC resources.
                        try (Connection conn = JDBCUtil.getConnection();
                             PreparedStatement pstat = conn.prepareStatement(
                                     "insert into area_city_ad_count ( dt, area, city, adid, count )"
                                             + " values ( ?, ?, ?, ?, ? )"
                                             + " on DUPLICATE KEY UPDATE count = count + ?")) {
                            // Hoisted out of the per-record loop: SimpleDateFormat is costly
                            // to construct and is used single-threaded here, so one instance
                            // per partition is safe. (java.time would be preferable; kept to
                            // match the file's current API level.)
                            SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd");
                            while (it.hasNext()) {
                                Tuple2<AdClickData, Integer> tuple = it.next();
                                AdClickData data = tuple._1;
                                // assumes getTs() is epoch millis as a numeric string — TODO confirm
                                String day = sdf.format(new java.util.Date(Long.parseLong(data.getTs())));
                                pstat.setString(1, day);
                                pstat.setString(2, data.getArea());
                                pstat.setString(3, data.getCity());
                                pstat.setString(4, data.getAd());
                                pstat.setInt(5, tuple._2);
                                pstat.setInt(6, tuple._2); // increment on duplicate key
                                pstat.executeUpdate();
                            }
                        }
                    }
                });
            }
        });

        // 1. Start the receiver / streaming job.
        jssc.start();
        // 2. Block until the streaming context is terminated.
        jssc.awaitTermination();
    }
}
