package com.zhang.sparkstreaming_2

import org.apache.kafka.clients.consumer.{ConsumerConfig, ConsumerRecord}
import org.apache.spark.SparkConf
import org.apache.spark.streaming.dstream.{DStream, InputDStream}
import org.apache.spark.streaming.kafka010.{ConsumerStrategies, KafkaUtils, LocationStrategies}
import org.apache.spark.streaming.{Duration, StreamingContext}

import java.sql.{DriverManager, PreparedStatement}
import java.text.SimpleDateFormat
import java.util.Date

/**
 * Real-time count of ad-click traffic per day / area / city / ad, persisted
 * to MySQL via an upsert (`ON DUPLICATE KEY UPDATE count = count + ?`).
 *
 * Input records (Kafka topic "spark_test") are space-separated:
 * `<timestampMillis> <area> <city> <userId> <adId>` — field(3) (userId) is
 * intentionally not part of the aggregation key.
 *
 * @author zhang
 * @since 2022/2/21 11:12
 */
object SparkStreaming07_req2_plus {

  def main(args: Array[String]): Unit = {
    val conf: SparkConf = new SparkConf().setMaster("local[*]").setAppName("spark streaming")
    // 5-second micro-batches.
    val ssc = new StreamingContext(conf, Duration(5 * 1000L))

    // Kafka consumer configuration. Use ConsumerConfig constants for the
    // deserializer keys too, consistent with the other entries, so typos are
    // caught at compile time instead of failing at runtime.
    val kafkaPara: Map[String, String] = Map[String, String](
      ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG -> "hadoop102:9092",
      ConsumerConfig.GROUP_ID_CONFIG -> "zhang",
      ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG -> "org.apache.kafka.common.serialization.StringDeserializer",
      ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG -> "org.apache.kafka.common.serialization.StringDeserializer"
    )

    // Read from Kafka.
    val kafkaDStream: InputDStream[ConsumerRecord[String, String]] = KafkaUtils.createDirectStream[String, String](ssc,
      LocationStrategies.PreferConsistent,
      ConsumerStrategies.Subscribe[String, String](Set("spark_test"), kafkaPara))

    val kafkaDS: DStream[String] = kafkaDStream.map(_.value())

    // Key = (day, area, city, adId), value = one click; summed per batch.
    val result: DStream[((String, String, String, String), Int)] = kafkaDS.map(
      line => {
        val fields: Array[String] = line.split(" ")
        // SimpleDateFormat is not thread-safe; keep it local to the task.
        val sdf = new SimpleDateFormat("yyyy-MM-dd")
        ((sdf.format(new Date(fields(0).toLong)), fields(1), fields(2), fields(4)), 1)
      }
    ).reduceByKey(_ + _)

    result.foreachRDD { rdd =>
      rdd.foreachPartition { iter =>
        // Skip empty partitions entirely — no point opening a connection.
        // (Iterator.nonEmpty only peeks via hasNext; it consumes nothing.)
        if (iter.nonEmpty) {
          val conn = JDBCUtil.getConnection
          try {
            // Prepare the upsert ONCE per partition and reuse it for every
            // record, instead of compiling a new PreparedStatement per row.
            val stmt: PreparedStatement = conn.prepareStatement(
              """
                | insert into area_city_ad_count
                | (dt, area, city, adid, count)
                | values (?, ?, ?, ?, ?)
                | ON DUPLICATE KEY
                | UPDATE count = count + ?
                |""".stripMargin)
            try {
              iter.foreach {
                case ((day, area, city, ad), cnt) =>
                  println(((day, area, city, ad), cnt))
                  stmt.setString(1, day)
                  stmt.setString(2, area)
                  stmt.setString(3, city)
                  stmt.setString(4, ad)
                  stmt.setLong(5, cnt)
                  // The increment for an existing row must match the insert.
                  stmt.setLong(6, cnt)
                  stmt.executeUpdate()
              }
            } finally {
              // Close in finally so a SQL failure cannot leak the statement.
              stmt.close()
            }
          } finally {
            // Always return the connection, even if an update threw.
            conn.close()
          }
        }
      }
    }

    // Start the receiver and block until the streaming context is stopped.
    ssc.start()
    ssc.awaitTermination()
  }
}
