package com.central.resident.streaming

import java.util.{Date, Properties}

import com.alibaba.fastjson.{JSON, JSONObject}
import com.central.utils.{ConfigUtil, ParseTime, RedisUtil}
import org.apache.kafka.clients.consumer.ConsumerRecord
import org.apache.kafka.common.serialization.StringDeserializer
import org.apache.spark.sql.{SaveMode, SparkSession}
import org.apache.spark.streaming.dstream.InputDStream
import org.apache.spark.streaming.kafka010.{ConsumerStrategies, KafkaUtils, LocationStrategies}
import org.apache.spark.streaming.{Seconds, StreamingContext}
import org.apache.spark.{SparkConf, SparkContext}

object ResidentStreaming {

  /** JDBC url and destination table, resolved once from the shared config file. */
  val URL: String = ConfigUtil.properties.getProperty("mysql.url")
  val LOAD_TABLE: String = ConfigUtil.properties.getProperty("mysql.resident.table.load")

  /**
   * Streaming entry point: consumes resident capture records (JSON) from Kafka,
   * routes each through a Redis-backed de-duplication accumulator, and appends
   * the surviving records to MySQL once per 20-second micro-batch.
   */
  def main(args: Array[String]): Unit = {
    val configs = ConfigUtil.properties
    val residentTopic = configs.getProperty("resident.topic")
    val kafkaServers = configs.getProperty("resident.bootstrap.servers")
    val kafkaGroupId = configs.getProperty("resident.group.id")

    val conf = new SparkConf().setAppName("ResidentStreaming")
      .setMaster("spark://192.168.5.180:7077")
//      .setMaster("local[4]")
    // NOTE(review): master is hard-coded; prefer supplying it via spark-submit --master.
    val sc = new SparkContext(conf)
    sc.setLogLevel("ERROR")
    val ssc = new StreamingContext(sc, Seconds(20))
    val session = SparkSession.builder().config(conf).getOrCreate()

    // JDBC credentials: read from config when present; fall back to the previous
    // hard-coded defaults so existing deployments keep working unchanged.
    val properties = new Properties()
    properties.put("user", configs.getProperty("mysql.user", "root"))
    properties.put("password", configs.getProperty("mysql.password", "root"))
    // Broadcast once so every executor reuses the same connection properties.
    val prop = sc.broadcast(properties)

    val kafkaParams = Map(
      "bootstrap.servers" -> kafkaServers,
      "key.deserializer" -> classOf[StringDeserializer],
      "value.deserializer" -> classOf[StringDeserializer],
      "group.id" -> kafkaGroupId,
      "auto.offset.reset" -> "latest",
      // Offsets are managed by the consumer group; auto-commit disabled.
      "enable.auto.commit" -> (false: java.lang.Boolean)
    )
    val topics = Array(residentTopic)

    val stream: InputDStream[ConsumerRecord[String, String]] = KafkaUtils.createDirectStream(
      ssc,
      LocationStrategies.PreferConsistent,
      ConsumerStrategies.Subscribe[String, String](topics, kafkaParams)
    )

    // Only the message payload (JSON string) is needed downstream.
    val valueStream = stream.map(_.value())

    valueStream.foreachRDD { rdd =>
      // Skip the DataFrame conversion and JDBC round-trip for empty micro-batches.
      if (!rdd.isEmpty()) {
        val saveRdd = rdd
          .map { data =>
            val record = JSON.parseObject(data)
            val imsi = record.getString("imsi")
            val mac = record.getString("mac")
            val imei = record.getString("imei")
            val time = record.getString("captime")
            // First non-blank identifier wins, priority imsi > mac > imei;
            // falls back to ("", "") when none is present (original behavior).
            val (parm, codeType) =
              if (imsi != null && imsi.trim.nonEmpty) (imsi, "imsi")
              else if (mac != null && mac.trim.nonEmpty) (mac, "mac")
              else if (imei != null && imei.trim.nonEmpty) (imei, "imei")
              else ("", "")
            // Redis accumulator returns a tuple whose first element flags whether
            // this (code, captime) pair should be persisted — TODO confirm contract.
            RedisUtil.accomulatorResident(parm + "_" + time, codeType)
          }
          .filter(_._1) // keep only records Redis flagged for persistence
          .map(x => saveResidentMySQL(x._2, x._3, ParseTime.sdf_day.format(new Date)))

        import session.implicits._
        saveRdd.toDF().write.mode(SaveMode.Append).jdbc(URL, LOAD_TABLE, prop.value)
      }
    }

    ssc.start()
    ssc.awaitTermination()
  }
}

/**
 * Row model for the resident load table written by [[ResidentStreaming]].
 *
 * @param code        device identifier value (imsi / mac / imei)
 * @param code_type   which identifier `code` holds: "imsi", "mac" or "imei"
 * @param create_time day-formatted timestamp of when the row was produced
 *
 * NOTE(review): name violates UpperCamelCase convention for classes; kept
 * as-is because the streaming job in this file constructs it by this name.
 */
final case class saveResidentMySQL(
                                    code: String,
                                    code_type: String,
                                    create_time: String
                                  )
