package com.central.wifi

import java.sql.{DriverManager, ResultSet}
import java.text.SimpleDateFormat
import java.util.{Date, Properties, UUID}

import com.alibaba.fastjson.{JSON, JSONArray, JSONObject}
import com.central.utils.SaveData.{password, url, username}
import com.central.utils._
import org.apache.kafka.clients.consumer.ConsumerRecord
import org.apache.kafka.common.serialization.{IntegerDeserializer, StringDeserializer, StringSerializer}
import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.{SaveMode, SparkSession}
import org.apache.spark.streaming.dstream.{DStream, InputDStream}
import org.apache.spark.streaming.kafka010.{ConsumerStrategies, KafkaUtils, LocationStrategies}
import org.apache.spark.streaming.{Seconds, StreamingContext}
import org.elasticsearch.spark.rdd.EsSpark


/**
 * Streaming job that derives pairwise relations between wireless codes seen by
 * the same capture device within one micro-batch.
 *
 * Pipeline: Kafka JSON records -> group sightings per device -> build each
 * unordered pair of distinct codes once -> count co-occurrences in Redis ->
 * write every raw pair to Elasticsearch; when a pair's counter reaches the
 * configured threshold, insert it into MySQL, and update the stored count on
 * every subsequent co-occurrence.
 */
object WifiEachotherRelation {

  /** Fully-qualified MySQL JDBC driver class used by both persistence helpers. */
  private val JdbcDriver = "com.mysql.cj.jdbc.Driver"

  /**
   * Registers the MySQL driver. Unlike the original `catch { case e => e }`,
   * a missing driver is reported instead of being silently discarded —
   * every subsequent getConnection would fail anyway.
   */
  private def loadJdbcDriver(): Unit =
    try Class.forName(JdbcDriver)
    catch {
      case e: ClassNotFoundException =>
        System.err.println("MySQL JDBC driver not found: " + e.getMessage)
    }

  def main(args: Array[String]): Unit = {
    val sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss")
    // Co-occurrence count at which a pair is first persisted to MySQL.
    val duration: Int = ConfigUtil.properties.getProperty("wifi.code.ecrelation.threshold", "5").toInt
    // Micro-batch interval in seconds.
    val seconds = ConfigUtil.properties.getProperty("wifi.code.ecrelation.seconds", "15").toLong
    // Partition count used for the code/sighting join below.
    val partition = ConfigUtil.properties.getProperty("wifi.code.ecrelation.partition", "3").toInt

    val conf = new SparkConf().setAppName("WifiEachotherRelation").setMaster("local[6]")
      .set("es.nodes", ConfigUtil.properties.getProperty("elasticsearch.host", "192.168.5.180"))
      .set("es.port", ConfigUtil.properties.getProperty("elasticsearch.port", "9200"))
      .set("es.mapping.date.rich", "false")
      .set("spark.speculation", "true")
      .set("spark.speculation.interval", "1000")
      .set("spark.speculation.quantile", "0.9")
      .set("spark.speculation.multiplier", "1.5")
      .set("spark.storage.memoryFraction", "0.6")
      .set("spark.streaming.unpersist", "true")
      .set("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
      .set("spark.streaming.blockInterval", "1s")
      .set("spark.driver.extraJavaOptions", "-Xss30M")
      .registerKryoClasses(Array(classOf[String], classOf[WifiRelation]))

    val ssc = new StreamingContext(conf, Seconds(seconds))
    val session = SparkSession.builder().config(conf).getOrCreate()
    session.sparkContext.setLogLevel("ERROR")

    val kafkaparam = Map(
      "bootstrap.servers" -> "192.168.5.180:9092",
      "key.deserializer" -> classOf[IntegerDeserializer],
      "value.deserializer" -> classOf[StringDeserializer],
      "group.id" -> "wifiEtRelation",
      "auto.offset.reset" -> "latest",
      "enable.auto.commit" -> (false: java.lang.Boolean)
    )
    val topics = Array("wifiEtRelation1")

    KafkaUtils.createDirectStream(
      ssc,
      LocationStrategies.PreferConsistent,
      // Keys are deserialized with IntegerDeserializer, so the stream is typed
      // [Integer, String] (it was [String, String], a latent ClassCastException).
      // Only record values are consumed below.
      ConsumerStrategies.Subscribe[Integer, String](topics, kafkaparam)
    ).map { record =>
      // Each value is a JSON object describing one code sighting.
      val jsb = JSON.parse(record.value()).asInstanceOf[JSONObject]
      val dev = jsb.get("device_code").toString
      (dev, Wifi(jsb.get("dataid").toString, jsb.get("code").toString,
        jsb.get("codetype").toString,
        dev, jsb.get("captime").toString))
    }.foreachRDD { rdd =>
      rdd.cache()
      // Distinct devices that reported anything in this batch.
      val deviceCodes = rdd.map(_._1).distinct().collect()
      println(sdf.format(new Date) + "----此次处理设备数-----" + deviceCodes.length)
      deviceCodes.foreach(d => processDevice(rdd, d, duration, partition, sdf))
      rdd.unpersist()
    }

    ssc.start()
    ssc.awaitTermination()
  }

  /**
   * Handles one device's sightings for the current batch: builds every
   * unordered pair of distinct codes, updates the pair counters in Redis,
   * writes all pairs to Elasticsearch and persists threshold events to MySQL.
   */
  private def processDevice(rdd: RDD[(String, Wifi)], device: String,
                            duration: Int, partition: Int,
                            sdf: SimpleDateFormat): Unit = {
    val devRdd: RDD[(String, Wifi)] = rdd.filter(_._1 == device).map(s => (s._2.code, s._2))
    // Give each distinct code a stable index so each unordered pair is built once.
    val codes = devRdd.map(_._1).distinct().zipWithIndex
    // (index, sighting) pairs for this device.
    val indexed = codes.join(devRdd, partition).map(_._2)

    // One cartesian block per code: its sightings × sightings of higher-indexed
    // codes. BUG FIX: the original union branch filtered with `dir._1 == s._1`,
    // comparing the Long index against the String code — always false — so every
    // block after the first was empty and most pairs were silently dropped.
    val blocks = codes.collect().map { case (_, idx) =>
      indexed.filter(_._1 == idx).map(_._2)
        .cartesian(indexed.filter(_._1 > idx).map(_._2))
    }

    // reduceOption: a device with no pairs simply produces no RDD.
    blocks.reduceOption(_ union _).foreach { cartesianRdd =>
      val checkRdd: RDD[(Integer, (Wifi, Wifi))] = cartesianRdd.map { pair =>
        // Redis returns the updated co-occurrence count; a negative value signals
        // the pair is stored in the opposite (src, tar) orientation, so swap it.
        val flag: Integer = RedisUtil.checkAndUpdateWifiRelation(pair._1.code, pair._2.code, duration)
        if (flag < 0) (flag * -1, (pair._2, pair._1)) else (flag, pair)
      }
      checkRdd.cache()
      try {
        // Every observed pair goes to Elasticsearch.
        val esArr: RDD[WifiRelation] = checkRdd.map(s =>
          WifiRelation(s._2._2.dataid, s._2._1.code, s._2._2.code, s._2._2.captime))
        EsSpark.saveToEs(esArr, "stand_wifi_relation/wifirelation")

        // Counter just reached the threshold -> first persistence of the pair.
        val reachedThreshold = checkRdd.filter(_._1 == duration).map(s =>
          WifiRelationResult(UUID.randomUUID().toString, s._2._1.code, s._2._1.codetype,
            s._2._2.code, s._2._2.codetype, 0, 0, duration, 0, System.currentTimeMillis())
        ).collect()
        insertResult(reachedThreshold)

        // Counter above the threshold -> refresh the stored count.
        val aboveThreshold = checkRdd.filter(_._1 > duration)
          .map(t => (t._2._1.code, t._2._2.code, t._1.longValue(), 0L)).collect()
        updateResult(aboveThreshold)
      } finally {
        // The original cached but never released this RDD.
        checkRdd.unpersist()
      }
    }
    println(sdf.format(new Date) + "   end device " + device)
  }

  /**
   * Updates the stored count and last-seen time for already persisted pairs.
   * Runs as one committed batch; connection and statement are always closed
   * (the original leaked both on any exception).
   *
   * @param array tuples of (srccode, tarcode, counts, lasttime)
   */
  def updateResult(array: Array[(String, String, Long, Long)]): Unit = {
    if (array.nonEmpty) {
      loadJdbcDriver()
      val connection = DriverManager.getConnection(url, username, password)
      try {
        connection.setAutoCommit(false)
        // Plain forward-only statement: scroll-sensitive result sets are
        // meaningless for an UPDATE batch.
        val statement = connection.prepareStatement(
          "update  t_wifi_eachother_relation set counts =? " +
            ", lasttime=? where srccode = ? and tarcode = ? ")
        try {
          array.foreach { s =>
            statement.setLong(1, s._3)
            statement.setLong(2, s._4)
            statement.setString(3, s._1)
            statement.setString(4, s._2)
            statement.addBatch()
          }
          statement.executeBatch()
          connection.commit()
        } finally {
          statement.close()
        }
      } finally {
        connection.close()
      }
    }
  }

  /**
   * Inserts newly related code pairs into t_wifi_eachother_relation.
   * Connection and statement are always closed (the original leaked both
   * on any exception).
   */
  def insertResult(wifiDetails: Array[WifiRelationResult]): Unit = {
    if (wifiDetails.nonEmpty) {
      loadJdbcDriver()
      val connection = DriverManager.getConnection(url, username, password)
      try {
        val statement = connection.prepareStatement(
          "insert into t_wifi_eachother_relation values(?,?,?,?,?,?,?,?,?,?)")
        try {
          wifiDetails.foreach { w =>
            statement.setString(1, w.id)
            statement.setString(2, w.srccode)
            statement.setString(3, w.srctype)
            statement.setString(4, w.tarcode)
            statement.setString(5, w.tartype)
            statement.setLong(6, w.firsttime)
            statement.setLong(7, w.lasttime)
            statement.setLong(8, w.counts)
            statement.setDouble(9, w.confidence)
            statement.setLong(10, w.createtime)
            statement.addBatch()
          }
          statement.executeBatch()
        } finally {
          statement.close()
        }
      } finally {
        connection.close()
      }
    }
  }
}

/** One captured wireless-code sighting, parsed from a Kafka JSON record in main.
  *
  * @param dataid      id of the raw capture record
  * @param code        the captured wireless identifier
  * @param codetype    category of the identifier — presumably MAC/IMSI; confirm with the producer
  * @param device_code code of the capturing device (also used as the grouping key)
  * @param captime     capture time, kept as the string received in the JSON payload
  */
case class Wifi(dataid: String,
                code: String,
                codetype: String,
                device_code: String,
                captime: String)

/** A single source→target code co-occurrence written to Elasticsearch
  * (index "stand_wifi_relation/wifirelation"); registered with Kryo in main.
  *
  * @param dataid   id of the capture record the pair was derived from
  * @param src_code source code of the pair
  * @param tag_code target code of the pair
  * @param captime  capture time string carried over from the sighting
  */
case class WifiRelation(dataid: String,
                        src_code: String,
                        tag_code: String,
                        captime: String)

/** Row persisted to MySQL table t_wifi_eachother_relation once a pair's
  * co-occurrence count reaches the configured threshold.
  *
  * @param id         random UUID assigned at insert time
  * @param srccode    source code of the pair
  * @param srctype    source code category
  * @param tarcode    target code of the pair
  * @param tartype    target code category
  * @param firsttime  first-seen time — inserted as 0 by main; TODO confirm who fills it
  * @param lasttime   last-seen time — inserted as 0 by main, updated via updateResult
  * @param counts     co-occurrence count (the threshold value at insert time)
  * @param confidence relation confidence — inserted as 0 by main; semantics unconfirmed
  * @param createtime insert timestamp in epoch millis
  */
case class WifiRelationResult(id: String,
                              srccode: String,
                              srctype: String,
                              tarcode: String,
                              tartype: String,
                              firsttime: Long,
                              lasttime: Long,
                              counts: Long,
                              confidence: Double,
                              createtime:Long)
