package com.central.service

import java.sql.DriverManager
import java.util.{Calendar, Date, Properties, UUID}

import scala.util.control.NonFatal

import com.alibaba.fastjson.{JSONArray, JSONObject}
import com.central.utils.{ConfigUtil, ParseTime}
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.{SaveMode, SparkSession}
import org.elasticsearch.spark.rdd.EsSpark

class GangFindService {
  // Table names and JDBC/ES settings, resolved once from the application config.
  val TABLE_CRIME_STATUS = ConfigUtil.properties.getProperty("table.gang.task")
  val TABLE_GANG_RESULT = ConfigUtil.properties.getProperty("table.gang.result")
  val TABLE_HISTORY_FELLOW = ConfigUtil.properties.getProperty("table.relation")
  val TABLE_FOCUS_PERSON = ConfigUtil.properties.getProperty("table.focus_person")
  val JDBC_URL = ConfigUtil.properties.getProperty("mysql.url")
  val USER_NAME = ConfigUtil.properties.getProperty("mysql.username")
  val PASSWORD = ConfigUtil.properties.getProperty("mysql.password")
  val ES_INDEX = ConfigUtil.properties.getProperty("gang.es.index")

  // JDBC credentials for the Spark DataFrame reader/writer.
  // FIX: these were hard-coded to "root"/"root" while updateStatus() used the
  // configured USER_NAME/PASSWORD against the same database — now consistent.
  // Assumes mysql.username / mysql.password are present in the config
  // (they already had to be, for updateStatus to work).
  val properties: Properties = new Properties()
  properties.put("user", USER_NAME)
  properties.put("password", PASSWORD)

  /**
   * Entry point of one gang-finding job.
   *
   * Loads capture records from Elasticsearch for the requested devices and
   * time window, then optionally narrows them by:
   *   1. historical fellow (伴随) relations to the task's source code,
   *   2. presence in the focus-person (案底) table,
   *   3. a life-pattern check over the last `accumulatedays` days.
   * Survivors are written to the MySQL result table, and the task row is
   * marked finished with the result count in both cases.
   */
  def findCrime(sparkSession: SparkSession, condition: GangFindCondition): Unit = {
    val starttime = condition.starttime
    val endtime = condition.endtime
    val device_codes = condition.device_codes
    val activitytimes = condition.activitytime
    val code = condition.code
    val job_id = condition.job_id
    val hashisrelation = condition.hashisrelation
    val hascriminalrecord = condition.hascriminalrecord
    val hasliferow = condition.hasliferow
    val accumulatedays = condition.accumulatedays
    var resultCount = 0

    // FIX: loadEsData returns null when the ES query matches nothing; the old
    // code called cache()/count() on it *before* the null check and would NPE.
    val esRdd = loadEsData(sparkSession, starttime, endtime, device_codes, ES_INDEX)
    if (esRdd != null) {
      esRdd.cache()

      // Step 1 — fellow-relation filter. Values carry the packed payload
      // "code_codeType_captime_address_count", where count is the relation
      // strength (or the constant "1" when the filter is disabled).
      val fellowFilterRdd =
        if (hashisrelation == 1) {
          val fellowRdd = loadFellowData(sparkSession, code)
          esRdd
            .map(x => (x.code, x.code + "_" + x.code_type + "_" + x.captime + "_" + x.address))
            .join(fellowRdd)
            .map { case (c, (payload, count)) => (c, payload + "_" + count) }
        } else {
          esRdd.map(x =>
            (x.code, x.code + "_" + x.code_type + "_" + x.captime + "_" + x.address + "_" + "1"))
        }

      import sparkSession.implicits._

      // Step 2 — keep only codes present in the focus-person table.
      val crimeFilterRdd =
        if (hascriminalrecord == 1) {
          val historyCrimeRdd = loadCrimeData(sparkSession)
          fellowFilterRdd
            .join(historyCrimeRdd)
            .map { case (c, (payload, _)) => (c, payload) }
        } else {
          fellowFilterRdd
        }

      // Step 3 — life-pattern filter over recent activity.
      // NOTE(review): loadEsRecent keys its output by "code_day" while this
      // side is keyed by the bare code, so the join can only match when the
      // day suffix is empty — verify the intended semantics with the author.
      val resultRdd =
        if (hasliferow == 1) {
          val recentRdd = loadEsRecent(sparkSession, ES_INDEX, accumulatedays, activitytimes).cache()
          crimeFilterRdd
            .join(recentRdd)
            .map { case (c, (payload, _)) => (c, payload) }
        } else {
          crimeFilterRdd
        }

      resultRdd.cache()
      resultCount = resultRdd.count().toInt
      resultRdd
        .map { case (_, payload) =>
          // payload layout: code_codeType_captime_address_count (see step 1)
          val parts = payload.split("_")
          GangResult(
            UUID.randomUUID().toString,
            parts(1),        // code type (mac / imsi / imei)
            parts(4).toLong, // coincide count
            ParseTime.LongToDate(System.currentTimeMillis() + ""),
            job_id.toLong,
            parts(0))        // the code itself
        }
        .toDF()
        .write
        .mode(SaveMode.Append)
        .jdbc(JDBC_URL, TABLE_GANG_RESULT, properties)
    }

    updateStatus(job_id, resultCount)
  }

  /**
   * Loads capture records from ES for the given devices and optional time
   * range, normalises each document to [[EsWifiData]] (preferring mac over
   * imsi over imei as the identifying code) and keeps only the most recent
   * record per code.
   *
   * @return the de-duplicated RDD, or null when the query matched nothing
   *         (legacy contract kept for existing callers — see findCrime).
   */
  def loadEsData(session: SparkSession, starttime: String, endtime: String,
                 device_codes: String, index: String): RDD[EsWifiData] = {
    val devices = device_codes.split(",")

    // Build {"bool":{"filter":[{"terms":{"device_code":[...]}}, {"range":{"captime":{...}}}?]}}
    val queryDevice = new JSONObject()
    queryDevice.put("device_code", devices)
    val queryTerms = new JSONObject()
    queryTerms.put("terms", queryDevice)
    val queryFilters = new JSONArray()
    queryFilters.add(queryTerms)
    if (starttime != null && !starttime.isEmpty && endtime != null && !endtime.isEmpty) {
      val queryTime = new JSONObject()
      queryTime.put("gte", starttime)
      queryTime.put("lte", endtime)
      val querycaptime = new JSONObject()
      querycaptime.put("captime", queryTime)
      val queryRange = new JSONObject()
      queryRange.put("range", querycaptime)
      queryFilters.add(queryRange)
    }
    val queryBool = new JSONObject()
    queryBool.put("filter", queryFilters)
    val queryPara = new JSONObject()
    queryPara.put("bool", queryBool)

    val esData = EsSpark.esRDD(session.sparkContext, index, queryPara.toJSONString)
    if (esData.count() > 0) {
      val parsed = esData.map { case (_, doc) =>
        // FIX: the old checks did doc.get(field).get != None, which throws
        // NoSuchElementException when the field is absent from a document and
        // compares the raw value against None instead of testing presence.
        val captime = doc.get("captime").map(_ + "").getOrElse("")
        val address = doc.get("address").map(_ + "").getOrElse("")
        if (doc.get("mac").exists(_ != null)) {
          EsWifiData(doc("mac") + "", "mac", captime, address)
        } else if (doc.get("imsi").exists(_ != null)) {
          EsWifiData(doc("imsi") + "", "imsi", captime, address)
        } else {
          EsWifiData(doc.get("imei").orNull + "", "imei", captime, address)
        }
      }
      // De-duplicate: keep the capture with the latest captime per code.
      parsed
        .groupBy(_.code)
        .map { case (_, records) => records.maxBy(_.captime) }
    } else {
      // Legacy contract: null signals "no data" to the caller.
      null
    }
  }

  /**
   * Scans the last `accumulatedays` days of captures in ES and keeps codes
   * seen inside the daily activity window `activitytimes` ("HH:mm:ss,HH:mm:ss")
   * often enough.
   *
   * @return RDD of ("code_day", "code_day") pairs (value duplicates the key).
   */
  def loadEsRecent(sparkSession: SparkSession, index: String,
                   accumulatedays: Integer, activitytimes: String): RDD[(String, String)] = {
    val activeTime = activitytimes.split(",")

    // Window: midnight `accumulatedays` days ago up to midnight today.
    val calendar = Calendar.getInstance()
    calendar.setTime(new Date())
    calendar.add(Calendar.DATE, 0 - accumulatedays)
    calendar.set(Calendar.HOUR, 0)
    calendar.set(Calendar.AM_PM, 0)
    calendar.set(Calendar.MINUTE, 0)
    calendar.set(Calendar.SECOND, 0)
    val starttime = ParseTime.sdf.format(calendar.getTime)
    calendar.add(Calendar.DATE, accumulatedays)
    val endtime = ParseTime.sdf.format(calendar.getTime)

    // {"bool":{"filter":[{"match_all":{}}, {"range":{"captime":{...}}}]}}
    val queryMatchAll = new JSONObject()
    queryMatchAll.put("match_all", new JSONObject())
    val queryFilters = new JSONArray()
    queryFilters.add(queryMatchAll)
    if (starttime != null && !starttime.isEmpty && endtime != null && !endtime.isEmpty) {
      val queryTime = new JSONObject()
      queryTime.put("gte", starttime)
      queryTime.put("lte", endtime)
      val querycaptime = new JSONObject()
      querycaptime.put("captime", queryTime)
      val queryRange = new JSONObject()
      queryRange.put("range", querycaptime)
      queryFilters.add(queryRange)
    }
    val queryBool = new JSONObject()
    queryBool.put("filter", queryFilters)
    val queryPara = new JSONObject()
    queryPara.put("bool", queryBool)

    val esData = EsSpark.esRDD(sparkSession.sparkContext, index, queryPara.toJSONString).cache()

    // Normalise each document to (code, captime), preferring mac > imsi > imei.
    val parsRdd: RDD[(String, String)] = esData.map { case (_, doc) =>
      // FIX: same unsafe doc.get(field).get != None pattern as loadEsData.
      val captime = doc.get("captime").map(_ + "").getOrElse("")
      if (doc.get("mac").exists(_ != null)) (doc("mac") + "", captime)
      else if (doc.get("imsi").exists(_ != null)) (doc("imsi") + "", captime)
      else (doc.get("imei").orNull + "", captime)
    }

    parsRdd
      .filter { case (_, captime) =>
        // Keep captures whose HH:mm:ss part falls inside the activity window;
        // malformed timestamps are dropped instead of crashing the stage.
        val parts = captime.split(" ")
        parts.length > 1 && parts(1) >= activeTime(0) && parts(1) <= activeTime(1)
      }
      .map { case (c, captime) => (c + "_" + captime.split(" ")(0), 1) }
      .reduceByKey(_ + _)
      // NOTE(review): this keeps keys with >= accumulatedays captures on a
      // *single* day, keyed by "code_day". If the intent was "active on at
      // least accumulatedays distinct days", the key should be reduced to the
      // bare code before counting — confirm with the author.
      .filter(_._2 >= accumulatedays)
      .map(x => (x._1, x._1))
  }

  /**
   * Returns codes historically recorded as fellows (伴随) of `code`, as
   * (relatedCode, fellowCount) pairs keyed by the related code so capture
   * data can be joined against it.
   */
  def loadFellowData(sparkSession: SparkSession, code: String): RDD[(String, String)] = {
    // Single-element RDD carrying the task's source code, used as a join key.
    val param = sparkSession.sparkContext.parallelize(Array(code)).map(x => (x, x))

    // History table rows: (sourceCode, "relatedCode_count").
    // NOTE(review): column positions 1/3/7 mirror the original code; the
    // table schema is not visible here — verify against the DDL.
    val relationRdd = sparkSession.read.jdbc(JDBC_URL, TABLE_HISTORY_FELLOW, properties)
      .rdd
      .map(row => (row.getString(1), row.get(3) + "_" + row.get(7)))

    // Keep only rows whose source code matches the task code, then re-key by
    // the related code with its co-occurrence count.
    param
      .join(relationRdd)
      .map { case (_, (_, tarCodeCount)) =>
        val parts = tarCodeCount.split("_")
        (parts(0), parts(1))
      }
  }

  /**
   * Loads all identifying codes from the focus-person table (columns 9, 10
   * and 12 — presumably the mac/imsi/imei columns; schema not visible here)
   * as (code, code) pairs for joining.
   */
  def loadCrimeData(sparkSession: SparkSession): RDD[(String, String)] = {
    val allTypeRdd = sparkSession.read.jdbc(JDBC_URL, TABLE_FOCUS_PERSON, properties)
      .rdd
      .cache()

    // One column as a cleaned code RDD, dropping null/blank values.
    def column(idx: Int) =
      allTypeRdd
        .map(_.getString(idx))
        .filter(x => x != null && x.trim.nonEmpty)

    column(9).union(column(10)).union(column(12)).map(x => (x, x))
  }

  /**
   * Marks the task row as finished (status = 2) and records the completion
   * time and result count.
   *
   * FIX: the statement and connection are now closed in finally blocks (the
   * old code leaked both on any JDBC failure), and the driver-loading catch
   * no longer swallows fatal Throwables.
   */
  def updateStatus(jobId: String, resultcount: Int): Unit = {
    val table = TABLE_CRIME_STATUS
    try {
      Class.forName("com.mysql.cj.jdbc.Driver")
    } catch {
      // Driver may already be registered via the service loader; log and go on.
      case NonFatal(e) => e.printStackTrace()
    }
    val connection = DriverManager.getConnection(JDBC_URL, USER_NAME, PASSWORD)
    try {
      val statement = connection.prepareStatement(
        s"update  $table set status = ?, successtime=? ,resultcount=? where id = ?  "
      )
      try {
        statement.setInt(1, 2)
        statement.setString(2, ParseTime.LongToDate(System.currentTimeMillis().toString))
        statement.setInt(3, resultcount)
        statement.setInt(4, jobId.toInt)
        statement.executeUpdate()
      } finally {
        statement.close()
      }
    } finally {
      connection.close()
    }
  }


}

// Parameters of one gang-finding task, as submitted by the caller of
// GangFindService.findCrime. The has* fields are 1/0 flags enabling the
// corresponding filter stage.
case class GangFindCondition(
                              code: String,              // source code the task centres on (joined against the relation table)
                              code_type: String,         // type of `code` — not read by findCrime in this file
                              job_id: String,            // task row id; parsed with toInt/toLong downstream
                              device_codes: String,      // comma-separated device codes for the ES terms filter
                              activitytime: String,      // daily activity window "start,end" (compared as strings)
                              starttime: String,         // ES captime range start; blank/null disables the range filter
                              endtime: String,           // ES captime range end
                              hashisrelation: Integer,   // 1 = filter by historical fellow relations
                              hascriminalrecord: Integer, // 1 = filter by the focus-person table
                              hasliferow: Integer,       // 1 = apply the life-pattern (recent activity) filter
                              accumulatedays: Integer    // look-back window in days for the life-pattern filter
                            )

// One row written to the gang-result MySQL table (see findCrime); column
// names must match the table schema since the DataFrame is saved with
// SaveMode.Append.
case class GangResult(
                       resultid: String,                // random UUID per result row
                       codetype: String,                // mac / imsi / imei
                       coincidecount: java.lang.Long,   // packed count from the payload (relation strength or 1)
                       createtime: String,              // formatted "now" via ParseTime.LongToDate
                       jobid: java.lang.Long,           // owning task id
                       tagcode: String                  // the matched code itself
                       //                       lastaddress: java.lang.Long,
                       //                       lastcaptime: String
                     )

// One normalised Elasticsearch capture record (see loadEsData): the chosen
// identifier plus which field it came from, and where/when it was captured.
case class EsWifiData(
                       code: String,      // mac, imsi or imei value, whichever was present first
                       code_type: String, // "mac" | "imsi" | "imei"
                       captime: String,   // capture timestamp as stored in ES
                       address: String    // capture location/address as stored in ES
                     )
