package com.central.service

import java.util.{Properties, UUID}

import com.alibaba.fastjson.{JSONArray, JSONObject}
import com.central.utils.{ConfigUtil, ParseTime}
import org.apache.spark.sql.{SaveMode, SparkSession}
import org.elasticsearch.spark.rdd.EsSpark

/**
 * Classifies a tracked identifier (e.g. an IMSI) as a "day" or "night" actor
 * from the capture times of its sightings stored in Elasticsearch, then appends
 * the winning label as one row of the MySQL day/night result table.
 */
class DayNightService {
  // JDBC endpoint, ES index and target table all come from the shared config file.
  val JDBC_URL = ConfigUtil.getProperties().getProperty("mysql.url")
  val WIFI_ES_INDEX: String = ConfigUtil.getProperties().getProperty("follow.es.index")
  val TABLE_DAYNIGHT_RESULT = ConfigUtil.getProperties().getProperty("table.daynight.result")

  /**
   * Runs the analysis for the identifier carried by `condition` and appends a
   * single result row to MySQL: "day", "night", or "notFind" when Elasticsearch
   * has no sightings for the identifier at all.
   */
  def dayNightAnalysis(sparkSession: SparkSession, condition: NightCondition) = {
    val code = condition.code
    val code_type = condition.codetype
    val jobid = condition.jobid

    // FIX: JDBC credentials were hard-coded to root/root. Read them from the
    // config file, keeping the old values as defaults for backward compatibility.
    val properties = new Properties()
    properties.put("user", ConfigUtil.getProperties().getProperty("mysql.user", "root"))
    properties.put("password", ConfigUtil.getProperties().getProperty("mysql.password", "root"))

    val esRdd = loadEsByCode(sparkSession, WIFI_ES_INDEX, code, code_type)
    import sparkSession.implicits._

    // FIX: was a `var`; the reference is never reassigned, so `val` suffices.
    val reduceRdd =
      if (esRdd.isEmpty()) {
        // No sightings at all: emit a single "notFind" marker row.
        sparkSession
          .sparkContext
          .parallelize(
            Array(
              NightAnalayszResult(
                UUID.randomUUID() + "", code, code_type,
                ParseTime.LongToDate(System.currentTimeMillis() + ""), jobid, "notFind")
            )
          )
      } else {
        esRdd
          .distinct()
          // NOTE(review): _._3 is the "HH:mm:ss" part of captime, so this groups
          // sightings by exact time-of-day string, not by calendar day — confirm
          // that this is the intended grouping key.
          .groupBy(_._3)
          .map { case (_, records) =>
            val sightings = records.toList
            // 21:00-05:59 counts as night, 06:00-20:59 as day. `toInt` assumes
            // the hour field is always a well-formed numeric "HH" string.
            val nightDataSize = sightings.count(data => data._2.toInt > 20 || data._2.toInt < 6)
            val dayDataSize = sightings.count(data => data._2.toInt <= 20 && data._2.toInt >= 6)
            // FIX: mutable `var day_night` replaced by an if-expression.
            val day_night = if (dayDataSize < nightDataSize) "night" else "day"
            (day_night, 1)
          }
          .reduceByKey(_ + _)
          // Keep only the label with the highest vote count.
          .sortBy(_._2, ascending = false)
          .zipWithIndex()
          .filter(_._2 == 0)
          .map(_._1)
          .map(winner =>
            NightAnalayszResult(
              UUID.randomUUID() + "", code, code_type,
              ParseTime.LongToDate(System.currentTimeMillis() + ""), jobid, winner._1)
          )
      }

    reduceRdd
      .toDF()
      .write
      .mode(SaveMode.Append)
      .jdbc(JDBC_URL, TABLE_DAYNIGHT_RESULT, properties)
  }

  /**
   * Loads every ES document in `index` whose `code_type` field equals `code`
   * and maps each hit to a (code, hour, time-of-day, device_code) tuple.
   */
  def loadEsByCode(sparkSession: SparkSession, index: String, code: String, code_type: String) = {
    // Build the query {"bool": {"filter": [{"term": {<code_type>: <code>}}]}}.
    val queryParam = new JSONObject()
    val queryBool = new JSONObject()
    val queryTerm = new JSONObject()
    val query_code = new JSONObject()
    val queryFilters = new JSONArray()
    query_code.put(code_type, code)
    queryTerm.put("term", query_code)
    queryFilters.add(queryTerm)
    queryBool.put("filter", queryFilters)
    queryParam.put("bool", queryBool)
    println(queryParam.toJSONString)

    val esRdd = EsSpark.esRDD(sparkSession.sparkContext, index, queryParam.toJSONString)

    esRdd.map(data => {
      // NOTE(review): `.get` on these Options throws NoSuchElementException when
      // a field is missing from a hit — confirm the ES mapping guarantees them.
      val code = data._2.get(code_type).get + ""
      val captime = data._2.get("captime").get + ""
      // captime looks like "yyyy-MM-dd HH:mm:ss": take the time part, then its hour.
      val timeOfDay = captime.split(" ")(1)
      val hour = timeOfDay.split(":")(0)
      val device_code = data._2.get("device_code").get + ""
      (code, hour, timeOfDay, device_code)
    })
  }

  /**
   * Local smoke test for the analysis.
   * NOTE(review): a `main` defined on a class (not an object) is not a JVM
   * entry point — the companion object's `main` below is the runnable one.
   */
  def main(args: Array[String]): Unit = {
    val condition = NightCondition(132, "460068832908567", "imsi")
    val sparksession = SparkSession
      .builder()
      .appName("testRecord")
      .master("local")
      // FIX: use getProperties() like the rest of the class instead of the
      // bare `properties` field, for consistency.
      .config("es.nodes", ConfigUtil.getProperties().getProperty("elasticsearch.host", "192.168.5.180"))
      .config("es.port", ConfigUtil.getProperties().getProperty("elasticsearch.port", "9200"))
      .config("es.mapping.date.rich", "false")
      .getOrCreate()
    sparksession.sparkContext.setLogLevel("error")

    dayNightAnalysis(sparksession, condition)
  }
}

/**
 * FIX: JVM-runnable entry point. `DayNightService.main` previously lived only
 * on the class, so `spark-submit --class com.central.service.DayNightService`
 * could not launch it; this companion delegates to the class-level smoke test.
 */
object DayNightService {
  def main(args: Array[String]): Unit = new DayNightService().main(args)
}
/**
 * One row of the day/night analysis result table.
 * NOTE(review): "Analaysz" is a typo for "Analysis"; the name is kept because
 * renaming the case class would break existing callers and the JDBC mapping.
 *
 * @param resultid   random UUID identifying this result row
 * @param code       the analysed identifier value (e.g. an IMSI string)
 * @param codetype   the kind of identifier (e.g. "imsi")
 * @param createtime creation timestamp formatted via ParseTime.LongToDate
 * @param jobid      id of the job that produced this result
 * @param daynight   classification label: "day", "night" or "notFind"
 */
case class NightAnalayszResult(
                              resultid:String,
                              code:String,
                              codetype:String,
                              createtime:String,
                              jobid:Integer,
                              daynight:String
                              )
/**
 * Input parameters for one day/night analysis run.
 *
 * @param jobid    id of the analysis job requesting the classification
 * @param code     identifier value to look up in Elasticsearch (e.g. an IMSI)
 * @param codetype Elasticsearch field name holding the identifier (e.g. "imsi")
 */
case class NightCondition(jobid:Integer,code:String,codetype:String)