package com.central.resident.batch_job

import java.text.SimpleDateFormat
import java.util
import java.util.{Date, Properties}

import com.alibaba.fastjson.JSONObject
import com.central.utils.{ConfigUtil, ParseTime}
import com.central.utils.es.ElasticSearchPoolUtil
import org.apache.http.HttpHost
import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.{SaveMode, SparkSession}
import org.apache.spark.streaming.{Seconds, StreamingContext}
import org.elasticsearch.action.search.SearchRequest
import org.elasticsearch.client.{RestClient, RestClientBuilder, RestHighLevelClient}
import org.elasticsearch.index.query.QueryBuilders
import org.elasticsearch.search.builder.SearchSourceBuilder
import org.elasticsearch.spark.rdd.EsSpark

object ResidentAnalysis {

  /** JDBC URL of the MySQL database (from the central config file). */
  val URL: String = ConfigUtil.properties.getProperty("mysql.url")
  /** Table holding the resident-analysis input parameters (code + code type). */
  val LOAD_TABLE: String = ConfigUtil.properties.getProperty("mysql.resident.table.load")
  /** Table the per-code top-2 device rows are appended to. */
  val SAVE_TABLE: String = ConfigUtil.properties.getProperty("mysql.resident.table.save")
  /** Elasticsearch index/type that stores the captured hit records. */
  val ES_INDEX: String = ConfigUtil.properties.getProperty("es.resident.load.index")

  /** JDBC credentials shared by every MySQL read and write below. */
  val properties = new Properties()
  properties.put("user", ConfigUtil.properties.getProperty("mysql.username"))
  properties.put("password", ConfigUtil.properties.getProperty("mysql.password"))

  /**
   * Runs the whole batch: reads the parameter table from MySQL, queries
   * Elasticsearch for every imsi/mac/imei code, counts how often each code
   * appeared on each device, and appends the top-2 devices per code to
   * `SAVE_TABLE`.
   *
   * @param string unused; kept so existing callers keep compiling
   * @param sc     the active SparkSession
   */
  def getResidentResult(string: String, sc: SparkSession) = {
    import sc.implicits._
    import scala.collection.JavaConverters._

    // Parameter rows: column index 1 is the code value, index 2 is its type
    // ("imsi" | "mac" | "imei"). Cached because it is filtered three times.
    val paramRdd = sc.read
      .jdbc(URL, LOAD_TABLE, properties)
      .rdd
      .map(row => (row.getString(1), row.getString(2)))
      .cache()

    // Collects all codes of one type to the driver.
    def codesOfType(codeType: String): Array[String] =
      paramRdd.filter(_._2 == codeType).map(_._1).collect()

    val imsiData = codesOfType("imsi")
    // MACs are lower-cased to match the casing stored in Elasticsearch.
    val macData = codesOfType("mac").map(_.toLowerCase)
    val imeiData = codesOfType("imei")

    val imsiRdd = loadEsImsi(paramRdd, imsiData.toList.asJava)
    val macRdd = loadEsMac(paramRdd, macData.toList.asJava)
    val imeiRdd = loadEsImei(paramRdd, imeiData.toList.asJava)

    // Keys produced by the loaders have the shape "<device_code>_<code>_<code_type>".
    imsiRdd.union(macRdd).union(imeiRdd)
      .map { case (key, _) =>
        val parts = key.split("_")
        CodeDevice(key, parts(1), parts(2))
      }
      .toDF()
      .createOrReplaceTempView("table1")

    // Rank devices per code by the number of hits and keep the top 2.
    sc.sql(
      """
     select code_device,code,count_device,code_type from
     (select t1.code_device,t1.code,t1.count_device,code_type, row_number() over(partition by code sort by count_device desc) rank
        from
        (select code_device,code,code_type,count(1) count_device from table1 group by code_device,code,code_type) t1)
      where rank <=2

      """.stripMargin)
      .rdd
      .map(row => {
        // code_device = "<device_code>_<code>_<code_type>"; element 0 is the device id.
        val deviceCode = row.getString(0).split("_")
        SaveResidentTop2(
          row.get(1) + "",
          deviceCode(0),
          (row.get(2) + "").toInt,
          row.getString(3),
          ParseTime.LongToDate(System.currentTimeMillis() + ""))
      })
      .toDF()
      .write.mode(SaveMode.Append).jdbc(URL, SAVE_TABLE, properties)
  }

  /**
   * Legacy loader kept for interface compatibility.
   *
   * Fixed: the original built a terms query here and then discarded it,
   * passing an empty query string to EsSpark (i.e. a match-all query); the
   * built query is now actually used.
   * NOTE(review): the resource name "stand_wifi+resident/wifi" looks like a
   * typo for "stand_wifi_resident/wifi" (cf. loadData) — confirm upstream.
   */
  def loadEsDataBySQL(param: RDD[(String, String)], codes: util.List[String]) = {
    val queryPara = new JSONObject()
    val queryCode = new JSONObject()
    queryCode.put("imsi", codes)
    val queryTerms = new JSONObject()
    queryTerms.put("terms", queryCode)
    queryPara.put("query", queryTerms)
    EsSpark.esRDD(param.sparkContext, "stand_wifi+resident/wifi", queryPara.toJSONString)
  }

  /**
   * Shared ES loader: issues a {"query":{"terms":{<field>: codes}}} search
   * against ES_INDEX and maps every hit to ("<device_code>_<code>_<field>", <code>).
   *
   * Replaces the three near-identical imsi/mac/imei copies. Also fixed:
   * the originals called Option.get on the hit's source map, throwing
   * NoSuchElementException on documents missing a field; missing values
   * now map to "" instead.
   */
  private def loadEsByField(param: RDD[(String, String)],
                            codes: util.List[String],
                            field: String): RDD[(String, String)] = {
    val termsValue = new JSONObject()
    termsValue.put(field, codes)
    val terms = new JSONObject()
    terms.put("terms", termsValue)
    val queryPara = new JSONObject()
    queryPara.put("query", terms)
    EsSpark.esRDD(param.sparkContext, ES_INDEX, queryPara.toJSONString)
      .map { case (_, source) =>
        val deviceCode = source.get("device_code").map(_.toString).getOrElse("")
        val code = source.get(field).map(_.toString).getOrElse("")
        (deviceCode + "_" + code + "_" + field, code)
      }
  }

  /** Loads ES hits for the given imsi codes; see loadEsByField. */
  def loadEsImsi(param: RDD[(String, String)], codes: util.List[String]): RDD[(String, String)] =
    loadEsByField(param, codes, "imsi")

  /** Loads ES hits for the given (lower-cased) mac codes; see loadEsByField. */
  def loadEsMac(param: RDD[(String, String)], codes: util.List[String]): RDD[(String, String)] =
    loadEsByField(param, codes, "mac")

  /** Loads ES hits for the given imei codes; see loadEsByField. */
  def loadEsImei(param: RDD[(String, String)], codes: util.List[String]): RDD[(String, String)] =
    loadEsByField(param, codes, "imei")

  /**
   * Direct (non-Spark) lookup of one code via the REST high-level client and
   * returns up to the top-2 (device_id, code, hit_count) triples.
   *
   * Fixed: the client is now closed even when the search throws (the original
   * leaked it on failure), and the extracted field follows `code_type`
   * (the original always read "imsi", even for mac/imei lookups).
   */
  def loadData(code: String, code_type: String) = {
    // Host/port were hard-coded; now read from config with the original
    // values as defaults.
    val host = ConfigUtil.properties.getProperty("elasticsearch.host", "192.168.5.180")
    val port = ConfigUtil.properties.getProperty("elasticsearch.port", "9200").toInt
    val client = new RestHighLevelClient(
      RestClient.builder(new HttpHost(host, port, "http")))
    try {
      val searchRequest = new SearchRequest("stand_wifi_resident")
      searchRequest.types("wifi")
      val builder = new SearchSourceBuilder
      builder.query(QueryBuilders.termQuery(code_type, code))
      searchRequest.source(builder)

      val hits = client.search(searchRequest).getHits

      // One ("<device>_<code>", <code>) pair per hit.
      val tuples: Array[(String, String)] = hits.getHits.map(hit => {
        val source = hit.getSourceAsMap
        val deviceId = source.get("device_code").toString
        val hitCode = source.get(code_type).toString
        (deviceId + "_" + hitCode, hitCode)
      })

      // Count occurrences per (device, code) key and keep the 2 largest.
      val top2 = tuples
        .groupBy(_._1)
        .map { case (key, group) => (key, group.length) }
        .toArray
        .sortBy(-_._2)
        .take(2)

      top2.map { case (key, cnt) =>
        val parts = key.split("_")
        (parts(0), parts(1), cnt)
      }
    } finally {
      client.close()
    }
  }

  /**
   * Entry point: builds a local SparkSession wired to Elasticsearch and runs
   * the batch once.
   */
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf().setAppName("ResidentStreaming").setMaster("local[8]")

    val sc = new SparkContext(conf)
    sc.setLogLevel("ERROR")
    // BUG fixed: the original read es host/port from the JDBC `properties`
    // object, which only holds user/password, so the hard-coded defaults
    // always won. Read from the central config instead.
    val session = SparkSession.builder().config(conf)
      .config("es.nodes", ConfigUtil.properties.getProperty("elasticsearch.host", "192.168.5.180"))
      .config("es.port", ConfigUtil.properties.getProperty("elasticsearch.port", "9200"))
      .config("es.mapping.date.rich", "false")
      .getOrCreate()

    getResidentResult("", session)
  }

}

/**
 * One row appended to the MySQL result table: a code together with one of
 * its top-2 devices and the number of hits observed there.
 *
 * @param code        the imsi/mac/imei value
 * @param device_id   device the code was seen on
 * @param count       number of hits of this code on this device
 * @param code_type   "imsi", "mac" or "imei"
 * @param create_time row creation timestamp as a formatted string
 */
case class SaveResidentTop2(code: String,
                            device_id: String,
                            count: Integer,
                            code_type: String,
                            create_time: String)

/**
 * One Elasticsearch hit flattened for SQL aggregation.
 *
 * @param code_device composite key "<device_code>_<code>_<code_type>"
 * @param code        the imsi/mac/imei value extracted from the key
 * @param code_type   "imsi", "mac" or "imei"
 */
case class CodeDevice(code_device: String, code: String, code_type: String)