package com.central.utils

import java.text.SimpleDateFormat
import java.util
import java.util.{Calendar, Date, Properties}

import com.alibaba.fastjson.{JSON, JSONArray, JSONObject}
import com.central.car.streaming.SaveEsCarData
import com.central.controller.ActivityTime
import com.central.face.streaming.SaveEsData
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.SparkSession
import org.elasticsearch.spark.rdd.EsSpark

import scala.collection.Map

object LoadESData {
  // Timestamp formatter used when building ES captime range bounds.
  // NOTE(review): SimpleDateFormat is not thread-safe; these shared vals are
  // used from several methods — confirm they are only touched on the driver
  // from a single thread.
  val sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss")
  // Day-only formatter used to parse activity-window day ranges in loadRecentEs.
  val sdf_day = new SimpleDateFormat("yyyy-MM-dd")
  // ES index read by loadDataFromES, resolved from config key "face.load.es.index".
  val LOAD_ES_INDEX: String = ConfigUtil.getProperties().getProperty("face.load.es.index")
  // ES index name resolved from config key "car.save.es.index" (not used in this file).
  val CAR_SAVE_ES_INDEX: String = ConfigUtil.getProperties().getProperty("car.save.es.index")

  /**
    * Loads `/public.properties` from the classpath.
    *
    * Fixes over the original: the InputStream was never closed (resource leak),
    * and a missing resource surfaced as an opaque NullPointerException from
    * Properties.load(null); the stream is now closed in a finally block and a
    * descriptive exception is thrown when the resource cannot be found.
    *
    * @return the loaded Properties
    */
  def loadProperties(): Properties = {
    val properties = new Properties()
    val in = LoadESData.getClass.getResourceAsStream("/public.properties")
    if (in == null) {
      throw new java.io.FileNotFoundException("classpath resource /public.properties not found")
    }
    try {
      properties.load(in)
    } finally {
      in.close()
    }
    properties
  }

  /**
    * Builds an ES bool query matching a single device_code, optionally
    * constrained by a captime range, and loads the hits as an RDD.
    *
    * @param sc         session whose SparkContext backs the ES read
    * @param index      ES index (and type) to query
    * @param deviceCode device to match exactly
    * @param startTime  inclusive lower captime bound; ignored when null/empty
    * @param endTime    inclusive upper captime bound; ignored when null/empty
    * @return RDD of (document id, document source map)
    */
  def getOneDeviceData(sc: SparkSession, index: String, deviceCode: String, startTime: String, endTime: String): RDD[(String, Map[String, AnyRef])] = {
    // bool.must -> { match: { device_code: <deviceCode> } }
    val matchOnDevice = new JSONObject()
    matchOnDevice.put("device_code", deviceCode)
    val mustClause = new JSONObject()
    mustClause.put("match", matchOnDevice)
    val boolClause = new JSONObject()
    boolClause.put("must", mustClause)

    // Only constrain captime when both bounds are present and non-empty.
    if (startTime != null && startTime.nonEmpty && endTime != null && endTime.nonEmpty) {
      val bounds = new JSONObject()
      bounds.put("gte", startTime)
      bounds.put("lte", endTime)
      val onCaptime = new JSONObject()
      onCaptime.put("captime", bounds)
      val rangeClause = new JSONObject()
      rangeClause.put("range", onCaptime)
      val filterMust = new JSONArray()
      filterMust.add(rangeClause)
      val filterBoolBody = new JSONObject()
      filterBoolBody.put("must", filterMust)
      val filterClause = new JSONObject()
      filterClause.put("bool", filterBoolBody)
      boolClause.put("filter", filterClause)
    }

    val query = new JSONObject()
    query.put("bool", boolClause)
    // Debug trace of the final query DSL (kept from the original implementation).
    println(query.toJSONString + "=================")
    EsSpark.esRDD(sc.sparkContext, index, query.toJSONString)
  }

  /**
    * Builds an ES bool/filter query matching any of the given device codes
    * (terms clause) plus an optional captime range, and loads the hits.
    *
    * @param spark       session whose SparkContext backs the ES read
    * @param index       ES index (and type) to query
    * @param deviceCodes device codes for the terms filter
    * @param startTime   inclusive lower captime bound; ignored when null/empty
    * @param endTime     inclusive upper captime bound; ignored when null/empty
    * @return RDD of (document id, document source map)
    */
  def getMutliDeviceData(spark: SparkSession, index: String, deviceCodes: util.List[String], startTime: String, endTime: String): RDD[(String, Map[String, AnyRef])] = {
    val termsBody = new JSONObject()
    termsBody.put("device_code", deviceCodes)
    val termsClause = new JSONObject()
    termsClause.put("terms", termsBody)

    val filters = new JSONArray()
    filters.add(termsClause)
    // Append a captime range only when both bounds are supplied.
    if (startTime != null && startTime.nonEmpty && endTime != null && endTime.nonEmpty) {
      val bounds = new JSONObject()
      bounds.put("gte", startTime)
      bounds.put("lte", endTime)
      val onCaptime = new JSONObject()
      onCaptime.put("captime", bounds)
      val rangeClause = new JSONObject()
      rangeClause.put("range", onCaptime)
      filters.add(rangeClause)
    }

    val boolClause = new JSONObject()
    boolClause.put("filter", filters)
    val query = new JSONObject()
    query.put("bool", boolClause)
    println(query.toJSONString)
    EsSpark.esRDD(spark.sparkContext, index, query.toJSONString)
  }

  /**
    * Given a batch of (device_code, payload) pairs, queries ES (LOAD_ES_INDEX)
    * for all capture records of those devices in the observed time window
    * (padded 3 minutes before the earliest timestamp) and flattens each hit.
    *
    * Fixes over the original:
    *  - field presence was tested with `map.get(k).get != None`, which throws
    *    NoSuchElementException when the field is missing and compares an AnyRef
    *    against None (almost always true); fields are now read null-safely and
    *    a null/absent mac falls back to the imsi branch;
    *  - the index-based device-code copy loop is replaced with foreach and
    *    unused locals are removed.
    *
    * @param rdd pairs of (device_code, payload) where the payload's second
    *            "\u001"-separated token is an epoch-millis string
    * @return RDD of (device_code, code, codeType, captime, address,
    *         device_name, longitude, latitude, docId)
    */
  def loadDataFromES(rdd: RDD[(String, String)]) = {
    val tuples = rdd.collect()
    // Distinct device codes observed in this batch drive the terms filter.
    val device_codes = new util.ArrayList[String]()
    tuples.map(_._1).toList.distinct.foreach(device_codes.add)

    // NOTE(review): the split pattern "\\\\u001" matches the literal text
    // "\u001" (backslash + u001), not the control character — confirm the
    // payload actually contains that literal separator.
    val times = tuples.map(_._2.split("\\\\u001")(1)).sorted
    val startTime = times.head // throws on an empty batch, as before
    // Pad the window 3 minutes before the earliest capture.
    val begin = java.lang.Long.parseLong(startTime) - 3 * 60 * 1000
    val begintime = sdf.format(begin)
    val endTime = sdf.format(java.lang.Long.parseLong(times(times.length - 1)))

    // bool.filter: [ terms on device_code, range on captime ]
    val queryDevice = new JSONObject()
    queryDevice.put("device_code", device_codes)
    val queryTerms = new JSONObject()
    queryTerms.put("terms", queryDevice)
    val queryFilters = new JSONArray()
    queryFilters.add(queryTerms)
    if (startTime != null && startTime.nonEmpty && endTime != null && endTime.nonEmpty) {
      val queryTime = new JSONObject()
      queryTime.put("gte", begintime)
      queryTime.put("lte", endTime)
      val querycaptime = new JSONObject()
      querycaptime.put("captime", queryTime)
      val queryRange = new JSONObject()
      queryRange.put("range", querycaptime)
      queryFilters.add(queryRange)
    }
    val queryBool = new JSONObject()
    queryBool.put("filter", queryFilters)
    val queryPara = new JSONObject()
    queryPara.put("bool", queryBool)
    println(queryPara.toJSONString)

    val esRdd = EsSpark.esRDD(rdd.sparkContext, LOAD_ES_INDEX, queryPara.toJSONString)
    esRdd.map { case (docId, source) =>
      // Null-safe stringification of a source field ("" when absent).
      def str(key: String): String = source.get(key).map(v => String.valueOf(v)).getOrElse("")
      // Prefer the mac identity; fall back to imsi when mac is absent or null.
      val (code, codeType) =
        if (source.get("mac").orNull != null) (str("mac"), "mac") else (str("imsi"), "imsi")
      (str("device_code"), code, codeType, str("captime"), str("address"),
        str("device_name"), str("longitude"), str("latitude"), docId)
    }
  }

  /**
    * Legacy variant of loadDataFromES: reads from the hard-coded
    * "stand_wifi_test/wifi" index and takes pre-parsed tuples where
    * tuple._1 is an epoch-millis string and tuple._2 the device_code.
    *
    * Fixes over the original (kept consistent with loadDataFromES):
    *  - `map.get(k).get != None` field checks (throw on missing field, compare
    *    AnyRef to None) replaced with null-safe reads;
    *  - index-based copy loop replaced with foreach; unused locals removed.
    *
    * @return RDD of (device_code, code, codeType, captime, address,
    *         device_name, longitude, latitude)
    */
  def loadDataFromESOld(rdd: RDD[(String, String, String, String, String, String, String)]) = {
    val tuples = rdd.collect()
    // Distinct device codes (tuple._2) drive the terms filter.
    val device_codes = new util.ArrayList[String]()
    tuples.map(_._2).toList.distinct.foreach(device_codes.add)

    // Window padded 3 minutes before the earliest capture time.
    val times = tuples.map(_._1).sorted
    val startTime = times.head // throws on an empty batch, as before
    val begin = java.lang.Long.parseLong(startTime) - 3 * 60 * 1000
    val begintime = sdf.format(begin)
    val endTime = sdf.format(java.lang.Long.parseLong(times(times.length - 1)))

    // bool.filter: [ terms on device_code, range on captime ]
    val queryDevice = new JSONObject()
    queryDevice.put("device_code", device_codes)
    val queryTerms = new JSONObject()
    queryTerms.put("terms", queryDevice)
    val queryFilters = new JSONArray()
    queryFilters.add(queryTerms)
    if (startTime != null && startTime.nonEmpty && endTime != null && endTime.nonEmpty) {
      val queryTime = new JSONObject()
      queryTime.put("gte", begintime)
      queryTime.put("lte", endTime)
      val querycaptime = new JSONObject()
      querycaptime.put("captime", queryTime)
      val queryRange = new JSONObject()
      queryRange.put("range", querycaptime)
      queryFilters.add(queryRange)
    }
    val queryBool = new JSONObject()
    queryBool.put("filter", queryFilters)
    val queryPara = new JSONObject()
    queryPara.put("bool", queryBool)

    val esRdd = EsSpark.esRDD(rdd.sparkContext, "stand_wifi_test/wifi", queryPara.toJSONString)
    esRdd.map { case (_, source) =>
      // Null-safe stringification of a source field ("" when absent).
      def str(key: String): String = source.get(key).map(v => String.valueOf(v)).getOrElse("")
      // Prefer the mac identity; fall back to imsi when mac is absent or null.
      val (code, codeType) =
        if (source.get("mac").orNull != null) (str("mac"), "mac") else (str("imsi"), "imsi")
      (str("device_code"), code, codeType, str("captime"), str("address"),
        str("device_name"), str("longitude"), str("latitude"))
    }
  }

  /**
    * Looks up historical face/code correlation records in "face_wifi/face_code"
    * for every key (tuple._2) present in the batch and flattens each distinct
    * hit into a 13-field tuple:
    * (face_code, deviceId, code_types, code_capTime, code_address,
    *  code_deviceName, code_longitude, code_latitude, face_captime,
    *  face_address, face_deviceName, face_longitude, face_latitude).
    */
  def loadHistory(rdd: RDD[(Boolean, String, String, String, String, SaveEsData, String)]) = {
    val collected = rdd.collect()
    // Distinct keys become the terms filter on face_code.
    val codes = new util.ArrayList[String]()
    collected.map(_._2).toList.distinct.foreach(codes.add)

    // query -> { terms: { face_code: [ ... ] } }
    val termsBody = new JSONObject()
    termsBody.put("face_code", codes)
    val termsClause = new JSONObject()
    termsClause.put("terms", termsBody)
    val query = new JSONObject()
    query.put("query", termsClause)

    val hits: RDD[(String, collection.Map[String, AnyRef])] =
      EsSpark.esRDD(rdd.sparkContext, "face_wifi/face_code", query.toJSONString).distinct()

    hits.map(hit => {
      // Stringify a source field; throws NoSuchElementException when absent.
      def field(name: String): String = hit._2.get(name).get + ""
      (field("face_code"), field("deviceId"), field("code_types"), field("code_capTime"),
        field("code_address"), field("code_deviceName"), field("code_longitude"),
        field("code_latitude"), field("face_captime"), field("face_address"),
        field("face_deviceName"), field("face_longitude"), field("face_latitude"))
    })
  }

  /**
    * Car analogue of loadHistory: looks up correlation records in
    * "car_wifi/car_code" for every key (tuple._2) in the batch and flattens
    * each distinct hit into a 13-field tuple:
    * (car_code, deviceId, code_types, code_capTime, code_address,
    *  code_deviceName, code_longitude, code_latitude, car_captime,
    *  car_address, car_deviceName, car_longitude, car_latitude).
    */
  def loadCarHistory(rdd: RDD[(Boolean, String, String, String, String, SaveEsCarData, String)]) = {
    val collected = rdd.collect()
    // Distinct keys become the terms filter on car_code.
    val codes = new util.ArrayList[String]()
    collected.map(_._2).toList.distinct.foreach(codes.add)

    // query -> { terms: { car_code: [ ... ] } }
    val termsBody = new JSONObject()
    termsBody.put("car_code", codes)
    val termsClause = new JSONObject()
    termsClause.put("terms", termsBody)
    val query = new JSONObject()
    query.put("query", termsClause)

    val hits: RDD[(String, collection.Map[String, AnyRef])] =
      EsSpark.esRDD(rdd.sparkContext, "car_wifi/car_code", query.toJSONString).distinct()

    hits.map(hit => {
      // Stringify a source field; throws NoSuchElementException when absent.
      def field(name: String): String = hit._2.get(name).get + ""
      (field("car_code"), field("deviceId"), field("code_types"), field("code_capTime"),
        field("code_address"), field("code_deviceName"), field("code_longitude"),
        field("code_latitude"), field("car_captime"), field("car_address"),
        field("car_deviceName"), field("car_longitude"), field("car_latitude"))
    })
  }

  /**
    * Batch variant of loadHistory: queries "face_wifi/face_code" for every key
    * (tuple._2) in the batch and flattens each hit into the same 13-field
    * tuple layout as loadHistory (no distinct() here, as in the original).
    *
    * Fixes over the original:
    *  - a leading rdd.map(...) whose result was discarded (dead code) removed;
    *  - the start/end time values that were computed but never used in the
    *    query (and could throw NumberFormatException on bad data) removed;
    *  - index-based device-code copy loop replaced with foreach.
    */
  def loadBatchHistory(rdd: RDD[(Boolean, String, String, String, String, SaveEsData)]) = {
    val tuples = rdd.collect()
    // Distinct keys become the terms filter on face_code.
    val device_codes = new util.ArrayList[String]()
    tuples.map(_._2).toList.distinct.foreach(device_codes.add)

    // query -> { terms: { face_code: [ ... ] } }
    val queryDevice = new JSONObject()
    queryDevice.put("face_code", device_codes)
    val queryTerms = new JSONObject()
    queryTerms.put("terms", queryDevice)
    val queryPara = new JSONObject()
    queryPara.put("query", queryTerms)

    val esRdd: RDD[(String, collection.Map[String, AnyRef])] =
      EsSpark.esRDD(rdd.sparkContext, "face_wifi/face_code", queryPara.toJSONString)
    esRdd.map(result => {
      // Stringify a source field; throws NoSuchElementException when absent (as before).
      def field(name: String): String = result._2.get(name).get + ""
      (field("face_code"), field("deviceId"), field("code_types"), field("code_capTime"),
        field("code_address"), field("code_deviceName"), field("code_longitude"),
        field("code_latitude"), field("face_captime"), field("face_address"),
        field("face_deviceName"), field("face_longitude"), field("face_latitude"))
    })
  }

  /**
    * Intentionally empty placeholder — accepts a batch tuple and does nothing.
    * NOTE(review): appears to be dead code; confirm no callers before removing.
    */
  def loadData(param: (Boolean, String, String, String, String, SaveEsData)) {

  }


  /**
    * Life-pattern analysis: for each configured activity window, finds codes
    * (mac / imsi / imei / face / licenseplate) captured on at least as many
    * distinct days as the window spans, inside the daily time slot, and
    * returns the union of all windows' qualifying codes.
    *
    * Fixes over the original:
    *  - field presence was tested with `map.get(k).get != None`, which throws
    *    NoSuchElementException on a missing field and compares an AnyRef to
    *    None (almost always true); fields are now read null-safely;
    *  - unused locals removed; the inner val that shadowed the outer day
    *    count renamed; stale @param accumulatedays doc entry removed.
    *
    * @param sparkSession  session used for the ES reads
    * @param index         ES index to scan
    * @param activitytimes activity windows; days is "yyyy-MM-dd~yyyy-MM-dd"
    *                      and time is "HH:mm:ss~HH:mm:ss"
    * @return union of per-window RDDs of (code, code)
    */
  def loadRecentEs(sparkSession: SparkSession, index: String, activitytimes: List[ActivityTime]) = {
    val perWindow = activitytimes.map(window => {
      val days = window.days.split("~")
      val times = window.time.split("~")
      // Number of whole days the window spans; a code must appear on at least
      // this many distinct days to qualify.
      val startday = sdf_day.parse(days(0))
      val endday = sdf_day.parse(days(1))
      val requiredDays = (endday.getTime - startday.getTime) / (24 * 60 * 60 * 1000)

      val starttime = days(0) + " " + times(0)
      val endtime = days(1) + " " + times(1)

      // match_all plus a captime range filter over the whole window.
      val queryMatchAll = new JSONObject()
      queryMatchAll.put("match_all", new JSONObject())
      val queryFilters = new JSONArray()
      queryFilters.add(queryMatchAll)
      if (starttime != null && starttime.nonEmpty && endtime != null && endtime.nonEmpty) {
        val queryTime = new JSONObject()
        queryTime.put("gte", starttime)
        queryTime.put("lte", endtime)
        val querycaptime = new JSONObject()
        querycaptime.put("captime", queryTime)
        val queryRange = new JSONObject()
        queryRange.put("range", querycaptime)
        queryFilters.add(queryRange)
      }
      val queryBool = new JSONObject()
      queryBool.put("filter", queryFilters)
      val queryPara = new JSONObject()
      queryPara.put("bool", queryBool)
      println(queryPara.toJSONString)

      val esData = EsSpark.esRDD(sparkSession.sparkContext, index, queryPara.toJSONString).cache()
      println(esData.count())

      // Pick the first present, non-null identity field in priority order.
      val parsRdd: RDD[(String, String)] = esData.map(x => {
        def str(key: String): String = x._2.get(key).map(v => String.valueOf(v)).getOrElse("")
        def has(key: String): Boolean = x._2.get(key).orNull != null
        val code =
          if (has("mac")) str("mac")
          else if (has("imsi")) str("imsi")
          else if (has("imei")) str("imei")
          else if (has("faceid")) str("faceid")
          else str("licenseplateid")
        (code, str("captime"))
      })

      parsRdd
        // Keep only captures whose time-of-day falls inside the daily slot;
        // lexicographic compare is valid for zero-padded "HH:mm:ss" strings.
        .filter(x => {
          val hourMinSec = x._2.split(" ")(1)
          hourMinSec >= times(0) && hourMinSec <= times(1)
        })
        // Deduplicate to one record per code per calendar day.
        .map(x => (x._1 + "_" + x._2.split(" ")(0), 1))
        .reduceByKey(_ + _)
        .map(x => (x._1.split("_")(0), 1))
        // Keep codes seen on at least `requiredDays` distinct days.
        .groupByKey()
        .filter(_._2.size >= requiredDays)
        .map(x => (x._1, x._1))
    })
    // Fold all windows' results into a single RDD.
    perWindow.foldLeft(sparkSession.sparkContext.emptyRDD[(String, String)]) {
      case (acc, one) => one.union(acc)
    }
  }

}
