import java.text.SimpleDateFormat
import java.util.{GregorianCalendar, Date}
import java.util.concurrent.TimeUnit

import com.sugon.bd.ga.Filters._
import org.apache.spark.SparkContext
import org.apache.spark.rdd.RDD
import org.elasticsearch.spark.rdd.EsSpark
import play.api.libs.json.{JsDefined, JsUndefined, JsValue, Json}

import scala.collection.JavaConversions._
import scala.collection.mutable
import scala.collection.mutable.ArrayBuffer
import scala.math.{abs, _}

package object xjcs extends Serializable {

  // --- binary feature decoding constants ------------------------------------
  // Bits per byte, used when assembling ints from byte arrays.
  val BYTE_SIZE = 8
  // Mask to treat a byte as unsigned when widening to Int.
  val BYTE_MASK = 0xFF
  // Byte widths of IEEE-754 single / double precision values.
  val FLOAT_SIZE = 4
  val DOUBLE_SIZE = 8
  // Base64 codec (no line wrapping, standard alphabet) used to decode feature strings.
  val decoder = new org.apache.commons.codec.binary.Base64(-1, Array(), false)

  // Reinterprets an Int bit pattern as an IEEE-754 Float.
  def int2float = java.lang.Float.intBitsToFloat _

  // Field catalogue: (field name, kind). Kind 1 = string-valued, kind 2/3 = numeric
  // (3 marks the "*_conf" confidence scores). jsonToRdd keeps kind-1 values as
  // strings and converts the rest to Double; the "_conf" suffix also drives the
  // attribute lists built in majorityVote / jsonToArray / computeConf.
  // NOTE(review): "feature" is listed twice and "upper_color_conf" appears under
  // both kind 2 and kind 3 — confirm the duplicates are intentional.
  val fieldNames = Array(("camera_id", 1), ("feature", 1), ("mv_classification", 1), ("screenshot_path", 1)
    , ("company", 1), ("feature", 1), ("url", 1), ("track_id", 2)
    , ("bag_handbag", 2), ("banner", 2), ("factor", 2), ("fps", 2), ("timestamp", 2), ("gender", 2), ("glass", 2)
    , ("hat", 2), ("height", 2), ("logo", 2), ("long_hair", 2), ("lower_color", 2), ("mask", 2), ("mv_color", 2),
    ("mv_type", 2), ("nv_color", 2), ("nv_type", 2), ("objnum", 2), ("offset", 2), ("pants", 2), ("pts", 2), ("ride", 2), ("scale", 2)
    , ("sleeve", 2), ("stripe", 2), ("track_type", 2), ("track_xmax", 2), ("track_ymax", 2), ("track_xmin", 2), ("track_ymin", 2)
    , ("upper_clothing", 2), ("upper_color_conf", 2), ("weapon", 2), ("weapon_conf", 2), ("wide", 2),
    ("bag_handbag_conf", 3), ("gender_conf", 3), ("glass_conf", 3), ("hat_conf", 3), ("long_hair_conf", 3)
    , ("mask_conf", 3), ("mv_classification_conf", 3), ("mv_type_conf", 3), ("nv_type_conf", 3), ("pants_conf", 3), ("ride_conf", 3)
    , ("sleeve_conf", 3), ("stripe_conf", 3), ("track_conf", 3), ("upper_clothing_conf", 3), ("upper_color_conf", 3)
  )
  // Fields carried through unchanged by the aggregation step.
  // NOTE(review): "screeenshot_path" looks like a typo of "screenshot_path"
  // (cf. fieldNames above) — if so, that field never matches; confirm before fixing.
  val fieldNamesRemain = Array("url", "feature", "screeenshot_path", "timestamp", "objnum", "track_xmin", "track_xmax", "track_ymin", "track_ymax")

  // Looks up `field` in the JSON value; returns its rendering with all double
  // quotes stripped, or the literal marker "no" when the field is absent.
  def extractField(data: JsValue, field: String): String =
    data \ field match {
      case JsDefined(value) => value.toString.replace("\"", "")
      case JsUndefined() => "no"
    }

  /**
   * Extracts the catalogued fields from one parsed JSON record.
   * Kind 1 fields stay strings; all other kinds are converted to Double.
   * Missing fields (marker "no" from extractField) are dropped from the result.
   *
   * Cleanup: removed the redundant `n.asInstanceOf[Int]` (n is already an Int)
   * and the duplicated extractField call in the two branches.
   *
   * @param fields (name, kind) pairs, e.g. the fieldNames table
   * @param js     the parsed JSON record
   * @return map of present fields; values are String (kind 1) or Double
   */
  def jsonToRdd(fields: Array[(String, Int)], js: JsValue): Map[String, Any] = {
    val data = fields.map { case (name, kind) =>
      val raw = extractField(js, name)
      if (kind == 1) (name, raw)                 // string field: keep as-is
      else if (raw != "no") (name, raw.toDouble) // numeric field present
      else (name, "no")                          // missing: marker, filtered below
    }
    data.filter(_._2 != "no").toMap
  }


  /**
   * Formats an epoch-millisecond timestamp string as "yyyy-MM-dd HH:mm:ss"
   * in the JVM's default time zone.
   *
   * Bug fix: the original passed the raw String straight to
   * SimpleDateFormat.format, which throws IllegalArgumentException
   * ("Cannot format given Object as a Date"). The string is now parsed to a
   * long and wrapped in a java.util.Date first.
   * NOTE(review): assumes the value is epoch MILLISECONDS — confirm against the
   * producers (frame_timestamp elsewhere is a Double; adjust if it is seconds).
   *
   * @param timestamp epoch milliseconds as a decimal string
   * @return formatted local-time string
   */
  def timeStamp2TimeString(timestamp: String): String = {
    val outputFormat = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss")
    outputFormat.format(new Date(timestamp.toLong))
  }


  //从一批数据中采用多数投票法返回属性值和对应的double值,例如Map("mv_type"->3.0)
  // Majority vote over a batch of JSON records: for every confidence-bearing
  // attribute (prefix of a "*_conf" field), pick the value occurring most often
  // across the batch. Returns e.g. Map("mv_type" -> 3.0).
  def majorityVote(feature_set: Seq[String]): Map[String, Double] = {
    val rows = feature_set.map(Json.parse).map(jsonToArray)
    val attrCount = rows.head.length
    val attrNames = fieldNames.filter(_._1.toString.endsWith("conf")).map(_._1.split("_")(0))
    val winners = scala.collection.mutable.ArrayBuffer[Double]()
    var col = 0
    while (col < attrCount) {
      // Collect column `col` across all records, then vote.
      val column = scala.collection.mutable.ArrayBuffer[Double]()
      rows.foreach(row => column += row(col))
      winners += voteMaxElement(column.toArray)
      col += 1
    }
    assert(winners.length == attrNames.length, "array size not match")
    attrNames.zip(winners).toMap // e.g. Map("mv_type" -> 1.0, ...)
  }

  //由json文件返回数组格式(只有属性)
  // Extracts the attribute value (not the *_conf score) for every
  // confidence-bearing field. Missing fields become the sentinel 3.14 so every
  // record yields an array of identical length — nothing is filtered here on purpose.
  def jsonToArray(js2: JsValue): Array[Double] = {
    val attrNames = fieldNames.filter(_._1.toString.endsWith("conf")).map(_._1.split("_")(0)).toList
    val values = attrNames.map { attr =>
      val raw = extractField(js2, attr)
      if (raw == "no") 3.14 else raw.toDouble
    }
    values.toArray
  }

  //投票选出数组中个数最多的元素
  // Returns the most frequent element of the array (ties broken arbitrarily).
  def voteMaxElement(data: Array[Double]): Double = {
    val distinct = scala.collection.mutable.Set[Double]()
    distinct ++= data
    val counted = distinct.map(candidate => (candidate, data.count(_ == candidate)))
    counted.toSeq.maxBy(_._2)._1
  }


  // Reads the "<attr>_conf" confidence score for every attribute key of
  // characterMap, in the map's iteration order; absent scores default to 0.0.
  def jsonToArray2(js3: JsValue, characterMap: Map[String, Double]): Array[Double] = {
    val confFields = characterMap.toSeq.map(_._1).map(_.concat("_conf"))
    val scores = confFields.map { field =>
      val raw = extractField(js3, field)
      if (raw == "no") 0.0 else raw.toDouble
    }
    scores.toArray
  }

  /**
   * True when the two timestamps differ by strictly less than deltaTime.
   * (Cleanup: collapsed the redundant `if (cond) true else false`.)
   */
  def computeTime(time1: Double, time2: Double, deltaTime: Double): Boolean =
    abs(time1 - time2) < deltaTime

  // Takes the middle record of the batch and returns its confidence scores
  // keyed by the full "*_conf" field names.
  // NOTE(review): keys come from fieldNames while values follow characterMap's
  // ordering — zip silently truncates if the two lengths differ; confirm the
  // intended pairing.
  def computeConf(featureSet: Seq[String], characterMap: Map[String, Double]): Map[String, Double] = {
    val confRows = featureSet.map(Json.parse).map(jsonToArray2(_, characterMap))
    val middleRow = confRows.toList(confRows.size / 2)
    val confNames = fieldNames.filter(_._1.toString.endsWith("conf")).map(_._1)
    confNames.zip(middleRow).toMap
  }

  /**
   * Picks the middle record of a batch and returns its passthrough fields as a
   * map, dropping entries whose value is a "missing" sentinel ("no" for string
   * fields, 3.14 for numeric ones).
   *
   * Bug fixes versus the original:
   *  - "objnum" was extracted from the "track_ymin" field (copy/paste slip);
   *    it now reads the "objnum" field.
   *  - the final filters compared each (key, value) TUPLE to "no" / 3.14, which
   *    is never equal, so sentinel entries were always kept; they now test the
   *    value (`_._2`), mirroring jsonToRdd.
   */
  def computeOtherFeature(featureSet: Seq[String]): Map[String, Any] = {
    // Extract `field`; keep the raw string when present, else the 3.14 sentinel.
    def fieldOrSentinel(js: JsValue, field: String): Any = {
      val raw = extractField(js, field)
      if (raw != "no") raw else 3.14
    }
    val data = featureSet.map(Json.parse).map { t =>
      val timestamp = fieldOrSentinel(t, "frame_timestamp") // output key is "timestamp"
      val feature = extractField(t, "feature")
      val url = extractField(t, "url")
      val screenshot_path = extractField(t, "screenshot_path")
      val camera_id = extractField(t, "camera_id")
      val track_id = extractField(t, "track_id")
      val track_xmax = fieldOrSentinel(t, "track_xmax")
      val track_xmin = fieldOrSentinel(t, "track_xmin")
      val track_ymax = fieldOrSentinel(t, "track_ymax")
      val track_ymin = fieldOrSentinel(t, "track_ymin")
      val objnum = fieldOrSentinel(t, "objnum")
      (timestamp, feature, url, screenshot_path, camera_id, track_id, track_xmax, track_xmin, track_ymax, track_ymin, objnum)
    }
    // Representative record: the middle element of the batch.
    val data2 = data.toList.apply(data.size / 2)
    val res = Map("timestamp" -> data2._1, "feature" -> data2._2, "url" -> data2._3,
      "screenshot_path" -> data2._4, "camera_id" -> data2._5, "track_id" -> data2._6,
      "track_xmax" -> data2._7, "track_xmin" -> data2._8, "track_ymax" -> data2._9,
      "track_ymin" -> data2._10, "objnum" -> data2._11)
    res.toSeq.filter(_._2 != "no").filter(_._2 != 3.14).toMap
  }

  // Earliest frame_timestamp across all groups in the RDD.
  // Note: fails on an empty RDD or an empty group (min of empty collection).
  def computeEarlyTime(myRdd: RDD[(String, Iterable[JsValue])]): Double = {
    val perGroupMin = myRdd.map { case (_, records) =>
      records.toSeq.map(r => (r \ "frame_timestamp").as[Double]).min
    }
    perGroupMin.collect.min
  }

  // Latest frame_timestamp across all groups in the RDD.
  // Note: fails on an empty RDD or an empty group (max of empty collection).
  def computeLastTime(myRdd: RDD[(String, Iterable[JsValue])]): Double = {
    val perGroupMax = myRdd.map { case (_, records) =>
      records.toSeq.map(r => (r \ "frame_timestamp").as[Double]).max
    }
    perGroupMax.collect.max
  }

  // Within every group, keeps only the records whose frame_timestamp equals
  // timeFilter exactly; each surviving record is paired with its timestamp.
  def rdd2rddByTime(myRdd: RDD[(String, Iterable[JsValue])], timeFilter: Double): RDD[(String, Iterable[(Double, JsValue)])] =
    myRdd.map { case (strId, records) =>
      val stamped = records.map { record =>
        ((record \ "frame_timestamp").as[Double], record)
      }
      strId -> stamped.filter { case (ts, _) => ts == timeFilter }
    }

  /**
   * Generates the timestamps startTime + k*timeDelta for k = 1, 2, ... — i.e.
   * startTime itself is excluded and the first value >= endTime is included.
   * Returns an empty sequence when startTime >= endTime.
   *
   * Bug fix: a non-positive timeDelta made the original loop forever; it is now
   * rejected up front with IllegalArgumentException.
   *
   * @param startTime exclusive lower bound
   * @param endTime   bound; the first value reaching it is still emitted
   * @param timeDelta step size, must be > 0
   */
  def timeSeq(startTime: Double, endTime: Double, timeDelta: Double): Seq[Double] = {
    require(timeDelta > 0, "timeDelta must be positive")
    var time = startTime
    val timeSeqs = ArrayBuffer[Double]()
    while (time < endTime) {
      time += timeDelta
      timeSeqs += time
    }
    timeSeqs
  }


  // All group keys of the RDD, collected to the driver.
  def computeEachId(myRdd: RDD[(String, Iterable[JsValue])]): Seq[String] =
    myRdd.map { case (id, _) => id }.collect().toSeq

  /**
   * Persists every (id, records) group to Redis as a sorted set: member = the
   * record's JSON string, score = its frame_timestamp. One Jedis connection is
   * taken from the pool per partition and returned afterwards.
   */
  def data2redis(myRdd: RDD[(String, Iterable[JsValue])]): Unit = {

    myRdd.foreachPartition { partitionOfRecords =>
      val jedisInner = RedisClient.pool.getResource
      partitionOfRecords.foreach { case (m: String, n: Iterable[JsValue]) =>
        val str_id = m
        val value = n.map { jsonValue =>
          val timestamp = (jsonValue \ "frame_timestamp").as[Double]
          println("timestamp",timestamp,timestamp.toInt)
          (jsonValue.toString(), timestamp)
        } // every element of value is a (jsonString, timestamp) pair
      val sortedValue = value.toSeq.sortBy(_._2)
        // NOTE(review): .toMap discards the sort order just established (and
        // collapses duplicate JSON strings); zadd scores make ordering
        // irrelevant anyway, so the sortBy above appears to be a no-op.
        val vv = mapAsJavaMap(sortedValue.toMap)
          .asInstanceOf[java.util.Map[java.lang.String, java.lang.Double]]
        jedisInner.zadd(str_id, vv)
        println("successfully to redis redis database!")
      }
      // Return the pooled connection even though no try/finally guards failures.
      jedisInner.close()
    }
  }

  // Euclidean distance between two vectors (zip truncates to the shorter one).
  def get_distance(xs: Array[Double], ys: Array[Double]): Double = {
    val squaredDiffs = xs.zip(ys).map { case (a, b) => pow(b - a, 2) }
    sqrt(squaredDiffs.sum)
  }

  // NOTE(review): despite the name, this is the NATURAL logarithm (base e) — the
  // true base-2 variant is commented out below. get_entropy therefore measures
  // entropy in nats, not bits. Renaming or switching the base would shift the
  // entropy thresholds used by callers, so it is deliberately left unchanged.
  def log2(x: Double): Double = scala.math.log(x)

  //  def log2(x: Double): Double = scala.math.log(x) / scala.math.log(2)

  // Shannon entropy of the label list, in nats (natural logarithm — the helper
  // above named log2 actually computes ln). Returns 0.0 for a uniform list.
  def get_entropy(values: List[Int]): Double = {
    val total: Double = values.length.toDouble
    val uniqueVals = collection.SortedSet(values: _*)
    var entropy = 0.0
    for (label <- uniqueVals) {
      val p: Double = values.count(_ == label) / total
      entropy -= scala.math.log(p) * p
    }
    entropy
  }

  // Maps each (x, y) displacement to an angle in radians:
  //   y > 0 and x > 0 : atan(y/x)
  //   x < 0           : atan(y/x) + Pi
  //   otherwise       : atan(y/x) + 2*Pi
  // (x == 0 divides by zero; atan of the resulting infinity falls into the
  // "otherwise" branch.)
  def get_degree(y: List[Double], x: List[Double]): List[Double] = {
    val angles = scala.collection.mutable.ListBuffer[Double]()
    (x zip y).foreach { case (xi, yi) =>
      val base = atan(yi / xi)
      if (yi > 0 & xi > 0) angles += base
      else if (xi < 0) angles += base + Pi
      else angles += base + 2 * Pi
    }
    angles.toList
  }


  // Quantizes each angle from get_degree into one of 8 sectors of width Pi/4,
  // numbered 0..7 counter-clockwise from the positive x axis; anything outside
  // [0, 7*Pi/4) falls into sector 7.
  def get_direction(y: List[Double], x: List[Double]): List[Int] =
    get_degree(y, x).map { angle =>
      if (angle >= 0 & angle < Pi / 4) 0
      else if (angle >= Pi / 4 & angle < Pi / 2) 1
      else if (angle >= Pi / 2 & angle < 3 * Pi / 4) 2
      else if (angle >= 3 * Pi / 4 & angle < Pi) 3
      else if (angle >= Pi & angle < 5 * Pi / 4) 4
      else if (angle >= 5 * Pi / 4 & angle < 3 * Pi / 2) 5
      else if (angle >= 3 * Pi / 2 & angle < 7 * Pi / 4) 6
      else 7
    }

  // Signed difference date2 - date1, converted to the requested unit
  // (TimeUnit.convert truncates toward zero).
  def getDateDiff(date1: Date, date2: Date, timeUnit: TimeUnit): Long =
    timeUnit.convert(date2.getTime - date1.getTime, TimeUnit.MILLISECONDS)

  /**
   * Single-camera behaviour checks over one track's JSON history. Flags:
   *  - type 1 "high speed": mean width-normalised displacement over the last 8
   *    steps, divided by their duration, falls inside
   *    (single_speed_lower, single_speed_higher);
   *  - type 0 "wandering": entropy of the 8-sector movement directions exceeds
   *    single_wandering_entropy;
   *  - type 2 "night": hour of the last record is below single_night_limit.
   * Returns the merged non-empty alert maps, or None when nothing fires or the
   * history has <= single_wandering_duration records.
   *
   * NOTE(review): `outputFormat` used below is NOT defined in this method — it
   * must come from an import (com.sugon.bd.ga.Filters._?); the local
   * `time_format` is only used for parsing. Confirm.
   * NOTE(review): the three alert maps share keys ("id", "type", ...); the `++`
   * merge lets record_night overwrite record_speed's values when both fire.
   */
  def wandering_highspeed_night_detect(k: String, history: List[String], single_wandering_duration: Int, single_speed_lower: Double, single_speed_higher: Double, single_wandering_entropy: Double, single_night_limit: Int): Option[Map[String, Any]] = {
    val time_format = new java.text.SimpleDateFormat("yyyy-MM-dd HH:mm:ss")
    if (history.length > single_wandering_duration) {
      var record_speed = Map.empty[String, Any]
      var record_wandering = Map.empty[String, Any]
      var record_night = Map.empty[String, Any]
      // One (centre x, centre y, box width, timestamp) tuple per history record.
      val list = scala.collection.mutable.MutableList[(Double,Double,Int,Date)]()
      history.foreach {
        e => {
          val value = Json.parse(e)
          // Box centre; note the Int division happens before widening to Double.
          val x: Double = ((value \ "track_xmin").as[Int] + (value \ "track_xmax").as[Int]) / 2
          val y: Double = ((value \ "track_ymin").as[Int] + (value \ "track_ymax").as[Int]) / 2
          val w = abs((value \ "track_xmin").as[Int] - (value \ "track_xmax").as[Int])
          val time = time_format.parse((value \ "timestamp").as[String])
          list += Tuple4(x, y, w, time)
        }
      }

      val start_time = outputFormat.format(list.head._4)
      val end_time = outputFormat.format(list.last._4)

      // Per-step displacements, fed to get_direction for the wandering check.
      val list_diff_x = scala.collection.mutable.MutableList[Double]()
      val list_diff_y = scala.collection.mutable.MutableList[Double]()

      var distance: Double = 0
      for (i <- 1 to list.size - 1) {
        // The tiny epsilon presumably avoids a zero dx in get_degree's division — TODO confirm.
        list_diff_x += (list(i - 1)._1 - list(i)._1 + 0.000000001)
        list_diff_y += (list(i - 1)._2 - list(i)._2)
        // 取最近的数据做速度检测 (use only the most recent steps for the speed check)
        if (i > list.size - 9) {
          // Step length normalised by the mean box width of the two records.
          distance = distance + sqrt((list(i - 1)._1 - list(i)._1) * (list(i - 1)._1 - list(i)._1) + (list(i - 1)._2 - list(i)._2) * (list(i - 1)._2 - list(i)._2)) / ((list(i - 1)._3 + list(i)._3) / 2)
          //          distance_speed = distance_speed + sqrt((list_x(i-1)-list_x(i))*(list_x(i-1)-list_x(i))+(list_y(i-1)-list_y(i))*(list_y(i-1)-list_y(i)))
        }
      }
      // Seconds spanned by the last 8 records; 0 here makes distance/duration infinite.
      val duration = getDateDiff(list.takeRight(8).head._4, list.last._4, TimeUnit.SECONDS).toInt
      //      println("speed",distance/duration)
      if (distance / duration > single_speed_lower & distance / duration < single_speed_higher) {
        record_speed = Map("id" -> k, "check_time" -> end_time, "type" -> 1)
        println("high_speed")
      }
      val list_direction = get_direction(list_diff_y.toList, list_diff_x.toList)
      val entropy = get_entropy(list_direction)
      if (entropy > single_wandering_entropy) {
        record_wandering = Map("id" -> k, "start_time" -> start_time, "end_time" -> end_time, "type" -> 0)
        println("wandering")
      }
      // Deprecated Date.getHours: hour-of-day in the JVM's default time zone.
      if (list.last._4.getHours < single_night_limit) {
        record_night = Map("id" -> k, "check_time" -> end_time, "type" -> 2)
      }
      if (record_speed.nonEmpty || record_wandering.nonEmpty || record_night.nonEmpty) {
        Some(record_speed ++ record_wandering ++ record_night)
      }
      else {
        None
      }
    }
    else {
      None
    }


  }

  /**
   * Reads a little-endian IEEE-754 float from the 4 bytes of `ba` starting at
   * `startByte`.
   *
   * Bug fix: the shift amount was `a * BYTE_SIZE` (the ABSOLUTE byte index)
   * instead of the offset within the float. Because JVM int shifts are taken
   * mod 32, this happened to produce correct results for the existing callers
   * (startByte is always a multiple of FLOAT_SIZE) but was wrong for any other
   * offset; the relative index `(a - startByte)` is now used, which is
   * behaviour-identical for multiples of 4 and correct everywhere else.
   */
  def parseFloat(ba: Array[Byte], startByte: Int): Float = {
    var bits: Int = 0
    for (a <- startByte until startByte + FLOAT_SIZE)
      bits = bits | ((ba(a).toInt & BYTE_MASK) << ((a - startByte) * BYTE_SIZE))
    int2float(bits)
  }

  /**
   * Decodes a feature blob into doubles. A String input is Base64-decoded
   * first; the resulting bytes are read 4 at a time as little-endian floats
   * via parseFloat. If the length is not a multiple of FLOAT_SIZE, the leftover
   * bytes are packed little-endian into one extra trailing value.
   *
   * @throws IllegalArgumentException for any input that is neither String nor Array[Byte]
   */
  def parseFloatArray(obj: Any): Array[Double] = {
    obj match {
      case text: String => parseFloatArray(decoder.decode(text))
      case arr: Array[Byte] => {
        var rest = arr.length % FLOAT_SIZE
        //if (rest != 0) throw new IllegalArgumentException("ByteArray not multiple of " + bytes + " long.")
        var retSize = arr.length / FLOAT_SIZE
        // One slot per complete float, plus one for the remainder if any.
        var ret = new Array[Double](retSize + {
          if (rest != 0) 1 else 0
        })
        for (f <- 0 until retSize)
          ret(f) = parseFloat(arr, f * FLOAT_SIZE)

        if (rest != 0) {
          ret(retSize) = 0
          // NOTE(review): unlike parseFloat, these bytes are NOT masked with
          // BYTE_MASK, so negative bytes sign-extend before the shift — confirm
          // whether this remainder path is ever exercised / intended.
          for (b <- 0 until rest)
            ret(retSize) += arr(retSize * FLOAT_SIZE + b) << b * BYTE_SIZE
        }
        ret
      }
      case _ => throw new IllegalArgumentException("Unparsable type.")
    }
  }

  /**
   * Builds the ES query JSON for the history lookup. Slot meanings:
   * 0 = track_type, 1 = timestamp gte, 2 = timestamp lt, 3 = camera_id to
   * exclude, 4 = geo distance (e.g. "0.05km"), 5 = lat, 6 = lon, 7 = company.
   * Only "p_feature" and "timestamp" are requested in _source.
   * NOTE(review): the term field "commany" looks like a typo of "company"
   * (cf. fieldNames) — if the ES mapping really uses "company", this clause
   * never matches. Left unchanged because the index schema is not visible here.
   */
  def create_querystr1(queryarray: Array[String]): String = {
    """{"query": {"bool": {"must": [{"term": {"track_type":"""" + queryarray(0) + """"}},{"term": {"commany":"""" + queryarray(7) + """"}},{"range": {"timestamp": {"gte":"""" + queryarray(1) + """","lt": """" + queryarray(2) + """"}}}],"must_not": [{"term": {"camera_id":"""" + queryarray(3) + """"}}],"filter": {"geo_distance": {"distance":"""" + queryarray(4) + """","camera_gis": {"lat": """ + queryarray(5) + ""","lon": """ + queryarray(6) + """}}}}},"_source": ["p_feature","timestamp"]}"""
  }

  // Builds an ES "terms" query: field name = queryarray(0),
  // candidate values = queryarray(1..3).
  def create_querystr2(queryarray: Array[String]): String = {
    val field = queryarray(0)
    val first = queryarray(1)
    val second = queryarray(2)
    val third = queryarray(3)
    s"""{"query": {"terms": {"$field": ["$first","$second","$third"]}}}"""
  }


  // Inner product of two vectors (zip truncates to the shorter one).
  def dotProduct(x: Array[Double], y: Array[Double]): Double =
    x.zip(y).map { case (a, b) => a * b }.sum

  /*
   * Return the magnitude (Euclidean norm) of an array.
   * We square each element, sum the squares, then take the square root of the result.
   */
  def magnitude(x: Array[Double]): Double = {
    val sumOfSquares = x.map(v => v * v).sum
    math.sqrt(sumOfSquares)
  }

  // Cosine similarity of two equal-length vectors (requires matching sizes;
  // a zero-magnitude vector yields NaN or infinity through the division).
  def cosineSimilarity(x: Array[Double], y: Array[Double]): Double = {
    require(x.size == y.size)
    val denominator = magnitude(x) * magnitude(y)
    dotProduct(x, y) / denominator
  }

  /**
   * Decides whether another detection's feature vector matches `my_feature`.
   * Quick reject unless the leading components' ratio lies in (0.8, 1.2); then
   * company "KS" features are compared by Euclidean distance (< EM_dis), all
   * others by cosine similarity (> Cosine_sim).
   *
   * Cleanup: collapsed the `if (...) true else false` ladders into boolean
   * expressions and replaced non-short-circuit `&` with `&&` (operands are pure).
   */
  def retrieve_close2(other_feature: Array[Double], my_feature: Array[Double], company: String, EM_dis: Double, Cosine_sim: Double): Boolean = {
    val headRatio = other_feature(0) / my_feature(0)
    if (headRatio <= 0.8 || headRatio >= 1.2) false
    else if (company == "KS") get_distance(other_feature, my_feature) < EM_dis
    else cosineSimilarity(other_feature, my_feature) > Cosine_sim
  }


  /**
   * Filters candidate ES documents down to those whose decoded feature vector
   * matches feature_single (per retrieve_close2). Matches are keyed
   * "p_<camera_id>_<track_id>" with ((lat, lon), time) payloads; the trailing
   * tuple echoes the caller's (lat, lon, starttime, company) back unchanged.
   */
  def get_same_pedestrian(records: RDD[(java.lang.String, mutable.LinkedHashMap[java.lang.String, AnyRef])], feature_single: Array[Double], lat: Double, lon: Double, starttime: String, company: String, EM_dis: Double, Cosine_sim: Double): (Option[RDD[(String, ((Float, Float), Date))]], Double, Double, String, String) = {
    // records: RDD[(String, mutable.LinkedHashMap[String, AnyRef])]  -> ES docs
    val timeParser = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss")
    /**
     * Compare features and keep the records within the thresholds.
     */
    val candidates = records.map {
      case (_, doc) => {
        val docMap: Map[String, AnyRef] = doc.toMap
        val candidateKey = "p_" + docMap("camera_id").toString + "_" + docMap("track_id").toString
        val candidateTime = timeParser.parse(docMap("timestamp").asInstanceOf[String])
        val gis = docMap("camera_gis").asInstanceOf[collection.mutable.LinkedHashMap[String, AnyRef]]
        val candidateLat = gis("lat").asInstanceOf[Double].toFloat
        val candidateLon = gis("lon").asInstanceOf[Double].toFloat
        val candidateFeature = parseFloatArray(docMap("feature").toString)
        (candidateKey, (candidateFeature, (candidateLat, candidateLon), candidateTime))
      }
    }
    val same_pedestrians = candidates.filter {
      case (_, payload) =>
        retrieve_close2(payload._1, feature_single, company, EM_dis, Cosine_sim)
    }
    // Drop the feature vector, keeping only position and time per match.
    val same_pedestrians_format = same_pedestrians.map {
      case (matchKey, payload) => (matchKey, (payload._2, payload._3))
    }
    if (same_pedestrians.isEmpty()) (None, lat, lon, starttime, company)
    else (Option(same_pedestrians_format), lat, lon, starttime, company)
  }

  /**
   * Step 1 of multi-camera tracking: from a track's last record, query the ES
   * "history/logs" index for nearby records of the same company in the 15
   * minutes before the exit time (excluding this camera), then keep the ones
   * whose decoded feature matches (get_same_pedestrian).
   *
   * @return (matching-records-option, my_lat, my_lon, my_exittime, my_company)
   */
  def multiple_cameras_detect_step1(key: String, mylast_data: JsValue, scc: SparkContext, EM_dis: Double = 0.33, Cosine_sim: Double = 0.9): (Option[RDD[(String, ((Float, Float), Date))]], Double, Double, String, String) = {
    val my_lat = (mylast_data \ "camera_gis" \ "lat").as[Double]
    val my_lon = (mylast_data \ "camera_gis" \ "lon").as[Double]
    val my_feature = parseFloatArray((mylast_data \ "feature").as[String])
    // key is "<camera_id>_<track_id>"; only the camera part is needed here.
    val camera_id = key.split("_")(0)
    val my_company = (mylast_data \ "company").as[String]
    val my_exittime = (mylast_data \ "timestamp").as[String]
    val outputFormat = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss")
    val my_exittime_date = outputFormat.parse(my_exittime)
    // Exit time minus 15 minutes, built via deprecated Date getters;
    // GregorianCalendar normalises a negative minutes value across hour boundaries.
    val pre_time = outputFormat.format(new GregorianCalendar(my_exittime_date.getYear + 1900, my_exittime_date.getMonth, my_exittime_date.getDate, my_exittime_date.getHours, my_exittime_date.getMinutes - 15, my_exittime_date.getSeconds).getTime)

    /**
     * Query ES for records within 0.05 km, inside the preceding 15 minutes,
     * with track_type = 1 and camera_id != this camera.
     * (The original note said 500 m / 15 s / track_type 0, which does not
     * match the values below.)
     */
    val queryarray1 = new Array[String](8)
    queryarray1.update(0, "1")          // track_type
    queryarray1.update(1, pre_time)     // timestamp gte
    queryarray1.update(2, my_exittime)  // timestamp lt
    queryarray1.update(3, camera_id)    // camera_id to exclude
    queryarray1.update(4, "0.05km")     // geo radius
    queryarray1.update(5, my_lat.toString)
    queryarray1.update(6, my_lon.toString)
    queryarray1.update(7, my_company)
    val es_index = "history/logs"
    val querystr1 = create_querystr1(queryarray1)
    val docs = EsSpark.esRDD(scc, es_index, querystr1).asInstanceOf[RDD[(java.lang.String, mutable.LinkedHashMap[java.lang.String, AnyRef])]]
    if (!docs.isEmpty()) {
      val same_pedestrian_option = get_same_pedestrian(docs, my_feature, my_lat, my_lon, my_exittime, my_company, EM_dis, Cosine_sim)
      if (same_pedestrian_option._1.nonEmpty) {
        same_pedestrian_option
      }
      else {
        (None, my_lat, my_lon, my_exittime, my_company)
      }

    }
    else {
      (None, my_lat, my_lon, my_exittime, my_company)
    }
  }

  /**
   * Step 2: look up the temporary multi-camera path documents matching the
   * candidate k / position / time. Returns the documents (as plain maps) plus
   * the caller's lat/lon/starttime, or None when no path document matches.
   */
  def multiple_cameras_detect_step2(k:String,v:((Float,Float),Date),scc:SparkContext,lat:Double,lon:Double,starttime:String):Option[(RDD[(String,Map[String,AnyRef])],Double,Double,String)]={
    val timeFmt = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss")
    // Slots: 0 = camera_id+track_id key, 1 = lat, 2 = lon, 3 = formatted time.
    val querystr2 = create_querystr2(Array(k, v._1._1.toString, v._1._2.toString, timeFmt.format(v._2)))
    println("querystr2", querystr2)
    // More than one path document may match.
    val path_docs = EsSpark.esRDD(scc, "path_multiple_cameras_tmp/logs", querystr2).asInstanceOf[RDD[(java.lang.String,mutable.LinkedHashMap[java.lang.String,AnyRef])]]
    if (path_docs.isEmpty()) {
      None
    }
    else {
      //      println("path_docs.size",path_docs.count())
      val path_docs_map = path_docs.map {
        case (docId, doc) => {
          val plainDoc: Map[String, AnyRef] = doc.toMap
          (docId: String) -> plainDoc
        }
      }
      Option(path_docs_map, lat, lon, starttime)
    }
  }

  /**
   * Key filter for ES path documents: drops the "id" entry, keeps everything
   * else. (Cleanup: collapsed the redundant `if (...) false else true`.)
   */
  def filter_gis(value: (String, AnyRef)): Boolean =
    value._1 != "id"

  /**
   * True when time1 is strictly before time2.
   * (Cleanup: collapsed the redundant `if (...) true else false`.)
   */
  def compare_time(time1: Date, time2: Date): Boolean =
    time1.before(time2)

  /**
   * Multi-camera wandering check over a time-sorted path of ((lat, lon), time)
   * points. Considers only the points within 45 minutes of the last one; fires
   * when that window spans more than mutiple_wandering_duration seconds AND the
   * ratio of mean revisit count to areal speed exceeds avg_occspeed.
   *
   * NOTE(review): `outputFormat` is not defined in this method — it must come
   * from an import (com.sugon.bd.ga.Filters._?); confirm.
   */
  def multiple_wandering_cal(history:List[((Float,Float),Date)],path_id:String,mutiple_wandering_duration:Int,avg_occspeed:Double):Option[Map[String,Any]]={

    val time = history.last._2
    // Last observation minus 45 minutes, via the deprecated Date constructor
    // (getYear/getMonth are fed straight back, so the 1900 offset cancels out);
    // the format/parse round-trip normalises the negative minutes value.
    val base_time = outputFormat.parse(outputFormat.format(new Date(time.getYear,time.getMonth,time.getDate,time.getHours,time.getMinutes-45,time.getSeconds)))
    val last_history = history.filter{t=>{compare_time(base_time,t._2)}}
    val duration = getDateDiff(last_history.head._2,last_history.last._2,TimeUnit.SECONDS).toInt
    if (duration > mutiple_wandering_duration){
      // Bounding box of the recent positions; its area over the duration is the
      // "areal speed" (degrees^2 per second).
      val maxlat = last_history.maxBy(_._1._1)._1._1
      val minlat = last_history.minBy(_._1._1)._1._1
      val maxlon = last_history.maxBy(_._1._2)._1._2
      val minlon = last_history.minBy(_._1._2)._1._2
      val area = (maxlat-minlat)* (maxlon-minlon)
      val avg_speed = area/duration
      // How often each exact (lat, lon) recurs; note the mean uses Int division.
      val occurrence_map = last_history.groupBy(l => l._1).mapValues(_.size)
      val avg_occurrence = occurrence_map.foldLeft(0)(_+_._2)/occurrence_map.keys.size
      //      println("avg_occurrence",avg_occurrence)
      //      println("avg_speed",avg_speed)
      // NOTE(review): avg_speed is 0 when all points coincide, making the ratio
      // Infinity and always triggering the alert — confirm that is intended.
      if (avg_occurrence/avg_speed > avg_occspeed){
        //      if (avg_occurrence/avg_speed > 0){
        val record = Map("type"->"multiple_wandering","id"->path_id,"starttime"->outputFormat.format(last_history.head._2),"endtime"->outputFormat.format(last_history.last._2))
        Some(record)
      }
      else{
        None
      }
    }
    else{
      None
    }
  }

  /**
   * Step 3: merge the new observation (lat, lon, starttime) into the stored
   * path document, run the wandering check on the time-sorted path, and return
   * (wandering-alert-option, updated-document). The updated document gains an
   * entry under initial_k for the new point and an "id" entry set to k.
   */
  def multiple_cameras_detect_step3(k:String,v:Map[String,AnyRef],lat:Double,lon:Double,starttime:String,company:String,initial_k:String,mutiple_wandering_duration:Int=900,avg_occspeed:Double=0.000000005):Option[(Option[Map[String,Any]],Map[String,Any])]={
    val outputFormat = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss")
    val path_value = v.filter {
      // Drop the "id" field; every remaining entry is a path point.
      case (k3, v3) => filter_gis((k3, v3))
    }
    val available_format = path_value.map{
      case (k4,v4)=>{
        // Each value is assumed to be a Buffer[String] of [lat, lon, time] —
        // NOTE(review): the cast relies on the ES document shape; confirm.
        val lat_lon_time = v4.asInstanceOf[collection.mutable.Buffer[String]].toArray
        val my_other_lat = lat_lon_time.head.toFloat
        val my_other_lon = lat_lon_time(1).toFloat
        val my_other_time = outputFormat.parse(lat_lon_time(2))
        ((my_other_lat, my_other_lon), my_other_time)
      }
    }.toList
    // Prepend the freshly observed point, then order the path by time.
    val total_path = ((lat.toFloat, lon.toFloat), outputFormat.parse(starttime))::available_format
    val sorted_path = total_path.sortBy(_._2)
    val multiple_wandering = multiple_wandering_cal(sorted_path,k,mutiple_wandering_duration,avg_occspeed)
    // Updated document: existing fields plus the new point and the path id.
    val data = v++Map(initial_k-> ArrayBuffer(lat.toString, lon.toString, starttime), "id" -> k)
    if (multiple_wandering.nonEmpty) {
      Option(multiple_wandering,data)
    }
    else{
      Option(None,data)
    }
  }

  // TODO: empty stub — presumably meant to orchestrate
  // multiple_cameras_detect_step1/2/3 above; currently does nothing.
  def multi_cameras_detect(key:String,mylast_data:JsValue,scc:SparkContext,EM_dis: Double = 0.33, Cosine_sim: Double = 0.9): Unit ={

  }


}
