package xjcs

import org.apache.kafka.common.serialization.StringDeserializer
import org.apache.spark.SparkConf
import org.apache.spark.rdd.UnionRDD
import org.apache.spark.streaming.kafka010.ConsumerStrategies._
import org.apache.spark.streaming.kafka010.KafkaUtils
import org.apache.spark.streaming.kafka010.LocationStrategies._
import org.apache.spark.streaming.{Seconds, StreamingContext}
import org.elasticsearch.spark._
import org.elasticsearch.spark.rdd.EsSpark
import play.api.libs.json.{JsValue, Json}

import scala.collection.JavaConversions._
import scala.collection.mutable.ArrayBuffer

/**
  * Created by fanweiming on 2017/7/20.
  */

/**
 * Spark Streaming job: consumes pedestrian-tracking events from Kafka, keys
 * them by camera/track/type, caches trajectories in Redis, persists raw
 * 4-second slices to Elasticsearch, and runs single-camera and multi-camera
 * wandering detection, writing abnormal events back to Elasticsearch.
 */
object StoreTemporaryData {

  /**
   * Detection thresholds.
   */
  val EM_distacne = 0.33                 // EM-distance threshold for the multi-camera matcher (name typo kept: public member)
  val Cosine_sim = 0.9                   // cosine-similarity threshold for same-person matching
  val mutiple_wandering_duration = 900   // seconds a person must linger across cameras to count as wandering
  val mutiple_avg_occspeed = 0.000000005 // average occupancy-speed threshold for multi-camera wandering
  val single_wandering_duration = 2      // single-camera wandering duration threshold
  val single_speed_lower = 0.4           // lower speed bound for the single-camera detector
  val single_speed_higher = 0.8          // upper speed bound for the single-camera detector
  val single_wandering_entropy = 2.0     // path-entropy threshold for single-camera wandering
  val single_night_limit = 5             // night-time appearance limit

  def main(args: Array[String]): Unit = {
    val topic = Array("test")
    val conf = new SparkConf()
      .setMaster("local[*]")
      .setAppName("My App")
      .set("es.index.auto.create", "true")
      .set("es.nodes", "localhost")
      .set("es.port", "9200")
      .set("es.query", "?q=me*")
      .set("spark.driver.allowMultipleContexts", "true")


    val kafkaParams = Map[String, Object](
      "bootstrap.servers" -> "localhost:9092",
      "key.deserializer" -> classOf[StringDeserializer],
      "value.deserializer" -> classOf[StringDeserializer],
      "group.id" -> "example",
      "auto.offset.reset" -> "latest"
      //    "enable.auto.commit" -> (false: java.lang.Boolean)
    )
    val sc = new StreamingContext(conf, Seconds(10))
    //val scc = sc.sparkContext    //error sparkContext not serializable
    //val spark=SparkSession.builder().config(conf)
    // Subscribe to Kafka and keep only the JSON payload of each record.
    val logs = KafkaUtils.createDirectStream[String, String](sc, PreferConsistent, Subscribe[String, String](topic, kafkaParams)).map(record => record.value)
    val jsonLines = logs.map[JsValue](Json.parse)
    jsonLines.print()
    // Key every event by "<camera_id>_<track_id>_<track_type>".
    val parsedLines = jsonLines.map { json =>
      val camera_id = (json \ "camera_id").as[String]
      val track_id = (json \ "track_id").as[Int].toString
      val track_type = (json \ "track_type").as[Int].toString
      println("track_type",track_type)
      val str_id = camera_id + "_" + track_id+"_"+track_type
      (str_id, json)
    }
//    parsedLines.print()
    parsedLines.foreachRDD { rdd =>
      //val scc = rdd.sparkContext
      if (!rdd.isEmpty) {
        val scc = rdd.sparkContext
        val newRdd = rdd.groupByKey()
        val no = newRdd.getNumPartitions
        println("no",no)
        // Cache the grouped trajectories in Redis for the cross-batch logic below.
        data2redis(newRdd)

        // Slice the batch into 4-second sub-windows and persist each slice to ES.
        val beforeTime = computeEarlyTime(newRdd)
        val lastTime = computeLastTime(newRdd)
        val timeArray=timeSeq(beforeTime,lastTime,4)
        val rdds = new UnionRDD(scc,timeArray.map{time=>
        rdd2rddByTime(newRdd,time)})
        val myRdd=rdds.flatMap(_._2)
        myRdd.map(_._2.toString).saveJsonToEs("temporary/logs")  // store into ES per 4s slice (deltatime)

        val idCollection = computeEachId(newRdd)
        // Single-camera detection: only track_type == "1" is checked for wandering.
        idCollection.foreach{
          k=>{
            println("k",k)
            val track_type = k.split("_")(2)
            if (track_type=="1"){

              println("单路徘徊检测")
              // FIX: return the pooled Jedis connection even if the read throws.
              val jedis_outer = RedisClient.pool.getResource
              val current_path =
                try jedis_outer.zrange(k, 0, -1).toList
                finally jedis_outer.close()
              val wandering = wandering_highspeed_night_detect(k,current_path,single_wandering_duration,single_speed_lower,single_speed_higher,single_wandering_entropy,single_night_limit)
              if (wandering.nonEmpty){
                println("es_value", Seq(wandering.get))
                scc.makeRDD(Seq(wandering.get)).saveToEs("abnormal/log", Map("es.mapping.id" -> "id"))
                scc.makeRDD(Seq(wandering.get)).saveToEs("abnormal_tmp/log", Map("es.mapping.id" -> "id"))
                println("successfully sent to ES")
              }
            }
          }
        }
        // Multi-camera detection over every camera prefix seen in this batch.
        // FIX: this pooled connection was never closed before (one connection
        // leaked per non-empty batch); release it in `finally`.
        val jedisOuter = RedisClient.pool.getResource
        try {
          idCollection.map(_.split("_")(0) + "*").toSet.foreach {
           uniqueId:String =>
             println("uniqueId",uniqueId)
            val historyKeys = jedisOuter.keys(uniqueId)
            if (!historyKeys.isEmpty) {
              historyKeys.toSeq.foreach { key =>
                println("key",key)
                // NOTE(review): historyLastTime is computed but never used below —
                // computeTime is fed the current unix time instead; confirm which
                // timestamp the 15-unit staleness check was meant to use.
                val historyLastTime = jedisOuter.zrangeWithScores(key, 0, -1).last.getScore
                println("beforeTime",beforeTime)

                val unixime = System.currentTimeMillis() / 1000L
                println("unixime",unixime)
                if (!computeTime(beforeTime, unixime , 15)) {
                  println("absolutely exists outCamera person!")

                  var flag = true   // stays true until an existing path doc is updated
                  val data = jedisOuter.zrange(key, 0, -1).toSeq
                  val mylast_data = Json.parse(data.last)
                  val mytrack_type = (mylast_data \ "track_type").as[Int]
                  if (mytrack_type==1){
                    // Multi-camera wandering:
                    // step 1 — look for the same person in neighbouring cameras.
                    val (same_pedestrians_option,lat,lon,starttime,company)=multiple_cameras_detect_step1(key,mylast_data,scc,EM_distacne,Cosine_sim)
                    // If a record of the same person exists
                    if (same_pedestrians_option.nonEmpty) {
                      val same_pedestrians = same_pedestrians_option.get.collect()
                      same_pedestrians.foreach {
                        case (k1, v1) => {
                          val path_docs_option = multiple_cameras_detect_step2(k1, v1, scc, lat, lon, starttime)
                          // Look up the old path; if it exists, update it.
                          if (path_docs_option.nonEmpty) {
                            val path_docs = path_docs_option.get._1.collect()
                            path_docs.foreach {
                              case (k2, v2) => {
                                val es_result_option = multiple_cameras_detect_step3(k2, v2, lat, lon, starttime, company, key, mutiple_wandering_duration, mutiple_avg_occspeed)
                                val (es_abnormal_option, es_path) = es_result_option.get
                                if (es_abnormal_option.nonEmpty) {
                                  // Multi-camera wandering detected — store in the abnormal indices.
                                  val rdd_multiple_wandering = scc.makeRDD(Seq(es_abnormal_option.get))
                                  EsSpark.saveToEs(rdd_multiple_wandering, "abnormal/log", Map("es.mapping.id" -> "id"))
                                  EsSpark.saveToEs(rdd_multiple_wandering, "abnormal_tmp/log", Map("es.mapping.id" -> "id"))
                                  println("successfully sent to ES")
                                }
                                // Update the path document.
                                println("updatingingingingnign")
                                val update_rdd = scc.makeRDD(Seq(es_path))
                                EsSpark.saveToEs(update_rdd, "path_multiple_cameras/logs", Map("es.mapping.id" -> "id"))
                                println("updated", key)
                                flag = false
                              }

                            }
                          }
                        }
                      }
                    }
                    // No existing path doc was updated — create a fresh one.
                    if (flag){
                      println("creatingingingingnign")
                      val first_data = Map(key+"_"+company -> ArrayBuffer(lat.toString, lon.toString, starttime),"id"->(key+"_"+starttime))
                      println("first_data",first_data)
                      println(lat.toString, lon.toString, starttime)
                      val first_rdd = scc.makeRDD(Seq(first_data))
                      EsSpark.saveToEs(first_rdd, "path_multiple_cameras/logs")
                      println("created",key)
                    }

                    // Summarise the track's features, archive them to the history
                    // index, then let the Redis key expire shortly.
                    val partCharacter = majorityVote(data)
                    val partConf = computeConf(data, partCharacter)
                    val partOther = computeOtherFeature(data)
                    val res: Map[String, Any] = partCharacter ++ partConf ++ partOther
                    println("res",res)
                    scc.makeRDD(Seq(res)).saveToEs("history/logs")
                    jedisOuter.expire(key, 100)
                  }
                }
              }
            }
          }
        } finally jedisOuter.close()
      }
      else println("rdd is empty!")

    }

    sc.start()
    sc.awaitTermination()
  }
}









