package com.central.service

import java.{sql, util}
import java.sql.{DriverManager, Timestamp}
import java.text.SimpleDateFormat
import java.util.concurrent.ConcurrentHashMap
import java.util.{Calendar, Date, Properties, UUID}

import com.alibaba.fastjson.{JSONArray, JSONObject}
import com.central.bean
import com.central.bean.{ClueRdd, ClueResultWithDetail, ResultDetail, WifiEntity}
import com.central.controller.InitComplexClueConditionEntity
import com.central.utils.{ConfigUtil, LoadESData, ParseTime}
import com.central.utils.Relationship.{AND, NOT, OR, Relationship}
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.{DataFrame, SaveMode, SparkSession}
import org.elasticsearch.spark.rdd.EsSpark

import scala.reflect.ClassTag
import scala.reflect.runtime.universe.TypeTag
import scala.util.control.NonFatal
import scala.util.{Failure, Success, Try}

class NewClueService {
  val properties = new Properties()
  properties.put("user", "root")
  properties.put("password", "root")
  val TABLE_COLLIDE_RESULT = ConfigUtil.getProperties().getProperty("table.collide.result")
  val TABLE_COLLIDE_DETAIL = ConfigUtil.getProperties().getProperty("table.collide.detail")
  val TABLE_COLLIDE_STATUS = ConfigUtil.getProperties().getProperty("table.collide.status")
  val TABLE_SITE_RELATION = ConfigUtil.getProperties().getProperty("table.site.relation")
  val JDBC_URL = ConfigUtil.getProperties().getProperty("mysql.url")
  val USER_NAME = ConfigUtil.getProperties().getProperty("mysql.username")
  val PASSWORD = ConfigUtil.getProperties().getProperty("mysql.password")

  private val COLLIDE_CODE_INDEX: String = ConfigUtil.getProperties().getProperty("collide.code.index")
  private val COLLIDE_FACE_INDEX: String = ConfigUtil.getProperties().getProperty("collide.face.index")
  private val COLLIDE_CAR_INDEX: String = ConfigUtil.getProperties().getProperty("collide.licenseplate.index")


  def clueAll(spark: SparkSession, clueContext: InitComplexClueConditionEntity) = {
    val prop = new Properties()
    prop.put("user", "root")
    prop.put("password", "root")
    //读取数据库，获取站点和设备关系结果
    val frame: DataFrame = spark.read.jdbc(JDBC_URL, TABLE_SITE_RELATION, prop)
    val relate = frame.rdd.map(x => {
      //row 下标从0开始
      (x.get(2).toString, x.get(1).toString, x.get(3).toString)
    }).collect()
    //将站点和设备关系结果集进行转换，得到map，key为站点，value为对应的设备信息。
    val siteDevices: Map[String, Array[String]] = relate.groupBy(_._1).map(x => {
      val site = x._1
      val devices = x._2.map(_._2)
      (site, devices)
    })
    //将站点和设备关系结果集进行分类，为后面不同数据类型的分类做准备。
    val codeRelate = relate
      .filter(x => {
        x._3.equals("1")
      })
      .map(x => {
        (x._1, x._2)
      })
    println(codeRelate.length+"   code")
    val faceRelate = relate
      .filter(x => {
        x._3.equals("2")
      })
      .map(x => {
        (x._1, x._2)
      })
    println(faceRelate.length+"    face")
    val carRelate = relate
      .filter(x => {
        x._3.equals("3")
      })
      .map(x => {
        (x._1, x._2)
      })
    val relationship = clueContext.relationship
    val conditionsRange = clueContext.conditions.size
    val conditions = clueContext.conditions
    val id = clueContext.jobId
    val activity = clueContext.activity
    val hasliferow = clueContext.hasliferow.toInt
    val fuzzyway = clueContext.fuzzyway.toInt
    //对传入条件参数进行转换，主要是将传入条件的站点信息，转换为对应的设备信息，同时按照侦码、人脸、车牌等不同数据类型进行划分。
    val transConditions: List[(List[ClueConditionSiteEntity], Int)] =
      clueContext
        .conditions
        .map(listConditions => {
          val where = listConditions
            .map(condition => ClueConditionSiteEntity(condition.siteNumber, condition.begintime, condition.endtime, if (condition.didAppear == 0) false else true))
          val threshold = listConditions.head.threshold
          (where, threshold)
        })
    //2020-05-07碰撞功能改造：对传入条件参数按照数据类型进行分类
    //侦码类型数据进行筛选并转换
    val codeConditions = transConditions.map(x => {
      val value: List[ClueConditionSiteEntity] = x._1
      val condition = value.filter(x => {
        codeRelate.map(_._1).contains(x.siteNumber)
      })
      (condition,x._2)
    })
      .map(x => {
        val conditions = x._1.flatMap(elem => {
          val begin = elem.beginTime
          val endtime = elem.endTime
          val didAppear = elem.didAppear
          val site = elem.siteNumber
          siteDevices.get(site).get
            .map(x => {
              ClueConditionDeviceEntity(x, begin, endtime, didAppear)
            })
        })
        (conditions,x._2)
      })
    println(codeConditions.length+"=====codeCondition length")
    //人脸类型数据进行筛选并转换
    val faceConditions = transConditions.map(x => {
      val value: List[ClueConditionSiteEntity] = x._1

     val condition=value.filter(x => {
        faceRelate.map(_._1).contains(x.siteNumber)
      })
      (condition,x._2)
    })
      .map(x => {
        val conditions = x._1.flatMap(elem => {
          val begin = elem.beginTime
          val endtime = elem.endTime
          val didAppear = elem.didAppear
          val site = elem.siteNumber
          siteDevices.get(site).get
            .map(x => {
              ClueConditionDeviceEntity(x, begin, endtime, didAppear)
            })
        })

        (conditions,x._2)
      })

    //车牌类型数据进行筛选并转换
    val carConditions = transConditions.map(x => {
      val value: List[ClueConditionSiteEntity] = x._1

      val condition=value.filter(x => {
        carRelate.map(_._1).contains(x.siteNumber)
      })
      (condition,x._2)
    })
      .map(x => {
       val conditions= x._1.flatMap(elem => {
          val begin = elem.beginTime
          val endtime = elem.endTime
          val didAppear = elem.didAppear
          val site = elem.siteNumber
          siteDevices.get(site).get
            .map(x => {
              ClueConditionDeviceEntity(x, begin, endtime, didAppear)
            })
        })
        (conditions,x._2)

      })


    val threshold = transConditions.head._2
    //按不同数据类别每个点位条件内进行并操作

    val everyCodeNode: List[RDD[ClueResultWithDetail]] = codeConditions.map(x => {
      println(x._1.length+"======")
      val array = x._1.toArray
      justClue(spark, id, array, 2,COLLIDE_CODE_INDEX)
    })
    everyCodeNode.map(x=>{
      println(x.count())
    })
    val everyFaceNode: List[RDD[ClueResultWithDetail]] = faceConditions.map(x => {

      val array = x._1.toArray
      justClue(spark, id, array, 2,COLLIDE_FACE_INDEX)
    })

    val everyCarNode: List[RDD[ClueResultWithDetail]] = carConditions.map(x => {

      val array = x._1.toArray
      justClue(spark, id, array, 2,COLLIDE_CAR_INDEX)
    })



    //生活规律分析
    var hasLifeEveryCodeNode: List[RDD[ClueResultWithDetail]] =
      if (hasliferow == 1) {
        val recentRdd = LoadESData.loadRecentEs(spark, COLLIDE_CODE_INDEX, activity)
        everyCodeNode
          .map(node => {
            node.map(clueResult => {
              (clueResult.idNumber, clueResult)
            })
              .join(recentRdd)
              .map(joinResult => {
                (joinResult._1, joinResult._2._1)
              })
              .map(x => {
                x._2
              })
          })

      }
      else {
        everyCodeNode
      }

    var hasLifeEveryFaceNode: List[RDD[ClueResultWithDetail]] =
      if (hasliferow == 1) {
        val recentRdd = LoadESData.loadRecentEs(spark, COLLIDE_FACE_INDEX, activity)
        everyFaceNode
          .map(node => {
            node.map(clueResult => {
              (clueResult.idNumber, clueResult)
            })
              .join(recentRdd)
              .map(joinResult => {
                (joinResult._1, joinResult._2._1)
              })
              .map(x => {
                x._2
              })
          })

      }
      else {
        everyFaceNode
      }

    var hasLifeEveryCarNode: List[RDD[ClueResultWithDetail]] =
      if (hasliferow == 1) {
        val recentRdd = LoadESData.loadRecentEs(spark, COLLIDE_CAR_INDEX, activity)
        everyCarNode
          .map(node => {
            node.map(clueResult => {
              (clueResult.idNumber, clueResult)
            })
              .join(recentRdd)
              .map(joinResult => {
                (joinResult._1, joinResult._2._1)
              })
              .map(x => {
                x._2
              })
          })

      }
      else {
        everyCarNode
      }

    //
    //对点位层级的结果按照关联规则进行join
    var joinCodeRdd: RDD[ClueResultWithDetail] = hasLifeEveryCodeNode.foldLeft(spark.sparkContext.emptyRDD[ClueResultWithDetail]) {

      case (prevRdd, oneNodeRdd) => {
        if (prevRdd.isEmpty() && !oneNodeRdd.isEmpty()) {
          oneNodeRdd
        }
        else if (!oneNodeRdd.isEmpty()) {
          relationship match {
            case "and" => mergeRddsWith(AND, prevRdd, oneNodeRdd)(entity => entity.idNumber, (one, another) => one.mergeWithPlusingConditionCount(another))
            case "or" => mergeRddsWith(OR, prevRdd, oneNodeRdd)(entity => entity.idNumber, (one, another) => one.merge(another))
            case "not" => mergeRddsWith(NOT, prevRdd, oneNodeRdd)(entity => entity.idNumber, (one, another) => one.merge(another))
          }
        }
        else
          spark.sparkContext.emptyRDD[ClueResultWithDetail]
      }
    }

    var joinFaceRdd: RDD[ClueResultWithDetail] = hasLifeEveryFaceNode.foldLeft(spark.sparkContext.emptyRDD[ClueResultWithDetail]) {

      case (prevRdd, oneNodeRdd) => {
        if (prevRdd.isEmpty() && !oneNodeRdd.isEmpty()) {
          oneNodeRdd
        }
        else if (!oneNodeRdd.isEmpty()) {
          relationship match {
            case "and" => mergeRddsWith(AND, prevRdd, oneNodeRdd)(entity => entity.idNumber, (one, another) => one.mergeWithPlusingConditionCount(another))
            case "or" => mergeRddsWith(OR, prevRdd, oneNodeRdd)(entity => entity.idNumber, (one, another) => one.merge(another))
            case "not" => mergeRddsWith(NOT, prevRdd, oneNodeRdd)(entity => entity.idNumber, (one, another) => one.merge(another))
          }
        }
        else
          spark.sparkContext.emptyRDD[ClueResultWithDetail]
      }
    }

    var joinCarRdd: RDD[ClueResultWithDetail] = hasLifeEveryCarNode.foldLeft(spark.sparkContext.emptyRDD[ClueResultWithDetail]) {

      case (prevRdd, oneNodeRdd) => {
        if (prevRdd.isEmpty() && !oneNodeRdd.isEmpty()) {
          oneNodeRdd
        }
        else if (!oneNodeRdd.isEmpty()) {
          relationship match {
            case "and" => mergeRddsWith(AND, prevRdd, oneNodeRdd)(entity => entity.idNumber, (one, another) => one.mergeWithPlusingConditionCount(another))
            case "or" => mergeRddsWith(OR, prevRdd, oneNodeRdd)(entity => entity.idNumber, (one, another) => one.merge(another))
            case "not" => mergeRddsWith(NOT, prevRdd, oneNodeRdd)(entity => entity.idNumber, (one, another) => one.merge(another))
          }
        }
        else
          spark.sparkContext.emptyRDD[ClueResultWithDetail]
      }
    }


    val alldevicecodes: List[String] = conditions.flatMap(x => {
      x.flatMap(data => {
        val devices: Array[String] = siteDevices.get(data.siteNumber).get
//        data.siteNumber
        devices
      })
    }).distinct
    //    joinRdd.zip()

    //    println(joinRdd.count() + "======joinRdd")
    //模糊处理，目前模糊处理只对侦码类型的数据进行处理
    val fuzzywayCodeRdd = if (fuzzyway != 0) {
      val details: Array[ClueResultWithDetail] = joinCodeRdd.collect()
        .map(data => {
          //只对满足条件数小于阈值且小于阈值不超过2的
          if (data.conditionCount < threshold && (threshold - data.conditionCount) < 2) {
            println(data.conditionCount)
            val codetype = data.dataType
            val tagcode = data.idNumber
            val details: util.ArrayList[ResultDetail] = data.details
            val device_codes: Array[String] = details.toArray.map(x => {
              x.asInstanceOf[ResultDetail].devicecode + ""
            })

            val diffDevice: List[String] = alldevicecodes.diff(device_codes)

            //通过关联关系号码，找出关联号码经常出现的点位信息
            if (fuzzyway == 1) {
              var relat_code = findRelatDevice(spark, COLLIDE_CODE_INDEX, tagcode)
              if (relat_code != null) {
                if (diffDevice.intersect(relat_code).size > 0) {

                  val count = data.conditionCount + 1
                  ClueResultWithDetail(data.jobId, data.idNumber, data.dataType, data.count, data.details, count, data.resultid, data.createtime, data.address, data.location,data.phone,data.picpath)
                }
              }
            }
            else if (fuzzyway == 2) {
              var relat_code = findTrailDevice(spark, COLLIDE_CODE_INDEX, tagcode, codetype)
              if (relat_code != null) {
                if (diffDevice.intersect(relat_code).size > 0) {
                  val count = data.conditionCount + 1
                  ClueResultWithDetail(data.jobId, data.idNumber, data.dataType, data.count, data.details, count, data.resultid, data.createtime, data.address, data.location,data.phone,data.picpath)
                }
              }
            }
            else {
              var relat_code = findHotDevice(spark, COLLIDE_CODE_INDEX, tagcode, codetype)
              if (relat_code != null) {
                if (diffDevice.intersect(relat_code).size > 0) {
                  val count = data.conditionCount + 1
                  ClueResultWithDetail(data.jobId, data.idNumber, data.dataType, data.count, data.details, count, data.resultid, data.createtime, data.address, data.location,data.phone,data.picpath)
                }
              }
            }
          }
          data

        })
      spark.sparkContext.parallelize(details)
    }
    else joinCodeRdd
    val clueResultCodeRdd: RDD[ClueResultWithDetail] =
      if (relationship.equalsIgnoreCase("and") && conditionsRange > 1)
        fuzzywayCodeRdd.filter(result => result.conditionCount >= threshold)
      else
        fuzzywayCodeRdd

    val clueResultRdd = clueResultCodeRdd.union(joinFaceRdd).union(joinCarRdd)
    //保存到数据库当中
    val finalResult: RDD[ClueResultWithDetail] = if (clueResultRdd.isEmpty()) spark.sparkContext
      .parallelize(List(bean.ClueResultWithDetail(clueContext.jobId, "0", "None", 0, null, 0, UUID.randomUUID().toString, null, "None", "None","",""))) else clueResultRdd
    val properties = new Properties()
    properties.put("user", "root")
    properties.put("password", "root")

    val collideResult = finalResult.map(x => {

      val jobId = x.jobId
      val code = x.idNumber
      val dataType = x.dataType
      val count = x.count
      val conditionCount = x.conditionCount
      val phone = x.phone

//      println("<<<<" + jobId + " " + code + " " + dataType + " " + count)
      CollideResult(jobId.toInt, code, dataType, count, ParseTime.LongToDate(System.currentTimeMillis().toString), conditionCount, x.resultid, x.address, x.location,phone)
    }).cache()
    //    collideResult.collect()
    import spark.implicits._
    collideResult
      .filter(x => {
        !("None".equals(x.codetype))
      })
      .toDS()
      .write
      .mode(SaveMode.Append)
      .jdbc(JDBC_URL, TABLE_COLLIDE_RESULT, properties)


    finalResult
      .filter(x => {
        !x.dataType.equals("None")
      })
      .filter(x => {
        x.details != null
      })
      .flatMap(x => {
        x.details.toArray()
      })
      .map(x => {
        x.asInstanceOf[ResultDetail]
      })
      .filter(x => {
        !("None".equals(x.resultid)) || !("None".equals(x.codetype))
      })

      .toDS().write.mode(SaveMode.Append).jdbc(JDBC_URL, TABLE_COLLIDE_DETAIL, properties)
    updateStatus(id)
  }

  /**
    * 找出与参数code相关联的号码对应的经常出现的设备点
    *
    * @param code
    */
  def findRelatDevice(spark: SparkSession, index: String, code: String) = {

    //找出code关联的号码
    try {
      //      Class.forName("com.mysql.jdbc.Driver")
      Class.forName("com.mysql.cj.jdbc.Driver")
    } catch {
      case e =>
        e
    }
    var relatcode = new util.ArrayList[String]()
    val connection = DriverManager.getConnection(JDBC_URL, USER_NAME, PASSWORD)
    val statement = connection.prepareStatement(s"select tagcode,tagtype from t_wifi_eachother_relation where srccode=$code order by counts desc")
    val resultSet = statement.executeQuery()
    var data = ""
    if (resultSet.next()) {
      data = (resultSet.getString(1) + "_" + resultSet.getString(2))
    }
    resultSet.close()
    statement.close()
    connection.close()

    if (data.length > 0) {
      val codetype = data.split("_")(0)
      val tagcodes = data.split("_")(1)

      val queryPara = new JSONObject()
      val queryBool = new JSONObject()
      val queryCode = new JSONObject()
      if (codetype.equals("imsi")) {
        queryCode.put("imsi", tagcodes)
      }
      else if (codetype.equals("mac")) {
        queryCode.put("mac", tagcodes)
      }
      else {
        queryCode.put("imei", tagcodes)
      }

      val calendar = Calendar.getInstance()
      val date = new Date()
      calendar.setTime(date)
      calendar.add(Calendar.DATE, 0 - 10)
      calendar.set(Calendar.HOUR, 0)
      calendar.set(Calendar.AM_PM, 0)
      calendar.set(Calendar.MINUTE, 0)
      calendar.set(Calendar.SECOND, 0)
      val starttime = ParseTime.sdf.format(calendar.getTime)
      calendar.add(Calendar.DATE, 10)
      val endtime = ParseTime.sdf.format(calendar.getTime)


      val queryTerms = new JSONObject()
      queryTerms.put("term", queryCode)
      val queryFilters = new JSONArray()
      queryFilters.add(queryTerms)
      val queryTime = new JSONObject()
      queryTime.put("gte", starttime)
      queryTime.put("lte", endtime)
      val querycaptime = new JSONObject()
      querycaptime.put("captime", queryTime)
      val queryRange = new JSONObject()
      queryRange.put("range", querycaptime)
      queryFilters.add(queryRange)

      queryBool.put("filter", queryFilters)
      queryPara.put("bool", queryBool)
      val esRdd = EsSpark.esRDD(spark.sparkContext, index, queryPara.toJSONString)
      esRdd.map(x => {
        x._2.get("device_code").get + ""
      })
        .map(x => {
          (x, 1)
        })
        .reduceByKey(_ + _)
        .collect()
        .sortBy(x => {
          x._2
        })
        .reverse
        .map(x => {
          x._1
        })
        .splitAt(3)
        ._1
    }
    else null
  }

  /**
    * 找出code历史经常出现的设备点
    *
    * @param spark
    * @param index
    * @param code
    */
  def findHotDevice(spark: SparkSession, index: String, code: String, codetype: String) = {

    val calendar = Calendar.getInstance()
    val date = new Date()
    calendar.setTime(date)
    calendar.add(Calendar.DATE, 0 - 10)
    calendar.set(Calendar.HOUR, 0)
    calendar.set(Calendar.AM_PM, 0)
    calendar.set(Calendar.MINUTE, 0)
    calendar.set(Calendar.SECOND, 0)
    val starttime = ParseTime.sdf.format(calendar.getTime)
    calendar.add(Calendar.DATE, 10)
    val endtime = ParseTime.sdf.format(calendar.getTime)

    val queryPara = new JSONObject()
    val queryBool = new JSONObject()

    val queryMatch = new JSONObject()
    queryMatch.put(codetype, code)
    val queryMast = new JSONObject()
    queryMast.put("match", queryMatch)
    queryBool.put("must", queryMast)

    val queryTime = new JSONObject()
    queryTime.put("gte", starttime)
    queryTime.put("lte", endtime)
    val querycaptime = new JSONObject()
    querycaptime.put("captime", queryTime)
    val queryRange = new JSONObject()
    queryRange.put("range", querycaptime)
    val queryMasts = new JSONArray()
    queryMasts.add(queryRange)
    val queryBools = new JSONObject()
    queryBools.put("must", queryMasts)
    val queryFilter = new JSONObject()
    queryFilter.put("bool", queryBools)
    queryBool.put("filter", queryFilter)
    queryPara.put("bool", queryBool)
    val esRdd = EsSpark.esRDD(spark.sparkContext, index, queryPara.toJSONString)

    esRdd.map(x => {
      (x._2.get("device_code").get + "", 1)
    })
      .reduceByKey(_ + _)
      .collect()
      .sortBy(x => {
        x._2
      })
      .reverse
      .splitAt(3)
      ._1
      .map(x => {
        x._1
      })
  }

  /**
    * 找出code历史轨迹经常出现的设备点
    *
    * @param spark
    * @param index
    * @param code
    * @param codetype
    */
  def findTrailDevice(spark: SparkSession, index: String, code: String, codetype: String) = {
    val calendar = Calendar.getInstance()
    val date = new Date()
    calendar.setTime(date)
    calendar.add(Calendar.DATE, 0 - 10)
    calendar.set(Calendar.HOUR, 0)
    calendar.set(Calendar.AM_PM, 0)
    calendar.set(Calendar.MINUTE, 0)
    calendar.set(Calendar.SECOND, 0)
    val starttime = ParseTime.sdf.format(calendar.getTime)
    calendar.add(Calendar.DATE, 10)
    val endtime = ParseTime.sdf.format(calendar.getTime)

    val queryPara = new JSONObject()
    val queryBool = new JSONObject()

    val queryMatch = new JSONObject()
    queryMatch.put(codetype, code)
    val queryMast = new JSONObject()
    queryMast.put("match", queryMatch)
    queryBool.put("must", queryMast)

    val queryTime = new JSONObject()
    queryTime.put("gte", starttime)
    queryTime.put("lte", endtime)
    val querycaptime = new JSONObject()
    querycaptime.put("captime", queryTime)
    val queryRange = new JSONObject()
    queryRange.put("range", querycaptime)
    val queryMasts = new JSONArray()
    queryMasts.add(queryRange)
    val queryBools = new JSONObject()
    queryBools.put("must", queryMasts)
    val queryFilter = new JSONObject()
    queryFilter.put("bool", queryBools)
    queryBool.put("filter", queryFilter)
    queryPara.put("bool", queryBool)
    val esRdd = EsSpark.esRDD(spark.sparkContext, index, queryPara.toJSONString)

    esRdd.map(x => {
      (x._2.get("device_code").get + "", 1)
    })
      .reduceByKey(_ + _) //device,count
      .map(x => {
      x.swap
    })
      .groupByKey() //count,devices
      .collect()
      .sortBy(_._1)
      .reverse
      .splitAt(3) //点位count出现次数拍前三的
      ._1
      .sortBy(tump => {
        //按出现点位数相同的点位数据集个数进行排序
        tump._2.size
      })
      .reverse
      .head //得到出现点位数相同的点位数据集个数最多的
      ._2
      .toArray
  }


  def updateStatus(jobId: String) = {
    val table = TABLE_COLLIDE_STATUS
    try {
      Class.forName("com.mysql.cj.jdbc.Driver")
    } catch {
      case e =>
        e.printStackTrace()
    }
    val connection = DriverManager.getConnection(JDBC_URL, USER_NAME, PASSWORD)
    val statement = connection.prepareStatement(
      s"update $table set status = ?, successtime=? where id = ?  "
    )
    statement.setInt(1, 2)
    statement.setString(2, ParseTime.LongToDate(System.currentTimeMillis().toString))
    statement.setInt(3, jobId.toInt)
    statement.executeUpdate()
    statement.close()
    connection.close()
  }

  private[service] def mergeRddsWith[T: ClassTag : TypeTag](relationShip: Relationship, one: RDD[T], another: RDD[T])(key: T => String, merge: (T, T) => T): RDD[T] = {

    val onePair =
      one.mapPartitions(x => {
        x.map(x => {
          (key(x), x)
        })
      })
    val anotherPair =
      another.mapPartitions(x => {
        x.map(x => {
          (key(x), x)
        })
      })


    relationShip match {
      case AND => onePair.fullOuterJoin(anotherPair).map {
        case (key, joinResult) => {
          if (joinResult._1.isDefined && joinResult._2.isDefined)
            merge(joinResult._1.get, joinResult._2.get)
          else
            joinResult._1.getOrElse(joinResult._2.get)
        }
      }
      case OR => onePair.fullOuterJoin(anotherPair).map {
        case (key, joinResult) => {
          if (joinResult._1.isDefined && joinResult._2.isDefined)
            merge(joinResult._1.get, joinResult._2.get)
          else
            joinResult._1.getOrElse(joinResult._2.get)
        }
      }
      case NOT => onePair
        .leftOuterJoin(anotherPair)
        .filter(result => result._2._2.isEmpty)
        .mapPartitions(x => {
          x.map(_._2._1)
        })
      //        .optMap(_._2._1)
    }
  }


  private[service] def justClue(spark: SparkSession, jobId: String, conditions: Array[ClueConditionDeviceEntity], threshold: Int,index:String): RDD[ClueResultWithDetail] = {
    require(conditions != null)
    val sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss")
    var rddWfifEntity: RDD[WifiEntity] = null
    val starttime = conditions.head.beginTime
    val endtime = conditions.head.endTime
    val devices = conditions.map(x => {
      x.deviceNumber
    }).toList
    import scala.collection.JavaConverters._
     rddWfifEntity = LoadESData.getMutliDeviceData(spark,index,devices.asJava,sdf.format(starttime),sdf.format(endtime))
      .map((e => {
        if (e._2.get("mac").getOrElse(None) != None) //mac数据的封装
          WifiEntity(
            e._1, e._2.get("mac").get + "", e._2.get("mac").get + "", "mac",
            Timestamp.valueOf(e._2.get("captime").get + ""), Timestamp.valueOf(e._2.get("captime").get + ""),
            //                Timestamp.valueOf(e._2.get("ir_time").get + ""),
            new Date(ParseTime.DateToLong(e._2.get("ir_time").get + "")),
            e._2.get("device_code").get + "",
            e._2.get("device_name").get + "",
            e._2.get("address").get + "",
            e._2.get("location").getOrElse("") + "",
            e._2.get("captime").get + "",

            java.lang.Double.parseDouble(e._2.get("latitude").get + ""),
            java.lang.Double.parseDouble(e._2.get("longitude").get + ""),
            e._2.get("phone").getOrElse("") + "", "",
            e._2.get("picpath").getOrElse("")+""
          )
        else if (e._2.get("imsi").getOrElse(None) != None)
          WifiEntity(
            e._1, e._2.get("imsi").get + "", e._2.get("imsi").get + "", "imsi",
            Timestamp.valueOf(e._2.get("captime").get + ""), Timestamp.valueOf(e._2.get("captime").get + ""),
            //                Timestamp.valueOf(e._2.get("ir_time").get + ""),
            new Date(ParseTime.DateToLong(e._2.get("ir_time").get + "")),
            e._2.get("device_code").get + "",
            e._2.get("device_name").get + "",
            e._2.get("address").get + "",
            e._2.get("location").getOrElse("") + "",
            e._2.get("captime").get + "",
            java.lang.Double.parseDouble(e._2.get("latitude").get + ""),
            java.lang.Double.parseDouble(e._2.get("longitude").get + ""),

            e._2.get("phone").getOrElse("") + "", "",
            e._2.get("picpath").getOrElse("")+""
          )
        else if (e._2.get("imei").getOrElse(None) !=None)
          WifiEntity(
            e._1, e._2.get("imei").get + "", e._2.get("imei").get + "", "imei",
            Timestamp.valueOf(e._2.get("captime").get + ""), Timestamp.valueOf(e._2.get("captime").get + ""),
            //                Timestamp.valueOf(e._2.get("ir_time").get + ""),
            new Date(ParseTime.DateToLong(e._2.get("ir_time").get + "")),
            e._2.get("device_code").get + "",
            e._2.get("device_name").get + "",
            e._2.get("address").getOrElse("") + "",
            e._2.get("location").getOrElse("") + "",
            e._2.get("captime").get + "",
            java.lang.Double.parseDouble(e._2.get("latitude").get + ""),
            java.lang.Double.parseDouble(e._2.get("longitude").get + ""),

            e._2.get("phone").getOrElse("") + "", "",
            e._2.get("picpath").getOrElse("")+""
          )
        else if (e._2.get("faceid").getOrElse(None) !=None)
          WifiEntity(
            e._1,
            e._2.get("faceid").get+"",
            e._2.get("faceid").get+"",
            "face",
            Timestamp.valueOf(e._2.get("captime").get + ""),
            Timestamp.valueOf(e._2.get("captime").get + ""),
            new Date(ParseTime.DateToLong(e._2.get("ir_time").get + "")),
            e._2.get("device_code").get + "",
            e._2.get("device_name").get + "",
            e._2.get("address").getOrElse("") + "",
            e._2.get("location").getOrElse("") + "",
            e._2.get("captime").get + "",
            java.lang.Double.parseDouble(e._2.get("latitude").get + ""),
            java.lang.Double.parseDouble(e._2.get("longitude").get + ""),
            e._2.get("phone").getOrElse("") + "",
            "",
            e._2.get("facepath").getOrElse("")+""
          )
        else
          WifiEntity(
            e._1,
            e._2.get("licenseplateid").get+"",
            e._2.get("licenseplateid").get+"",
            "licenseplate",
            Timestamp.valueOf(e._2.get("captime").get + ""),
            Timestamp.valueOf(e._2.get("captime").get + ""),
            new Date(ParseTime.DateToLong(e._2.get("ir_time").get + "")),
            e._2.get("device_code").get + "",
            e._2.get("device_name").get + "",
            e._2.get("address").getOrElse("") + "",
            e._2.get("location").getOrElse("") + "",
            e._2.get("captime").get + "",
            java.lang.Double.parseDouble(e._2.get("latitude").get + ""),
            java.lang.Double.parseDouble(e._2.get("longitude").get + ""),
            e._2.get("phone").getOrElse("") + "",
            "",
            e._2.get("licenseplate_path").getOrElse("")+""
          )
      }))
//
    //进行碰撞
    val result: RDD[ClueResultWithDetail] = Try(rddWfifEntity.isEmpty()) match {
      case Success(result) =>
        if (!result)
          new ClueRdd(rddWfifEntity).clueAsUnion(spark, jobId)
        else spark.sparkContext.parallelize(List(bean.ClueResultWithDetail(jobId, "0", "None", 0, null, 0, null, null, null, null,null,null))) //最新需求导致使用clueAsUnion,需求面试ClueEntity中的描述

      case Failure(error) => spark.sparkContext
        .parallelize(List(bean.ClueResultWithDetail(jobId, "0", "None", 0, null, 0, null, null, null, null,null,null)))
    }


    if (result != null && result.count() == 1 && result.collect().head.idNumber.equals("0"))
      spark.sparkContext.emptyRDD[ClueResultWithDetail]
    else
      result
  }

}

case class CollideResult(
                          jobid: Integer,
                          tagcode: String,
                          codetype: String,
                          count: Integer,
                          createtime: String,
                          conditioncount: Integer,

                          resultid: String,
                          address: String,
                          location:String,
                          phone: String
                        )

case class ClueConditionSiteEntity(siteNumber: String, beginTime: Long, endTime: Long, didAppear: Boolean)

case class ClueConditionDeviceEntity(deviceNumber: String, beginTime: Long, endTime: Long, didAppear: Boolean)

case class ClueConditionEntity(deviceNumber: String, beginTime: Long, endTime: Long, didAppear: Boolean)