package com.central.service

import java.sql.ResultSet
import java.util
import java.util.{Calendar, Date, Properties}

import com.alibaba.druid.pool.DruidDataSourceFactory
import com.alibaba.fastjson.{JSONArray, JSONObject}
import com.central.utils.RedisUtil.{host, password, port}
import com.central.utils.{ConfigUtil, ParseTime}
import javax.sql.DataSource
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.SparkSession
import org.elasticsearch.spark.rdd.EsSpark
import redis.clients.jedis.{JedisPool, JedisPoolConfig}

object RecordService {
  // Application configuration, loaded once at object initialization.
  private val prop: Properties = ConfigUtil.properties
  private val RECORD_INDEX: String = prop.getProperty("record.es.index")
  // NOTE(review): reads the same key as RECORD_INDEX ("record.es.index") —
  // looks like a copy-paste slip; RELATION_CODE_INDEX below reads
  // "record.relation.code". This val is unused in this file; confirm intent.
  private val CODE_RELATION_INDEX: String = prop.getProperty("record.es.index")
  private val RELATION_FACE_INDEX: String = prop.getProperty("record.relation.face")
  private val RELATION_LICENSE_INDEX: String = prop.getProperty("record.relation.licenseplate")
  private val RELATION_CODE_INDEX: String = prop.getProperty("record.relation.code")


  // Look-back window in days for loading relation data (parsed with toInt later).
  private val TIME: String = prop.getProperty("record.time")
  val JDBC_URL = prop.getProperty("mysql.url")
  val USER_NAME = prop.getProperty("mysql.username")

  // MySQL table names: code-code relations, focus persons, and the per-identifier
  // archive tables (note: "TABALE" typo is preserved — external callers may rely on it).
  val TABLE_RELATION = prop.getProperty("table.relation")
  val TABLE_FOCUS = prop.getProperty("table.record.focus")
  val TABALE_ARCHIVE_IMEI = prop.getProperty("table.archive.imei")
  val TABALE_ARCHIVE_IMSI = prop.getProperty("table.archive.imsi")
  val TABALE_ARCHIVE_MAC = prop.getProperty("table.archive.mac")

  val TABALE_ARCHIVE_LICENSEPLATE = prop.getProperty("table.archive.licenseplate")
  val TABALE_ARCHIVE_FACE = prop.getProperty("table.archive.face")
  // Redis pool shared by all dedupe/update helpers below.
  private val jedisPoolConfig = new JedisPoolConfig
  jedisPoolConfig.setMaxTotal(1000)

  val REDIS_DATABASE = prop.getProperty("record.redis.database").toInt
  private val jedisPool = new JedisPool(jedisPoolConfig, host, port, 3000, password, REDIS_DATABASE)
  // JDBC properties for Spark's read.jdbc calls.
  // NOTE(review): credentials are hard-coded to root/root here while
  // mysql.username is read from config above — confirm which is intended.
  val properties: Properties = new Properties()
  properties.put("user", "root")
  properties.put("password", "root")

  // Druid connection pool built from the same config; None when creation fails.
  // NOTE(review): updateMysqlData calls dataSource.get unconditionally, so a
  // failed pool creation surfaces later as NoSuchElementException.
  val dataSource: Option[DataSource] = {
    try {
      val druidProps = ConfigUtil.properties
      // Obtain the Druid pool configuration
      // and build the data source from it
      Some(DruidDataSourceFactory.createDataSource(druidProps))
    } catch {
      case error: Exception =>
        //        LOG.error("Error Create Mysql Connection", error)
        None
    }
  }

  /**
    * Entry point: builds a local SparkSession wired to the configured ES
    * cluster, lowers the Spark log level, then runs the record analysis job.
    */
  def main(args: Array[String]): Unit = {
    val esHost = ConfigUtil.properties.getProperty("elasticsearch.host", "192.168.5.180")
    val esPort = ConfigUtil.properties.getProperty("elasticsearch.port", "9200")
    val spark = SparkSession.builder()
      .appName("testRecord")
      .master("local")
      .config("es.nodes", esHost)
      .config("es.port", esPort)
      .config("es.mapping.date.rich", "false")
      .getOrCreate()
    spark.sparkContext.setLogLevel("error")
    recordAnalysis(spark)
  }

  /**
    * Core enrichment job: for every archive that is missing one or more of the
    * five identifier kinds (imei, imsi, mac, license, face), tries to derive
    * the missing identifiers by joining against ES relation data, then records
    * the new (archiveId, code) pairs in Redis (dedupe state) and MySQL
    * (archive tables).
    *
    * The pipeline is heavily order-dependent; several suspected defects are
    * flagged inline with NOTE(review) rather than changed.
    */
  def recordAnalysis(sparkSession: SparkSession) = {
    // Already-archived (archive_id, code, code_type) triples from MySQL.
    val recordRdd = loadRecordData(sparkSession)
    // Code-to-code relation data (src_code -> 15-digit "460..." imsi targets;
    // see the tag_code filter in loadRelateData).
    val relateRdd = loadRelateData(sparkSession, TIME.toInt, RELATION_CODE_INDEX, "src_code")
    println(relateRdd.count())
    val licenseData = loadRelateData(sparkSession, TIME.toInt, RELATION_LICENSE_INDEX, "licenseplate")
    println(licenseData.count())
    val faceData = loadRelateData(sparkSession, TIME.toInt, RELATION_FACE_INDEX, "faceid")
    println(faceData.count())
    //    val licenseData = loadLicenseRelateData(sparkSession)
    //    val faceData = loadFaceRelateData(sparkSession)
    // Archives that still miss at least one of the five identifier kinds.
    val needAddRdd = recordRdd
      .groupBy(_._1)
      .filter(_._2.size < 5)

    //archive_id, code, code_type
    // For each incomplete archive, compute the list of missing identifier kinds.
    val needAddRddWithShortType = needAddRdd.map(x => {

      val shortType = new util.ArrayList[String]()
      val types = x._2.toList.map(_._3)
      if (!types.contains("imsi")) {
        shortType.add("imsi")
      }
      if (!types.contains("mac")) {
        shortType.add("mac")
      }
      if (!types.contains("imei")) {
        shortType.add("imei")
      }
      if (!types.contains("license")) {
        shortType.add("license")
      }
      if (!types.contains("face")) {
        shortType.add("face")
      }
      // (archiveId, existing triples, missing kinds)
      (x._1, x._2, shortType)
    })
    // NOTE(review): unused debug collect — materialises the whole RDD on the
    // driver for no visible purpose; candidate for removal.
    val l = needAddRddWithShortType.collect()
    // Split by whether the archive is missing its imsi.
    val shortImsiRdd = needAddRddWithShortType
      .filter(x => {
        x._3.contains("imsi")
      })
    val haveImsiRdd = needAddRddWithShortType.filter(x => {
      !x._3.contains("imsi")
    })
    //    val tuples = shortImsiRdd
    //        .flatMap(x=>{
    //
    //      x._3.toArray()
    //    }).
    //      collect()
    //    println(tuples.length+"===========short imsi")
    //    for (i <- 0 to tuples.length - 1) {
    //      println(tuples(i)+"=====")
    //    }
    //==================================================================================================================
    // Missing imsi AND missing both face and plate: the imsi can only come from
    // code-detection relation data. Missing imsi but having a face or plate:
    // derive the imsi via the face/plate relation data instead.
    // After deriving the imsi from detection data, keep relating to obtain the
    // still-missing face/plate identifiers.
    println(shortImsiRdd.count())
    println(haveImsiRdd.count())
    val shortImsiWithOtherCode = shortImsiRdd.filter(x => {
      // Missing imsi and also missing both license and face data.
      (x._3.contains("license")) && (x._3.contains("face"))
    })
      .flatMap(x => {
        x._2.toList.map(tuple => {
          //code,archive,type — key by the existing code so it can join relateRdd
          (tuple._2, (tuple._1, tuple._3))
        })
      })
    println(shortImsiWithOtherCode.count() + "shotimsi")
    // Missing imsi/face/plate: obtain the imsi from detection relation data only.
    val getImsiCodeRdd = shortImsiWithOtherCode.join(relateRdd)
      .map(join => {

        val archiveId = join._2._1._1
        // tarcode_targetype_count
        val tarcode_targetype_count = join._2._2
        // (tarcode, count, archiveId)
        (tarcode_targetype_count._1, tarcode_targetype_count._2, archiveId)

      })
      .filter(x => {
        x._1.length == 15
      })
      //      .filter(_._2.equals("imsi"))
      .groupBy(_._3) // group by archiveId
      .map(x => {
      // NOTE(review): sorts by _._3 (the archive id) — the analogous blocks
      // below sort by _._2 (the count); confirm which field was intended.
      val head = x._2.toList.sortBy(0 - _._3.toInt).head
      (head._3, head._1) // (archiveId, imsi code)
    })
      .filter(x => {
        // NOTE(review): checks x._1 (archive id) for length 15; the 15-digit
        // field here is the imsi in x._2 — confirm.
        x._1.length == 15
      })
      .filter(x => {
        // Skip codes already archived in Redis ("imsi_archivers" hash, keyed by code).
        val resource = jedisPool.getResource
        val boolean = resource.hexists("imsi" + "_archivers", x._2)
        resource.close()
        !boolean
      })
      .map(x => {
        updateRedisData(jedisPool, x, "imsi")
        x
      }).cache()
    println(getImsiCodeRdd.count() + "====getgetImsiCodeRdd")
    val getImsiCodeCollect = getImsiCodeRdd.collect()
    // Update the imsi archive table -------------
    updateMysqlData(TABALE_ARCHIVE_IMSI, getImsiCodeCollect, "imsi")


    //============== After obtaining the imsi: for archives that were missing
    // imsi/face/plate, keep joining to derive the plate and face data ========
    val transLicenseData = licenseData.filter(x => {
      //licenseplateid, tarcode_targetype_count — keep only imsi-typed relations
      x._2._2.equals("imsi")
    })
      .map(x => {
        // re-key by the related imsi code
        (x._2._1, (x._1, x._2._2))
      })

    // NOTE(review): getImsiCodeRdd is keyed by archive id (see above) while
    // transLicenseData is keyed by the imsi code — these join keys look
    // mismatched unless the (head._3, head._1) ordering above was meant to be
    // reversed; confirm against production data.
    val getLicenseByImsi = getImsiCodeRdd
      .join(transLicenseData) //tarcode,(licenseplateid,count)
      .map(x => {
      val archiveId = x._2._1
      // licenseplateid,count,archiveId
      (x._2._2._1, x._2._2._2, archiveId)
    })
      .groupBy(_._3)
      .map(x => {
        // pick the relation with the highest count per archive
        val head = x._2.toList.sortBy(0 - _._2.toInt).head

        (head._3, head._1) //archiveId,license
      })
      .filter(x => {
        // NOTE(review): dedupe checks x._1 (archive id) but updateRedisData
        // writes the hash field from x._2 (the plate) — the imsi block above
        // checks x._2; this asymmetry looks unintentional.
        val resource = jedisPool.getResource
        val boolean = resource.hexists("plate" + "_archivers", x._1)
        resource.close()
        !boolean
      })
      .map(x => {
        updateRedisData(jedisPool, x, "plate")
        x
      }).collect()
    // Update the license-plate archive table ---------------
    updateMysqlData(TABALE_ARCHIVE_LICENSEPLATE, getLicenseByImsi, "licenseplate")
    //================ archives missing imsi/face/plate: after deriving imsi from
    // detection data, keep joining to derive the face data ====================
    val tranceFaceData = faceData
      //      .filter(x => {
      //      //faceid, tarcode_targetype_count
      //      x._2._2.equals("imsi")
      //    })
      .map(x => {
      (x._2._1, (x._1, x._2._2)) //tarcode,faceid,count
    })


    val getFaceByImsi = getImsiCodeRdd
      .join(tranceFaceData)
      .map(x => {
        val archiveId = x._2._1
        // tarcode,targetype,count,archiveId
        (x._2._2._1, x._2._2._2, archiveId)
      })
      .groupBy(_._3)
      .map(x => {
        val head = x._2.toList.sortBy(0 - _._2.toInt).head
        (head._3, head._1) // (archiveId, faceid)
      })
      .filter(x => {
        // NOTE(review): checks "face_archivers" here, but the update below
        // writes to "faceid_archivers" (code_type = "faceid") — the dedupe
        // therefore never sees the entries it writes; confirm intended key.
        val resource = jedisPool.getResource
        val boolean = resource.hexists("face" + "_archivers", x._1)
        resource.close()
        !boolean
      })
      .map(x => {
        updateRedisData(jedisPool, x, "faceid")
        x
      }).collect()

    // Update the face archive table
    updateMysqlData(TABALE_ARCHIVE_FACE, getFaceByImsi, "faceid")
    //updateMysqlData()

    //===================== missing imsi, but face or plate data exists ==========
    // Has face data.
    val shortImsiWithFace = shortImsiRdd.filter(x => {
      // missing imsi but face is present (i.e. "face" is NOT in the missing set)
      (!x._3.contains("face"))
    })
      .flatMap(x => {
        x._2.toList.map(tuple => {
          //facecode,archive,type
          (tuple._2, (tuple._1, tuple._3))
        })
      })

    val getImsiByFace = shortImsiWithFace.join(faceData) //(faceid, (tarcode,targetype,count))
      .map(x => {
      val archiveId = x._2._1._1
      val tarcode = x._2._2._1
      val count = x._2._2._2
      (archiveId, tarcode, count)
    })
      .groupBy(_._1)
      .map(x => {
        val head = x._2.toList.sortBy(0 - _._3.toInt).head
        (x._1, head._1) //archiveId,tarcode
      })
      .filter(x => {
        // NOTE(review): checks x._1 (archive id) against "imsi_archivers",
        // whereas the hash field written is the code (x._2) — confirm.
        val resource = jedisPool.getResource
        val boolean = resource.hexists("imsi" + "_archivers", x._1)
        resource.close()
        !boolean
      })
      .map(x => {
        updateRedisData(jedisPool, x, "imsi")
        x
      }).collect()
    updateMysqlData(TABALE_ARCHIVE_IMSI, getImsiByFace, "imsi")

    //================ missing imsi, face or plate data exists =============================
    // Has plate data only (plate present, face missing).
    val shortImsiWithLicense = shortImsiRdd.filter(x => {
      // plate present (not in the missing set) while face is still missing
      (!x._3.contains("license") && x._3.contains("face"))
    })
      .flatMap(x => {
        x._2.toList.map(tuple => {
          //code,archive,type
          (tuple._2, (tuple._1, tuple._3))
        })
      })

    val getImsiByLicense = shortImsiWithLicense.join(licenseData) //licenseplateid, tarcode_targetype_count
      .map(x => {
      val archiveId = x._2._1._1
      val tarcode = x._2._2._1
      val count = x._2._2._2
      (archiveId, tarcode, count)
    })
      .groupBy(_._1)
      .map(x => {
        val head = x._2.toList.sortBy(0 - _._3.toInt).head
        (x._1, head._1) //archiveId,tarcode
      })
      .filter(x => {
        val resource = jedisPool.getResource
        val boolean = resource.hexists("imsi" + "_archivers", x._1)
        resource.close()
        !boolean
      })

      .map(x => {
        updateRedisData(jedisPool, x, "imsi")
        x
      }).collect()
    updateMysqlData(TABALE_ARCHIVE_IMSI, getImsiByLicense, "imsi")

    //========================= archives NOT missing imsi ===================================
    //----------------------- has imsi but no face data --------------------------
    val shortFaceWithImsi = haveImsiRdd.filter(x => {
      x._3.contains("face")
    })
      .flatMap(x => {
        x._2.toList.map(tuple => {
          //code,archive,type
          (tuple._2, (tuple._1, tuple._3))
        })
      })


    val getFaceByHaveImis = shortFaceWithImsi
      .join(tranceFaceData) //(tarcode,(faceid,count))
      .map(x => {
      val archiveId = x._2._1._1
      val face = x._2._2._1
      val count = x._2._2._2
      (archiveId, face, count)
    })
      .groupBy(_._1)
      .map(x => {
        val head = x._2.toList.sortBy(0 - _._3.toInt).head

        (head._1, head._2) //archiveId,face
      })
      .filter(x => {
        val resource = jedisPool.getResource
        val boolean = resource.hexists("face" + "_archivers", x._1)
        resource.close()
        !boolean
      })

      .map(x => {
        // NOTE(review): writes to "face_archivers" here but the earlier face
        // path writes "faceid_archivers" — the two paths do not share dedupe state.
        updateRedisData(jedisPool, x,"face")
        x
      }).collect()
    // Update the face archive table
    updateMysqlData(TABALE_ARCHIVE_FACE, getFaceByHaveImis, "faceid")
    //----------------------- has imsi but no plate data ---------------------------

    val shortLicenseWithImsi = haveImsiRdd.filter(x => {
      x._3.contains("license")
    })
      .flatMap(x => {
        x._2.toList.map(tuple => {
          //code,archive,type
          (tuple._2, (tuple._1, tuple._3))
        })
      })

    val getLiceseByHaveImsi = shortLicenseWithImsi
      .join(transLicenseData)
      .map(x => {
        val archiveId = x._2._1._1
        val license = x._2._2._1
        val count = x._2._2._2
        (archiveId, license, count)
      })
      .groupBy(_._1)
      .map(x => {
        val head = x._2.toList.sortBy(0 - _._3.toInt).head

        (head._1, head._2) //archiveId,license
      })
      .filter(x => {
        val resource = jedisPool.getResource
        val boolean = resource.hexists("plate" + "_archivers", x._1)
        resource.close()
        !boolean
      })

      .map(x => {
        updateRedisData(jedisPool, x, "plate")
        x
      }).collect()

    updateMysqlData(TABALE_ARCHIVE_LICENSEPLATE, getLiceseByHaveImsi, "licenseplate")
    //------------------------------ has imsi but no mac ------------------------------------

    //
    //    val imsiRelateCode = loadImsiRelateCodeByMysql(sparkSession)
    //      .filter(x => {
    //        x._2._2.equals("mac")
    //      }) //imsi,tagcode,tagtype,count


    val shortMacWithImsi = haveImsiRdd.filter(x => {
      x._3.contains("mac")
    })
      .flatMap(x => {
        x._2.toList.map(tuple => {
          //code,archive,type
          (tuple._2, (tuple._1, tuple._3))
        })
      })


    //    val esParam = shortMacWithImsi.join(imsiRelateCode)
    //      .map(x => {
    //        val archive_type = x._2._1
    //        val code_type_count = x._2._2
    //        val archive = archive_type._1
    //        val code = code_type_count._1
    //        val count = code_type_count._3
    //        (code, archive, count)
    //      }).groupBy(_._1)
    //      .map(x => {
    //        val head = x._2.toList.sortBy(0 - _._3.toInt)
    //        if (head.length > 3) {
    //          head.splitAt(3)._1.map(x => {
    //            (x._1, x._2)
    //          })
    //        }
    //        else head.map(x => {
    //          (x._1, x._2)
    //        })
    //      })
    //      .collect()


    // NOTE(review): relateRdd only contains tag codes of length 15 starting
    // with "460" (see loadRelateData's filter), so this length < 15 filter —
    // intended to keep mac targets — should always be empty; confirm.
    val imsijoinmac = shortMacWithImsi.join(relateRdd.filter(x => {
      x._2._1.length < 15
    }))
    println(imsijoinmac.count() + "=================imsimac")
    val getMacByImsi = imsijoinmac //srccode, tarcode_targetype_count... filtered so srccode is imsi only
      .map(x => {
      val archiveId = x._2._1._1
      val code = x._2._2._1
      val count = x._2._2._2
      (archiveId, code, count)
    })
      .groupBy(_._1)
      .map(x => {
        val head = x._2.toList.sortBy(0 - _._3.toInt).head
        (head._1, head._2)
      })
      .filter(x => {
        x._1.length < 15
      })
      .filter(x => {
        val resource = jedisPool.getResource
        val boolean = resource.hexists("mac" + "_archivers", x._1)
        resource.close()
        !boolean
      })

      .map(x => {
        updateRedisData(jedisPool, x, "mac")
        x
      }).collect()
    updateMysqlData(TABALE_ARCHIVE_MAC, getMacByImsi, "mac")
    //------------------------------------ has imsi but no imei --------------------------------------

    val shortImeiWithImsi = haveImsiRdd.filter(x => {
      x._3.contains("imei")
    })
      .flatMap(x => {
        x._2.toList.map(tuple => {
          //code,archive,type
          (tuple._2, (tuple._1, tuple._3))
        })
      })

    // NOTE(review): same concern as the mac block — relateRdd tag codes all
    // start with "460", so !startsWith("460") should never match; confirm.
    val getImeiByImsi = shortImeiWithImsi.join(relateRdd.filter(x => {

      x._2._1.length >= 15 && !x._2._1.startsWith("460")
    })) //srccode, tarcode_targetype_count... filtered so srccode is imsi only
      .map(x => {
      val archiveId = x._2._1._1
      val code = x._2._2._1
      val count = x._2._2._2
      (archiveId, code, count)
    })
      .groupBy(_._1)
      .map(x => {
        val head = x._2.toList.sortBy(0 - _._3.toInt).head
        (head._1, head._2)
      })
      .filter(x => {
        val resource = jedisPool.getResource
        val boolean = resource.hexists("imei" + "_archivers", x._1)
        resource.close()
        !boolean
      })

      .map(x => {
        updateRedisData(jedisPool, x, "imei")
        x
      }).collect()
    updateMysqlData(TABALE_ARCHIVE_IMEI, getImeiByImsi, "imei")


    //------------------------------------------------------------------------------------------------
    //    shortImsiRdd.filter(x => {
    //      x._3.contains()
    //    })
    //    shortImsiRdd.flatMap(x => {
    //      val shortType = "imsi"
    //      x._2.toList.map(tuple => {
    //        //code,archive,type
    //        (tuple._2, (tuple._1, tuple._3, shortType))
    //      })
    //    })
    //      .join(relateRdd)
    //      .map(x => {
    //        val srcCode = x._1
    //        //archive_type_shortType   x._2._1
    //        val archive = x._2._1._1
    //        val tarcode_targetype_count = x._2._2
    //
    //
    //      })
    //
    //    val shortOne = needAddRddWithShortType.filter(_._2.size == 3)
    //    val shortTwo = needAddRdd.filter(_._2.size == 2)
    //    val shortThree = needAddRdd.filter(_._2.size == 1)
    //
    //    //    // data missing 3 dimensions
    //    //    val shortThreeRdd = shortThree.flatMap(x => {
    //    //      x._2.toList.map(tuple => {
    //    //        //type,archive,code
    //    //        (tuple._3, tuple._1, tuple._2)
    //    //      })
    //    //    }).cache()
    //    //    shortThreeRdd
    //    //
    //    //    // data missing 2 dimensions
    //    //
    //    //    val shortTwoRdd = shortTwo.flatMap(x => {
    //    //      x._2.toList.map(tuple => {
    //    //        //type,archive,code
    //    //        (tuple._3, tuple._1, tuple._2)
    //    //      })
    //    //    }).cache()
    //
    //    // flatten data missing one dimension; each record is (type, archive, code, shortType)
    //    val shortOneRdd = shortOne.flatMap(x => {
    //      val shortOneType = x._3.head
    //      x._2.toList.map(tuple => {
    //        //type,archive,code
    //        (tuple._3, tuple._1, tuple._2, shortOneType)
    //      })
    //    }).cache()
    //
    //    // if the one missing dimension is the plate, relate via plate data
    //    //type,archive,code,shortType
    //    val shortOneCodeRdd = shortOneRdd.filter(x => {
    //      x._4.equals("imsi") || x._4.equals("imei") || x._4.equals("mac")
    //    })
    //    // data whose one missing dimension is the plate
    //    val shortOneLicenseRdd = shortOneRdd.filter(x => {
    //      x.equals("license")
    //    })
    //
    //    shortOneCodeRdd.map(x => {
    //      (x._3, (x._1, x._2, x._4)) //(code,(type,archive,shortType))
    //    }).join(relateRdd)
    //      .map(x => {
    //        val code = x._1
    //        val tarcode_targetype_count = x._2._2
    //        val type_archive_shortType = x._2._1
    //
    //
    //      })
    //
    //    val shortOneImsiRdd = shortOneRdd.filter(x => {
    //      !x._1.equals("imsi")
    //    }).cache()
    //    val shortOneWithImisRdd = shortOneRdd.filter(x => {
    //      x._1.equals("imsi")
    //    }).cache()

    //    shortOneWithImisRdd.map(x => {
    //      //code,type,archive
    //      (x._3, x._1 + "/u001" + x._2)
    //    })
    //      .join(relateRdd) // //srccode, tarcode_targetype_count
    //      .map(x => {
    //      val archive_type = x._2._1.split("/u001")
    //      val code_type_count = x._2._2.split("/u001")
    //      (archive_type(0), code_type_count(0), code_type_count(1), code_type_count(2))
    //    })
    //      .sortBy(_._4)


    //    needAddRdd.filter(_._2.size == 3)
    //      .map(x => {
    //        val archiveId = x._1
    //        x._2.toList.map(list => {
    //          val code = list._2
    //          val code_type = list._3
    //
    //        })
    //      })


  }

  // NOTE(review): unimplemented stub — the body is empty so it returns Unit,
  // and nothing in this file calls it; either implement or remove.
  def loadEsData(sparkSession: SparkSession, time: String, index: String, codes: List[String]) = {

  }

  /**
    * Loads raw capture records from ES for the trailing `time` days.
    *
    * Builds a bool/filter query (match_all plus an optional captime range) and
    * maps each document to a (code, captime) pair, preferring the mac field,
    * then imsi, then imei.
    *
    * @param sparkSession active session used to build the ES RDD
    * @param time         look-back window in days (parsed with toInt)
    * @param index        ES index/type to query
    * @return RDD of (code, captime)
    */
  def loadESOriginData(sparkSession: SparkSession, time: String, index: String) = {
    val longNow = System.currentTimeMillis()
    // 1000L forces Long arithmetic: the previous Int product
    // 1000 * 60 * 60 * 24 * time overflowed for windows of 25 days or more.
    val longBefor = longNow - (1000L * 60 * 60 * 24 * time.toInt)

    val beginTime = ParseTime.LongToDate(longBefor + "")
    val endTime = ParseTime.LongToDate(longNow + "")
    val queryPara = new JSONObject()
    val queryBool = new JSONObject()
    val queryMatchAll = new JSONObject()
    queryMatchAll.put("match_all", new JSONObject())
    val queryFilters = new JSONArray()
    queryFilters.add(queryMatchAll)
    if (beginTime != null && !beginTime.equals("") && endTime != null && !endTime.equals("")) {
      val queryTime = new JSONObject()
      queryTime.put("gte", beginTime)
      queryTime.put("lte", endTime)
      val querycaptime = new JSONObject()
      querycaptime.put("captime", queryTime)
      val queryRange = new JSONObject()
      queryRange.put("range", querycaptime)
      queryFilters.add(queryRange)
    }
    queryBool.put("filter", queryFilters)
    queryPara.put("bool", queryBool)
    println(queryPara.toJSONString)
    val esData = EsSpark.esRDD(sparkSession.sparkContext, index, queryPara.toJSONString)
    // Pick the first populated identifier per document: mac, then imsi, then imei.
    // The previous code called Option.get unconditionally and threw
    // NoSuchElementException whenever a field was absent from the document;
    // exists(_ != null) treats absent and null-valued fields uniformly.
    val parsRdd: RDD[(String, String)] = esData.map(x => {
      val doc = x._2
      val captime = doc.get("captime").getOrElse("") + ""
      val code =
        if (doc.get("mac").exists(_ != null)) doc("mac") + ""
        else if (doc.get("imsi").exists(_ != null)) doc("imsi") + ""
        else doc.get("imei").getOrElse("") + ""
      (code, captime)
    })
    parsRdd
  }

  /**
    * Loads already-archived identifiers from the four MySQL archive tables.
    *
    * Each table has the same layout (archive id in column 1, code in column 2),
    * so a single helper normalises every source to (archive_id, code, code_type)
    * triples before the union — replacing four copy-pasted mapping blocks.
    *
    * @param sparkSession session used for the JDBC reads
    * @return RDD of (archive_id, code, code_type) with code_type one of
    *         "imei", "imsi", "mac", "license"
    */
  def loadRecordData(sparkSession: SparkSession) = {
    // Shared reader: one archive table -> (archive_id, code, codeType) triples.
    def archiveRdd(table: String, codeType: String) =
      sparkSession.read.jdbc(JDBC_URL, table, properties).rdd.map(row => {
        // Row indices are 0-based: column 1 is the archive id, column 2 the code.
        val archiveId = row.get(1) + ""
        val code = row.getString(2)
        (archiveId, code, codeType)
      })

    archiveRdd(TABALE_ARCHIVE_IMEI, "imei")
      .union(archiveRdd(TABALE_ARCHIVE_IMSI, "imsi"))
      .union(archiveRdd(TABALE_ARCHIVE_MAC, "mac"))
      .union(archiveRdd(TABALE_ARCHIVE_LICENSEPLATE, "license"))
  }


  /**
    * Loads the focus-person table and emits one keyed entry per identifier.
    *
    * Each row yields four pairs — keyed by imei, imsi, mac and license plate in
    * that order — all carrying the same (imei, imsi, mac, license_plate) payload,
    * so the row can later be joined on any of its identifiers.
    *
    * @param sparkSession session used for the JDBC read
    * @return RDD of (identifier, (imei, imsi, mac, license_plate))
    */
  def loadFocusData(sparkSession: SparkSession) = {
    val focusFrame = sparkSession.read.jdbc(JDBC_URL, TABLE_FOCUS, properties)

    focusFrame.rdd.flatMap(row => {
      // Columns 10-13 hold the person's four identifiers.
      val imei = row.getString(10)
      val imsi = row.getString(11)
      val license_plate = row.getString(12)
      val mac = row.getString(13)
      val payload = (imei, imsi, mac, license_plate)
      // Key the shared payload by each identifier, preserving the original order.
      Array(imei, imsi, mac, license_plate).map(key => (key, payload))
    })
  }

  /**
    * Loads code-to-code relation data from ES for the last `accumulatedays` days.
    *
    * Builds a bool/filter captime-range query starting at (approximately)
    * midnight `accumulatedays` days ago, then keeps only relations whose
    * tag_code is a 15-digit code starting with "460" (i.e. a Chinese imsi),
    * counts distinct (srcCode, tagCode, day) observations, and returns the
    * per-pair day counts.
    *
    * @param sparkSession    active session used to build the ES RDD
    * @param accumulatedays  look-back window in days
    * @param index           ES index/type to query
    * @param src_type        name of the source-code field in the documents;
    *                        "imsi"/"mac"/"imei" are all stored as "src_code"
    * @return RDD of (srcCode, (tagCode, count)) where count is the number of
    *         distinct days the pair was observed together
    */
  def loadRelateData(sparkSession: SparkSession, accumulatedays: Integer, index: String, src_type: String) = {
    var srcParam = src_type
    if (src_type.equals("imsi") || src_type.equals("mac") || src_type.equals("imei")) {
      srcParam = "src_code"
    }
    // Compute [midnight N days ago, now-ish]. HOUR + AM_PM = 0 zeroes the
    // 12-hour clock fields (equivalent to HOUR_OF_DAY = 0).
    // NOTE(review): Calendar.MILLISECOND is never cleared, so the boundary is
    // off by up to a second's fraction — confirm whether that matters here.
    val calendar = Calendar.getInstance()
    val date = new Date()
    calendar.setTime(date)
    calendar.add(Calendar.DATE, 0 - accumulatedays)
    calendar.set(Calendar.HOUR, 0)
    calendar.set(Calendar.AM_PM, 0)
    calendar.set(Calendar.MINUTE, 0)
    calendar.set(Calendar.SECOND, 0)
    val starttime = ParseTime.sdf.format(calendar.getTime)
    calendar.add(Calendar.DATE, accumulatedays)
    val endtime = ParseTime.sdf.format(calendar.getTime)

    // Build a bool/filter query: match_all plus an optional captime range.
    val queryPara = new JSONObject()
    val queryBool = new JSONObject()
    val queryDevice = new JSONObject()
    val queryMatchAll = new JSONObject()
    queryMatchAll.put("match_all", queryDevice)
    val queryFilters = new JSONArray()
    queryFilters.add(queryMatchAll)
    if (starttime != null && !starttime.equals("") && endtime != null && !endtime.equals("")) {
      val queryTime = new JSONObject()
      queryTime.put("gte", starttime)
      queryTime.put("lte", endtime)
      val querycaptime = new JSONObject()
      querycaptime.put("captime", queryTime)
      val queryRange = new JSONObject()
      queryRange.put("range", querycaptime)
      queryFilters.add(queryRange)
    }
    queryBool.put("filter", queryFilters)
    queryPara.put("bool", queryBool)
    println(queryPara.toJSONString)
    val esData = EsSpark.esRDD(sparkSession.sparkContext, index, queryPara.toJSONString).cache()
    //src_code
    esData
      .filter(x => {
        // Keep only imsi-shaped targets: 15 digits, Chinese MCC prefix "460".
        // (Downstream mac/imei filters in recordAnalysis depend on this.)
        (x._2.get("tag_code").get + "").length == 15 && (x._2.get("tag_code").get + "").startsWith("460")
      })
      .map(x => {
        // Reduce each observation to its calendar day so the later distinct()
        // counts co-occurrence days rather than raw events.
        val captime = x._2.get("captime") + ""
        val day = captime.split(" ")(0)
        (x._2.get(srcParam).get + "", x._2.get("tag_code").get + "",day)
      })
      .distinct()
      .map(x=>{
        ((x._1,x._2),1)//TODO srcCode,tagCode
      })
      .reduceByKey(_ + _)
      .map(x => {
        //TODO (srcCode,(tagCode,count))
        (x._1._1, (x._1._2, x._2)) // (srcCode, (tagCode, day count))
      })

    //    val relation = sparkSession.read.jdbc(JDBC_URL, TABLE_RELATION, properties)
    //    relation.rdd.map(x => {
    //      //srccode, tarcode_targetype_count
    //      (x.getString(1), (x.get(3)+"", x.get(4)+"", x.get(7)+""))
    //    })
  }


  /**
    * Loads code-to-code relations from MySQL, keeping only rows whose source
    * is an imsi and whose target is some other identifier kind.
    *
    * @param sparkSession session used for the JDBC read
    * @return RDD of (imsi, (targetCode, targetType, count))
    */
  def loadImsiRelateCodeByMysql(sparkSession: SparkSession) = {
    val relationFrame = sparkSession.read.jdbc(JDBC_URL, TABLE_RELATION, properties)
    relationFrame.rdd
      .map(row => {
        // Columns: 1 srccode, 2 srctype, 3 targcode, 4 targtype, 7 count.
        (row.get(1) + "", row.get(2) + "", row.get(3) + "", row.get(4) + "", row.get(7) + "")
      })
      .filter(rel => rel._2.equals("imsi") && !rel._4.equals("imsi"))
      .map(rel => (rel._1, (rel._3, rel._4, rel._5)))
  }

  /**
    * Loads plate-to-code relation data from MySQL.
    *
    * @param sparkSession session used for the JDBC read
    * @return RDD of (licensePlateId, (code, codeType, count)) restricted to
    *         rows whose related code type is "imsi"
    */
  def loadLicenseRelateData(sparkSession: SparkSession) = {
    val relationFrame = sparkSession.read.jdbc(JDBC_URL, TABLE_RELATION, properties)
    relationFrame.rdd
      // Column 8 is the plate id; columns 1/2/4 are the related code, its type and the count.
      .map(row => (row.get(8) + "", (row.get(1) + "", row.get(2) + "", row.get(4) + "")))
      .filter(_._2._2.equals("imsi"))
  }

  /**
    * Loads face-to-code relation data from MySQL.
    *
    * @param sparkSession session used for the JDBC read
    * @return RDD of (faceId, (code, codeType, count)) restricted to rows whose
    *         related code type is "imsi"
    */
  def loadFaceRelateData(sparkSession: SparkSession) = {
    val relationFrame = sparkSession.read.jdbc(JDBC_URL, TABLE_RELATION, properties)
    relationFrame.rdd
      // Column 1 is the face id; columns 3/4/7 are the related code, its type and the count.
      .map(row => (row.getString(1), (row.get(3) + "", row.get(4) + "", row.get(7) + "")))
      .filter(_._2._2.equals("imsi"))
  }

  /**
    * Records a code -> archive-id mapping in the "&lt;code_type&gt;_archivers" redis hash.
    *
    * @param jedisPool pool to borrow a connection from
    * @param data      (archiveId, code); the code becomes the hash field and
    *                  the archive id the value
    * @param code_type identifier kind, used as the hash-key prefix
    */
  def updateRedisData(jedisPool: JedisPool, data: (String, String), code_type: String) = {
    println("=========update " + code_type)
    val resource = jedisPool.getResource
    // try/finally so the pooled connection is returned even if hset throws;
    // the previous code leaked the connection on failure.
    try {
      resource.hset(code_type + "_archivers", data._2, data._1)
    } finally {
      resource.close()
    }
  }

  /**
    * Stores a plain key/value pair in redis.
    *
    * @param jedisPool pool to borrow a connection from
    * @param date      (key, value) pair to SET
    */
  def updateRedisFace(jedisPool: JedisPool, date: (String, String)) = {
    val resource = jedisPool.getResource
    // try/finally so the pooled connection is returned even if set throws;
    // the previous code leaked the connection on failure.
    try {
      resource.set(date._1, date._2)
    } finally {
      resource.close()
    }
  }

  /**
    * Batch-inserts (archive_id, code) rows into the given archive table inside
    * a single transaction.
    *
    * @param table     target table name
    * @param data      (archiveId, code) pairs to insert
    * @param code_type name of the code column in the table
    */
  def updateMysqlData(table: String, data: Array[(String, String)], code_type: String) = {
    println("insert mysql ==========" + code_type)
    println(data.length)
    try {
      // JDBC 4+ drivers self-register, so a missing class is logged, not fatal.
      // The previous catch-all (`case e => e`) silently swallowed every Throwable.
      Class.forName("com.mysql.cj.jdbc.Driver")
    } catch {
      case e: ClassNotFoundException =>
        println("mysql driver class not found: " + e.getMessage)
    }
    val connection = dataSource.get.getConnection
    try {
      connection.setAutoCommit(false)
      // A default forward-only statement suffices for a batch insert; the old
      // scroll-sensitive/read-only result-set options were unnecessary.
      val statement = connection.prepareStatement(
        s"insert into  $table (archive_id,$code_type)values (?,?) "
      )
      try {
        for ((archiveid, code) <- data) {
          println(code)
          println(archiveid)
          statement.setString(1, archiveid)
          statement.setString(2, code)
          statement.addBatch()
        }
        statement.executeBatch()
        connection.commit()
      } finally {
        statement.close()
      }
    } finally {
      // Always return the pooled connection; the previous code leaked both the
      // statement and the connection whenever the batch failed.
      connection.close()
    }
  }

}

