package com.ehualu.liaocheng

import java.text.SimpleDateFormat

import org.apache.spark.rdd.RDD
import org.apache.spark.sql.SparkSession

import scala.collection.mutable.ArrayBuffer

/**
  * @author gyz
  * @since 2019/8/29 17:33
  */
object Test2 {

  /**
    * Entry point: reads de-duplicated gate-passing CSV records, groups the
    * sightings per licence plate, and for each plate derives the list of
    * (gatePair, travelSeconds) for consecutive sightings that occurred at
    * two DIFFERENT gates no more than 20 minutes (1200 s) apart.
    *
    * Input file columns used (comma-separated):
    *   arr(1) = gate/checkpoint id, arr(3) = timestamp, arr(5) = licence plate.
    */
  def main(args: Array[String]): Unit = {

    val spark = SparkSession.builder()
      .master("local[4]")
      .appName("BanSui")
      .getOrCreate()

    // SparkContext handle; log-level tuning kept available but disabled.
    val sc = spark.sparkContext
    // sc.setLogLevel("WARN")

    // De-duplicate raw CSV lines before parsing.
    val dataRDD: RDD[String] = sc.textFile("E:\\0830\\gcxx").distinct()

    // Parse each CSV line into (plate, gateId, timestamp) and drop the header
    // row, whose plate column holds the literal header text "车牌".
    val transRDD = dataRDD.map { line =>
      val arr: Array[String] = line.split(",")
      // Timestamp: keep only the "yyyy-MM-dd HH:mm:ss" prefix.
      // NOTE(review): assumes arr(3) is always >= 19 chars — confirm on real data.
      val sj = arr(3).substring(0, 19)
      val hphm = arr(5) // licence plate number
      val kkbh = arr(1) // gate (checkpoint) id
      (hphm, kkbh, sj)
      // FIX: dropped the redundant .toString — _._1 is already a String.
    }.filter(_._1 != "车牌")

    val count1: Long = transRDD.count()
    // println(count1) // 18948

    // Group "gateId_timestamp" strings per plate.
    val hphmGroupedRDD: RDD[(String, Iterable[String])] = transRDD
      .map(e => (e._1, e._2 + "_" + e._3))
      .groupByKey()

    // Sample of grouped output:
    // (鲁PF6038,CompactBuffer(3715000916_2019-06-03 15:54:32, ...))
    // hphmGroupedRDD.take(200).foreach(println)

    // A plate seen only once can produce no consecutive pair — drop it.
    val filterHphmGroupedRDD: RDD[(String, Iterable[String])] =
      hphmGroupedRDD.filter(_._2.size > 1)
    filterHphmGroupedRDD.take(100).foreach(println)

    val count3: Long = filterHphmGroupedRDD.count()
    println(s"count: $count3") // 3150

    // For each plate: sort sightings chronologically, pair consecutive
    // sightings, keep pairs at different gates under 20 minutes apart.
    val roadTimeRDD: RDD[Seq[(String, Long)]] = filterHphmGroupedRDD.map { case (_, records) =>
      // "gateId_timestamp" -> (gateId, timestamp).
      // FIX: records.toSeq replaces the manual iterator-to-ArrayBuffer copy.
      val sorted: Seq[(String, String)] = records.toSeq
        .map { rec =>
          val parts = rec.split("_")
          (parts(0), parts(1))
        }
        // Fixed-width timestamp format, so lexicographic order == time order.
        .sortBy(_._2)

      // FIX: sliding(2) replaces the manual index loop that built adjacent
      // pairs into a Seq via repeated :+ (which is O(n) per append).
      sorted.sliding(2).flatMap {
        case Seq((kkbh1, sj1), (kkbh2, sj2)) =>
          // Hoisted so the time difference is computed once per pair.
          val diffSeconds = getDifferentSeconds(sj1, sj2)
          if (kkbh1 != kkbh2 && diffSeconds < 1200)
            Some((kkbh1 + "_" + kkbh2, diffSeconds))
          else
            None
        case _ => None // single-element window cannot occur here (size > 1), kept for exhaustiveness
      }.toSeq
    }

    // Drop plates whose filtered pair list came out empty.
    val notNullRoadRDD: RDD[Seq[(String, Long)]] = roadTimeRDD.filter(_.nonEmpty)
    // e.g. List((37150002324_37150002323,2), (37150002324_37150002323,2))
    notNullRoadRDD.foreach(println)

    // FIX: removed dead code — `notNullRoadRDD.map(e => {})` was an empty,
    // lazily-evaluated transformation whose result was never used, so it had
    // no effect at runtime.

    spark.close()
  }

  /**
    * Seconds elapsed from `start` to `end`.
    *
    * Both arguments must be formatted as "yyyy-MM-dd HH:mm:ss"; the result is
    * negative when `end` precedes `start`. A fresh SimpleDateFormat is created
    * per call because that class is not thread-safe.
    *
    * @throws java.text.ParseException if either string does not match the pattern
    */
  def getDifferentSeconds(start: String, end: String): Long = {
    val fm = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss")
    (fm.parse(end).getTime - fm.parse(start).getTime) / 1000
  }

}
