package com.test

import org.apache.commons.lang3.RandomStringUtils
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.SparkSession

/**
 * @Created by Orion
 * @Description
 *            Test code for the session-split logic.
 */
object TestSessionId {
  /**
   * Sample input ("sessionId,ts" per line):
   * s01,1
   * s01,2
   * s01,3
   * s01,7
   * s01,8
   * s01,9
   * s02,1
   * s02,2
   * s02,3
   * s02,5
   * s02,6
   * s02,7
   */

  def main(args: Array[String]): Unit = {
    // Local Spark session for testing; Hive support kept as in the original setup.
    val session = SparkSession.builder()
      .master("local[*]")
      .appName("session-split")
      .enableHiveSupport()
      .getOrCreate()

    val sc = session.sparkContext
    val rdd = sc.textFile("/test01.txt")
    // Parse each "sessionId,ts" line; new_session is filled in after grouping.
    val rdd2 = rdd.map { line =>
      val arr = line.split(",")
      Log(arr(0), arr(1).toInt, "")
    }

    val grouped: RDD[(String, Iterable[Log])] = rdd2.groupBy(_.sessionId)
    grouped.flatMap { tp =>
      val sorted = tp._2.toList.sortBy(_.ts)
      var newSession = RandomStringUtils.randomAlphabetic(10)
      // Assign the current split-session id to each log; whenever the gap to
      // the NEXT log exceeds 3 time units, start a fresh session id.
      // The explicit bounds check replaces the original pattern of letting
      // sorted(index + 1) throw IndexOutOfBoundsException on the last element
      // and swallowing it with a catch-all (which also hid real errors).
      for (index <- sorted.indices) {
        sorted(index).new_session = newSession
        if (index < sorted.size - 1 && sorted(index + 1).ts - sorted(index).ts > 3) {
          newSession = RandomStringUtils.randomAlphabetic(10)
        }
      }
      sorted
    }.sortBy(_.sessionId).foreach(println)

    session.stop() // release local Spark resources
  }

}
