package com.taoqi.ss

import java.sql.Timestamp

import org.apache.spark.SparkConf
import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.streaming.{GroupState, GroupStateTimeout}

/**
  * Created by TQ on 2017/9/21.
  */
object SSTest3 {

  /**
    * Structured Streaming demo: reads GPS messages from a Kafka topic, splits each
    * record's value into words, and maintains a per-word "session" (event count plus
    * first/last-seen timestamps) with `mapGroupsWithState`, printing every session
    * update to the console in `update` output mode.
    */
  def main(args: Array[String]): Unit = {

    val spark = SparkSession.builder().master("local[2]").appName("kafka").getOrCreate()

    val dsReader = spark
      .readStream
      .format("kafka")
      .option("kafka.bootstrap.servers", "10.0.25.57:9092,10.0.25.48:9092,10.0.25.39:9092")
      .option("subscribe", "gps-data")
      .option("includeTimestamp", true)
      .load()

    import spark.implicits._

    // BUG FIX: the timestamp column must remain TimestampType for the
    // (String, Timestamp) encoder. The previous `CAST(timestamp as STRING)` made
    // `.as[(String, Timestamp)]` fail, because Spark's typed conversion inserts an
    // UpCast and string -> timestamp is not a safe up-cast.
    val df = dsReader
      .selectExpr("CAST(value as STRING)", "timestamp")
      .as[(String, Timestamp)]

    println(dsReader.isStreaming)
    dsReader.printSchema()

    // Split each Kafka value into words; each word becomes one Event carrying the
    // record's Kafka ingestion timestamp. (Executor-side debug println removed.)
    val events = df.flatMap { case (line, timestamp) =>
      line.split(" ").map(word => Event(sessionId = word, timestamp))
    }
    events.printSchema()

    // Maintain arbitrary per-key state across micro-batches, with a processing-time
    // timeout so idle keys eventually expire. This example only accumulates word
    // counts; swap in real business state (e.g. login sessions) as needed.
    val sessionUpdates = events.groupByKey(event => event.sessionId)
      .mapGroupsWithState[SessionInfo, SessionUpdate](GroupStateTimeout.ProcessingTimeTimeout) {

        case (sessionId: String, events: Iterator[Event], state: GroupState[SessionInfo]) =>
          if (state.hasTimedOut) {
            // No data arrived for this key within the timeout: emit one final,
            // expired update and drop the state.
            val finalUpdate =
              SessionUpdate(sessionId, state.get.durationMs, state.get.numEvents, expired = true)
            state.remove()
            finalUpdate
          } else {
            // The iterator is non-empty here (this branch only runs when the key
            // received data in the current micro-batch).
            val timestamps = events.map(_.timestamp.getTime).toSeq
            val updatedSession = if (state.exists) {
              val oldSession = state.get
              SessionInfo(
                oldSession.numEvents + timestamps.size,
                oldSession.startTimestampMs,
                math.max(oldSession.endTimestampMs, timestamps.max)
              )
            } else {
              SessionInfo(timestamps.size, timestamps.min, timestamps.max)
            }
            state.update(updatedSession)

            // BUG FIX: ProcessingTimeTimeout was declared above but the duration was
            // never set (the call was commented out), so the hasTimedOut branch was
            // unreachable. Re-arm the timeout after every update, as in Spark's
            // StructuredSessionization example.
            state.setTimeoutDuration("10 seconds")

            SessionUpdate(sessionId, state.get.durationMs, state.get.numEvents, expired = false)
          }
      }

    sessionUpdates.writeStream
      .outputMode("update")
      .format("console")
      .start()
      .awaitTermination()
  }

  /**
    * Builds a SparkSession from the given config, optionally with Hive support.
    * `clearDefaultSession` is called before and after so this freshly built session
    * does not become (or reuse) the JVM-wide default.
    *
    * @param conf       Spark configuration to apply
    * @param isNeedHive whether to enable Hive support (default true)
    * @return the created (or reused) SparkSession
    */
  def getSparkSession(conf: SparkConf, isNeedHive: Boolean = true): SparkSession =
    SparkSession.synchronized {
      SparkSession.clearDefaultSession()
      val session =
        if (isNeedHive) SparkSession.builder().config(conf).enableHiveSupport().getOrCreate()
        else SparkSession.builder().config(conf).getOrCreate()
      SparkSession.clearDefaultSession()
      session
    }

  /** User-defined data type representing the input events */
  final case class Event(sessionId: String, timestamp: Timestamp)

  /**
    * User-defined data type for storing a session information as state in mapGroupsWithState.
    *
    * @param numEvents        total number of events received in the session
    * @param startTimestampMs timestamp of first event received in the session when it started
    * @param endTimestampMs   timestamp of last event received in the session before it expired
    */
  final case class SessionInfo(
                                numEvents: Int,
                                startTimestampMs: Long,
                                endTimestampMs: Long) {

    /** Duration of the session, between the first and last events */
    def durationMs: Long = endTimestampMs - startTimestampMs
  }

  /**
    * User-defined data type representing the update information returned by mapGroupsWithState.
    *
    * @param id         Id of the session
    * @param durationMs Duration the session was active, that is, from first event to its expiry
    * @param numEvents  Number of events received by the session while it was active
    * @param expired    Is the session active or expired
    */
  final case class SessionUpdate(
                                  id: String,
                                  durationMs: Long,
                                  numEvents: Int,
                                  expired: Boolean)

}