package com.fulu.game.bigdata.realtime.roomstat

import java.util.{Date, Properties}

import com.alibaba.fastjson.JSON
import com.alibaba.fastjson.serializer.SerializeConfig
import com.fulu.game.bigdata.realtime.config.{Config, Constants}
import com.fulu.game.bigdata.realtime.sink.KafkaStringSerializationSchema
import org.apache.flink.api.common.functions.{RichFlatMapFunction, RichMapFunction}
import org.apache.flink.api.common.serialization.SimpleStringSchema
import org.apache.flink.api.common.state.{StateTtlConfig, ValueState, ValueStateDescriptor}
import org.apache.flink.api.common.time.Time
import org.apache.flink.configuration.Configuration
import org.apache.flink.contrib.streaming.state.RocksDBStateBackend
import org.apache.flink.streaming.api.TimeCharacteristic
import org.apache.flink.streaming.api.environment.CheckpointConfig.ExternalizedCheckpointCleanup
import org.apache.flink.streaming.api.scala.{StreamExecutionEnvironment, _}
import org.apache.flink.streaming.connectors.kafka.{FlinkKafkaConsumer, FlinkKafkaProducer}
import org.apache.flink.table.api.EnvironmentSettings
import org.apache.flink.util.Collector


/**
 * Flink job that measures how long each user stays in a chat room.
 *
 * Reads raw user events from the `rt_user_event` Kafka topic, keeps the
 * enter-event (`CHAT_ROOM_ACCESS`) per (room, user) key in TTL'd keyed state,
 * and on the matching quit-event (`CHAT_ROOM_QUIT`) emits a [[Result]] with the
 * stay duration to the `idl_user_enter_room_info` topic as JSON.
 *
 * Fixes vs. previous revision:
 *  - removed the unused Blink planner `EnvironmentSettings` value;
 *  - each record is now JSON-parsed once (previously parsed in both
 *    `filter` and `map`);
 *  - action comparison is constant-first, so a record with a missing/null
 *    `eventType` is skipped instead of failing the job with an NPE;
 *  - the null-sentinel `RichMapFunction` + `filter(_ != null)` pair is
 *    replaced by the idiomatic `RichFlatMapFunction` + `Collector`.
 */
object RoomStayTimeStat {

  private val checkpoint_dir = "room_stay_time_stat_prod"
  private val kafkaGroup = "room_stay_time_stat"
  private val kafkaTopic = "rt_user_event"
  private val resultTopic = "idl_user_enter_room_info"

  // Event types we care about; capitalized so they work as stable-id patterns in `match`.
  private val EnterAction = "CHAT_ROOM_ACCESS"
  private val QuitAction = "CHAT_ROOM_QUIT"

  def main(args: Array[String]): Unit = {
    val env = StreamExecutionEnvironment.getExecutionEnvironment

    // Checkpoint every 10s; keep externalized checkpoints so the job can be
    // restored after a cancel. State lives in RocksDB under the shared dir.
    env.enableCheckpointing(10000)
    env.getCheckpointConfig.enableExternalizedCheckpoints(ExternalizedCheckpointCleanup.RETAIN_ON_CANCELLATION)
    env.setStateBackend(new RocksDBStateBackend(Constants.CHECKPOINT_DIR + checkpoint_dir))
    // NOTE(review): event-time is declared but no timestamp/watermark assigner is
    // installed; the pipeline itself uses no timers, so this is currently inert.
    env.setStreamTimeCharacteristic(TimeCharacteristic.EventTime)

    val properties = new Properties()
    properties.setProperty("bootstrap.servers", Config.getKafkaServer())
    properties.setProperty("group.id", kafkaGroup)
    val consumer = new FlinkKafkaConsumer[String](kafkaTopic, new SimpleStringSchema(), properties)
    consumer.setStartFromEarliest()

    val resultStream = env.addSource(consumer)
      // Parse once; drop anything that is not an enter/quit event.
      .flatMap((raw: String) => parseEvent(raw).toList)
      // One state slot per (room, user) pair.
      .keyBy(event => event.roomNo + "#" + event.userId)
      .flatMap(new StayTimeFunction)
      .map(result => JSON.toJSONString(result, new SerializeConfig(true)))

    // NOTE(review): `properties` still carries the consumer group.id; the
    // producer ignores it, but a dedicated Properties would be cleaner.
    resultStream.addSink(new FlinkKafkaProducer(resultTopic, new KafkaStringSerializationSchema(resultTopic), properties, FlinkKafkaProducer.Semantic.AT_LEAST_ONCE))
    env.execute("room_stay_time_stat_prod")
  }

  /**
   * Parses one raw Kafka record into a [[UserEvent]].
   *
   * @param raw JSON payload from the source topic
   * @return `Some(event)` for enter/quit actions, `None` for every other event type
   *         (including records whose `eventType` field is absent).
   */
  private def parseEvent(raw: String): Option[UserEvent] = {
    val json = JSON.parseObject(raw)
    val action = json.getString("eventType")
    // Constant-first equals: safe when `eventType` is missing (action == null).
    if (EnterAction.equals(action) || QuitAction.equals(action)) {
      val roomNo = json.getJSONArray("eventParas").getString(0)
      val userId = json.getLong("busiId")
      val ingestionTimestamp = json.getLong("ingestion_timestamp")
      Some(UserEvent(roomNo, userId, action, ingestionTimestamp, new Date(ingestionTimestamp)))
    } else {
      None
    }
  }

  /**
   * Keyed function pairing each quit-event with the stored enter-event of the
   * same (room, user) key and emitting the elapsed stay time.
   *
   * State TTL of 7 days bounds the footprint of users who enter but never quit.
   */
  private class StayTimeFunction extends RichFlatMapFunction[UserEvent, Result] {

    @transient private var userState: ValueState[UserEvent] = _

    override def open(parameters: Configuration): Unit = {
      val stateDescriptor = new ValueStateDescriptor[UserEvent]("user", classOf[UserEvent])
      val ttlConfig = StateTtlConfig.newBuilder(Time.days(7))
        .setUpdateType(StateTtlConfig.UpdateType.OnReadAndWrite)
        .setStateVisibility(StateTtlConfig.StateVisibility.NeverReturnExpired)
        .cleanupInRocksdbCompactFilter(1000L)
        .build()
      stateDescriptor.enableTimeToLive(ttlConfig)
      userState = getRuntimeContext.getState(stateDescriptor)
    }

    override def flatMap(event: UserEvent, out: Collector[Result]): Unit = {
      event.action match {
        case EnterAction =>
          // Latest enter wins; a re-enter before quitting overwrites the old one.
          userState.update(event)
        case QuitAction =>
          // Quit without a stored enter (expired TTL, state lost, or out-of-order
          // data) emits nothing — same as the original null-filtered behavior.
          Option(userState.value()).foreach { enterEvent =>
            val stayTime = event.ingestionTimestamp - enterEvent.ingestionTimestamp
            out.collect(Result(event.roomNo, event.userId, stayTime, enterEvent.ingestionTime, event.ingestionTime))
            userState.clear()
          }
        case _ => // unreachable: parseEvent only emits the two actions above
      }
    }
  }

}
/**
 * One enter/quit room event as parsed from the raw Kafka payload.
 *
 * @param roomNo             chat room identifier (first element of `eventParas`)
 * @param userId             user identifier (`busiId` field)
 * @param action             event type, `CHAT_ROOM_ACCESS` or `CHAT_ROOM_QUIT`
 * @param ingestionTimestamp ingestion time in epoch milliseconds
 * @param ingestionTime      same instant as [[ingestionTimestamp]], as a [[java.util.Date]]
 */
case class UserEvent(
  roomNo: String,
  userId: Long,
  action: String,
  ingestionTimestamp: Long,
  ingestionTime: Date
)
/**
 * Output record: how long one user stayed in one room for a single
 * enter/quit pair; serialized to JSON for the result Kafka topic.
 *
 * @param roomNo    chat room identifier
 * @param userId    user identifier
 * @param stayTime  quit time minus enter time, in milliseconds
 * @param enterTime ingestion time of the enter event
 * @param quitTime  ingestion time of the quit event
 */
case class Result(
  roomNo: String,
  userId: Long,
  stayTime: Long,
  enterTime: Date,
  quitTime: Date
)