package com.fulu.game.bigdata.realtime.roomstat

import java.util.Properties

import com.alibaba.fastjson.serializer.SerializeConfig
import com.alibaba.fastjson.{JSON, JSONObject}
import com.fulu.game.bigdata.realtime.config.{Config, Constants}
import com.fulu.game.bigdata.realtime.sink.KafkaStringSerializationSchema
import com.fulu.game.bigdata.realtime.utils.TimeUtils
import org.apache.flink.api.common.functions.RichFlatMapFunction
import org.apache.flink.api.common.serialization.SimpleStringSchema
import org.apache.flink.api.common.state.{StateTtlConfig, ValueState, ValueStateDescriptor}
import org.apache.flink.api.common.time.Time
import org.apache.flink.configuration.Configuration
import org.apache.flink.contrib.streaming.state.RocksDBStateBackend
import org.apache.flink.streaming.api.TimeCharacteristic
import org.apache.flink.streaming.api.environment.CheckpointConfig.ExternalizedCheckpointCleanup
import org.apache.flink.streaming.api.scala.{StreamExecutionEnvironment, _}
import org.apache.flink.streaming.connectors.kafka.{FlinkKafkaConsumer, FlinkKafkaProducer}
import org.apache.flink.table.api.EnvironmentSettings
import org.apache.flink.util.Collector

/**
 * Flink streaming job that pairs room "open" (operation_type = 1) and "close"
 * (operation_type = 2) binlog events — keyed by roomNo#operationId — and emits
 * one [[RoomLivelyInfo]] record per calendar day the room stayed open, so a
 * session spanning midnight is split at each day boundary.
 */
object RoomLivelyNumStat {

  // Job identifiers: checkpoint directory suffix, Kafka consumer group,
  // canal binlog source topic and result topic.
  private val checkpoint_dir = "room_lively_num_stat_prod"
  private val kafkaGroup = "room_lively_num_stat_prod"
  private val kafkaTopic = "cancal_t_room_info_operation_log"
  private val resultTopic = "idl_room_open_info"

  def main(args: Array[String]): Unit = {

    val env = StreamExecutionEnvironment.getExecutionEnvironment

    // Checkpoint every 10s; keep externalized checkpoints on cancel so the
    // job can be restored manually after an operator-initiated stop.
    env.enableCheckpointing(10000)
    env.getCheckpointConfig.enableExternalizedCheckpoints(ExternalizedCheckpointCleanup.RETAIN_ON_CANCELLATION)
    env.setStateBackend(new RocksDBStateBackend(Constants.CHECKPOINT_DIR + checkpoint_dir))
    env.setStreamTimeCharacteristic(TimeCharacteristic.EventTime)

    val properties = new Properties()
    properties.setProperty("bootstrap.servers", Config.getKafkaServer())
    properties.setProperty("group.id", kafkaGroup)
    val consumer = new FlinkKafkaConsumer[String](kafkaTopic, new SimpleStringSchema(), properties)
    // Only replay from the start of today on a fresh (checkpoint-less) start.
    consumer.setStartFromTimestamp(TimeUtils.millsOfToday())

    val resultStream = env.addSource(consumer)
      .flatMap(item => {
        val json = JSON.parseObject(item)
        // Canal can emit messages without a "data" array (e.g. DDL events);
        // treat those as empty instead of failing with an NPE. The seed array
        // passed to toArray is zero-length on purpose: a size-1 seed would
        // yield a single null element when "data" is empty.
        Option(json.getJSONArray("data"))
          .map(_.toArray(new Array[JSONObject](0)))
          .getOrElse(Array.empty[JSONObject])
          .map(jo => {
            val roomNo = jo.getString("room_no")
            val operationId = jo.getString("operation_id")
            val operationType = jo.getIntValue("operation_type")
            val createTime = jo.getString("create_time")
            RoomOperationLog(roomNo, operationId, operationType, createTime)
          })
      })
      // operation_id ties an open event to its matching close event.
      .keyBy(log => log.roomNo + "#" + log.operationId)
      .flatMap(new RichFlatMapFunction[RoomOperationLog, RoomLivelyInfo] {

        // Holds the "open" event until the matching "close" arrives.
        var operationState: ValueState[RoomOperationLog] = _

        override def open(parameters: Configuration): Unit = {
          val stateDescriptor = new ValueStateDescriptor[RoomOperationLog]("operation", classOf[RoomOperationLog])
          // Expire unmatched open events after 7 days so keyed state cannot
          // grow without bound when a close event never arrives.
          val ttlConfig = StateTtlConfig.newBuilder(Time.days(7))
            .setUpdateType(StateTtlConfig.UpdateType.OnReadAndWrite)
            .setStateVisibility(StateTtlConfig.StateVisibility.NeverReturnExpired)
            .cleanupInRocksdbCompactFilter(1000L)
            .build()
          stateDescriptor.enableTimeToLive(ttlConfig)
          operationState = getRuntimeContext.getState(stateDescriptor)
        }

        override def flatMap(operationLog: RoomOperationLog, out: Collector[RoomLivelyInfo]): Unit = {

          if (operationLog.operationType == 1) {
            // Open event: remember it for the matching close.
            operationState.update(operationLog)
          } else if (operationLog.operationType == 2) {

            val openRoom = operationState.value()

            // Ignore a close with no recorded open (TTL-expired open, or a
            // duplicate/replayed close already consumed below).
            if (openRoom != null) {

              val closeRoom = operationLog

              // Walk day by day from open to close, emitting one record per
              // calendar day so cross-midnight sessions are split correctly.
              var openTime = openRoom.createTime
              val closeTime = closeRoom.createTime
              var finished = false
              while (!finished) {

                if (TimeUtils.isSameDay(openTime, closeTime)) {
                  // Last (or only) day: close ends this segment.
                  finished = true
                  val livelyTime = TimeUtils.subTime(closeTime, openTime)
                  out.collect(RoomLivelyInfo(closeRoom.roomNo, livelyTime, openTime))
                } else {
                  // Intermediate day: segment runs until the next midnight.
                  val nextDay = TimeUtils.nextDay(openTime)
                  val livelyTime = TimeUtils.subTime(nextDay, openTime)
                  out.collect(RoomLivelyInfo(closeRoom.roomNo, livelyTime, openTime))

                  openTime = nextDay
                }
              }

              // Consume the matched open so a duplicate close event cannot
              // double-count the same interval.
              operationState.clear()
            }
          }
        }
      }).map(result => JSON.toJSONString(result, new SerializeConfig(true)))

    resultStream.addSink(new FlinkKafkaProducer(resultTopic, new KafkaStringSerializationSchema(resultTopic), properties, FlinkKafkaProducer.Semantic.AT_LEAST_ONCE))

    env.execute("room_lively_num_stat_prod")
  }
}

case class RoomOperationLog(roomNo: String, operationId: String, operationType: Int, createTime: String)

case class RoomLivelyInfo(roomNo: String, livelyTime: Long, createTime: String)
