package com.fulu.game.bigdata.realtime.roomstat

import java.util.Properties

import com.alibaba.fastjson.JSON
import com.alibaba.fastjson.serializer.SerializeConfig
import com.fulu.game.bigdata.realtime.config.{Config, Constants}
import com.fulu.game.bigdata.realtime.sink.KafkaStringSerializationSchema
import com.fulu.game.bigdata.realtime.utils.TimeUtils
import org.apache.flink.api.common.serialization.SimpleStringSchema
import org.apache.flink.contrib.streaming.state.RocksDBStateBackend
import org.apache.flink.streaming.api.TimeCharacteristic
import org.apache.flink.streaming.api.environment.CheckpointConfig.ExternalizedCheckpointCleanup
import org.apache.flink.streaming.api.scala.{StreamExecutionEnvironment, _}
import org.apache.flink.streaming.connectors.kafka.{FlinkKafkaConsumer, FlinkKafkaProducer}
import org.apache.flink.table.api.EnvironmentSettings

/**
 * Flink job: consumes raw behaviour events from Kafka, keeps room-exposure
 * events (action "6" from the chatroom list, or from the home-page flow with
 * screen_type "4"), splits the comma-separated `element_id` room list into one
 * [[RoomExposure]] record per room, and writes them as JSON to the result topic.
 */
object ExposureRoomSplit {
  // Checkpoint sub-directory and Kafka consumer group / source / sink topics.
  private val checkpoint_dir = "exposure_room_stat"
  private val kafkaGroup = "exposure_room_stat"
  private val kafkaTopic = "rt_behaviour_event"
  private val resultTopic = "idl_room_exposure"

  def main(args: Array[String]): Unit = {
    val env = StreamExecutionEnvironment.getExecutionEnvironment

    // Checkpoint every 3 minutes; retain externalized checkpoints on cancel so
    // the job can be restored manually. State is kept in RocksDB.
    env.enableCheckpointing(180000)
    env.getCheckpointConfig.enableExternalizedCheckpoints(ExternalizedCheckpointCleanup.RETAIN_ON_CANCELLATION)
    env.setStateBackend(new RocksDBStateBackend(Constants.CHECKPOINT_DIR + checkpoint_dir))
    env.setStreamTimeCharacteristic(TimeCharacteristic.EventTime)

    val properties = new Properties()
    properties.setProperty("bootstrap.servers", Config.getKafkaServer())
    properties.setProperty("group.id", kafkaGroup)
    val consumer = new FlinkKafkaConsumer[String](kafkaTopic, new SimpleStringSchema(), properties)
    // On (re)start, replay from the beginning of today rather than resuming
    // from committed group offsets.
    consumer.setStartFromTimestamp(TimeUtils.millsOfToday())

    // Parse each record exactly once (the original pipeline re-parsed the same
    // payload in every filter stage) and fan exposure events out per room id.
    val resultStream = env.addSource(consumer).flatMap(item => {
      val json = JSON.parseObject(item)
      // Constant-first equals is null-safe: a payload without "action" is
      // dropped instead of failing the job with an NPE.
      if (!"6".equals(json.getString("action"))) {
        Seq.empty[RoomExposure]
      } else {
        val param = JSON.parseObject(json.getString("action_param"))
        val attribute = param.getString("attribute")
        val screenType = param.getString("screen_type")
        val isExposure = "chatroom_ev".equals(attribute) ||
          ("homepage_flow_EV".equals(attribute) && "4".equals(screenType))
        if (!isExposure) {
          Seq.empty[RoomExposure]
        } else {
          val eventTime = json.getString("event_time")
          val userId = json.getLong("user_id")
          val elementId = param.getString("element_id")
          // element_id holds a comma-separated list of exposed room numbers;
          // a missing field yields no output records.
          val ids = if (elementId != null) elementId.split(",") else Array.empty[String]
          ids.toSeq.map(id => RoomExposure(userId, id, eventTime))
        }
      }
    }).map(behaviour => JSON.toJSONString(behaviour, new SerializeConfig(true)))

    resultStream.addSink(new FlinkKafkaProducer(resultTopic, new KafkaStringSerializationSchema(resultTopic), properties, FlinkKafkaProducer.Semantic.AT_LEAST_ONCE))
    env.execute("ExposureRoomSplit")
  }
}
case class RoomExposure (user_id:Long, room_no: String,event_time: String)