package com.king.app.dwm

import java.time.Duration
import java.util

import com.alibaba.fastjson.serializer.SerializerFeature
import com.alibaba.fastjson.{JSON, JSONAware, JSONObject}
import com.king.config.StateBackendConfig
import com.king.util.MyKafkaUtil
import org.apache.flink.api.common.eventtime.{SerializableTimestampAssigner, WatermarkStrategy}
import org.apache.flink.cep.{PatternSelectFunction, PatternTimeoutFunction}
import org.apache.flink.cep.pattern.conditions.SimpleCondition
import org.apache.flink.cep.scala.{CEP, PatternStream}
import org.apache.flink.cep.scala.pattern.Pattern
import org.apache.flink.runtime.state.filesystem.FsStateBackend
import org.apache.flink.streaming.api.CheckpointingMode
import org.apache.flink.streaming.api.scala._
import org.apache.flink.streaming.api.windowing.time.Time

import scala.util.parsing.json

/**
 * @Author: KingWang
 * @Date: 2022/2/8  
 * @Desc:  User jump-out (bounce) detection — this job only performs the filtering.
 **/
object UserJumpDetailApp {

  /**
   * Returns true when the record has no previous page id, i.e. it is a
   * session "entry" page. Two consecutive entry pages from the same device
   * (or an entry page with no follow-up) indicate a jump-out / bounce.
   *
   * Also guards against a missing "page" object, which would have thrown an
   * NPE in the original inline conditions. A record without a "page" object
   * is treated as an entry page — NOTE(review): confirm against the
   * dwd_page_log schema that "page" is in fact always present.
   */
  private def isEntryPage(value: JSONObject): Boolean = {
    val page = value.getJSONObject("page")
    if (page == null) true
    else {
      val lastPageId = page.getString("last_page_id")
      lastPageId == null || lastPageId.isEmpty
    }
  }

  /**
   * Job entry point: reads page logs from Kafka, detects jump-out (bounce)
   * events with Flink CEP, and writes them back to Kafka.
   */
  def main(args: Array[String]): Unit = {

    // 1. Set up the execution environment.
    val env = StreamExecutionEnvironment.getExecutionEnvironment
    env.setParallelism(1)

    env.setStateBackend(new FsStateBackend(StateBackendConfig.getFileCheckPointDir("user_jump_detail_app")))
    // FIX: checkpointing is disabled by default, so the configuration below
    // previously had no effect. Enable it explicitly (10s interval; the
    // 10s min-pause below makes the effective cadence at most one per ~20s).
    env.enableCheckpointing(10000L)
    env.getCheckpointConfig.setCheckpointingMode(CheckpointingMode.EXACTLY_ONCE)
    env.getCheckpointConfig.setCheckpointTimeout(10000L)
    env.getCheckpointConfig.setMaxConcurrentCheckpoints(3)
    env.getCheckpointConfig.setMinPauseBetweenCheckpoints(10000L)

    // 2. Kafka topics / consumer group.
    val sourceTopic = "dwd_page_log"
    val groupId = "userJumpDetailApp"
    val sinkTopic = "dwm_user_jump_detail"

    // 3. Parse each record into a JSONObject and assign event-time
    //    watermarks (2s bounded out-of-orderness) from the "ts" field (ms).
    val kafkaStream = env.addSource(MyKafkaUtil.getKafkaConsumer(sourceTopic, groupId))
      .map(x => JSON.parseObject(x))
      .assignTimestampsAndWatermarks(
        WatermarkStrategy.forBoundedOutOfOrderness[JSONObject](Duration.ofSeconds(2))
          .withTimestampAssigner(new SerializableTimestampAssigner[JSONObject] {
            // NOTE(review): assumes every record carries a numeric "ts";
            // a missing "ts" would NPE on unboxing — confirm upstream contract.
            override def extractTimestamp(element: JSONObject, recordTimestamp: Long): Long = element.getLong("ts")
          })
      )

    // Note: this uses the Scala flavor of the CEP dependency.
    // 4. CEP pattern: two strictly consecutive entry pages within 10 seconds.
    //    Both steps share one condition (previously duplicated inline).
    val entryPageCondition = new SimpleCondition[JSONObject] {
      override def filter(value: JSONObject): Boolean = isEntryPage(value)
    }

    val pattern = Pattern.begin[JSONObject]("start").where(entryPageCondition)
      // next = strict contiguity: the very next event must also match
      .next("next").where(entryPageCondition)
      .within(Time.seconds(10)) // time bound for the whole sequence

    // 5. Apply the pattern per device id ("mid").
    val patternSteam: PatternStream[JSONObject] =
      CEP.pattern(kafkaStream.keyBy(json => json.getJSONObject("common").getString("mid")), pattern)

    // 6. Extract matched and timed-out sequences; each contributes its "start" event.
    //    - timeout: an entry page with no follow-up within 10s => the user bounced
    //    - match:   an entry page immediately followed by another entry page => also a bounce
    val timeoutTag: OutputTag[JSONObject] = new OutputTag[JSONObject]("timeout")
    val selectDS = patternSteam.select(timeoutTag,
      new PatternTimeoutFunction[JSONObject, JSONObject] {
        override def timeout(pattern: util.Map[String, util.List[JSONObject]], timeoutTimestamp: Long): JSONObject =
          pattern.get("start").get(0)
      },
      new PatternSelectFunction[JSONObject, JSONObject] {
        override def select(pattern: util.Map[String, util.List[JSONObject]]): JSONObject =
          pattern.get("start").get(0)
      })

    val timeoutDS = selectDS.getSideOutput(timeoutTag)

    // 7. Union both kinds of bounce events back into a single stream
    //    (they were only split because CEP emits timeouts as a side output).
    val unionDS = selectDS.union(timeoutDS)

    // 8. Sink to Kafka. The element is already a JSONObject, so serialize it
    //    directly; the original JSON.toJSON(x).toString was a redundant round-trip.
    unionDS.print()
    unionDS.map(_.toJSONString)
      .addSink(MyKafkaUtil.getKafkaProducer(sinkTopic))

    // 9. Trigger job execution.
    env.execute("user_jump_detail_app")
  }

}
