package com.king.app.dwm

import java.text.SimpleDateFormat

import com.alibaba.fastjson.{JSON, JSONObject}
import com.king.config.StateBackendConfig
import com.king.util.{DateUtil, MyKafkaUtil}
import org.apache.flink.api.common.functions.{MapFunction, RichFilterFunction, RichMapFunction}
import org.apache.flink.api.common.state.StateTtlConfig.UpdateType
import org.apache.flink.api.common.state.{StateTtlConfig, ValueState, ValueStateDescriptor}
import org.apache.flink.api.common.time.Time
import org.apache.flink.configuration.Configuration
import org.apache.flink.runtime.state.filesystem.FsStateBackend
import org.apache.flink.streaming.api.CheckpointingMode
import org.apache.flink.streaming.api.functions.KeyedProcessFunction
import org.apache.flink.streaming.api.scala._
import org.apache.flink.util.Collector


/**
 * @Author: KingWang
 * @Date: 2022/2/7  
 * @Desc:  UV： 按天访客数
 **/
object UVApp {

  case class Uv(date:String,uv:Double)

  /**
   * Entry point: reads the dwd_page_log Kafka topic, keys the page-log events by
   * calendar day, and emits a running daily UV (distinct-visitor) count.
   */
  def main(args: Array[String]): Unit = {

    // 1. Set up the execution environment.
    val env = StreamExecutionEnvironment.getExecutionEnvironment
    env.setParallelism(1) // in production this should match the Kafka topic's partition count
    env.enableCheckpointing(10000L, CheckpointingMode.EXACTLY_ONCE)
    env.setStateBackend(new FsStateBackend(StateBackendConfig.getFileCheckPointDir("uv_app")))
    // NOTE(review): the checkpoint timeout equals the 10s checkpoint interval;
    // consider a larger timeout so a slow checkpoint is not cancelled immediately.
    env.getCheckpointConfig.setCheckpointTimeout(10000L)
    env.getCheckpointConfig.setMaxConcurrentCheckpoints(3)
    // A 10s minimum pause effectively serialises checkpoints despite maxConcurrent = 3.
    env.getCheckpointConfig.setMinPauseBetweenCheckpoints(10000L)

    // 2. Read the dwd_page_log topic from Kafka.
    val groupId = "uv_app"
    val sourceTopic = "dwd_page_log"

    // (An alternative approach keys by "common.mid" and applies MyFilter to keep only
    //  each visitor's first page view per day; this pipeline instead keeps a per-day
    //  uid set and emits the running count.)
    env.addSource(MyKafkaUtil.getKafkaConsumer(sourceTopic, groupId))
      // 3. Parse each raw line into a JSON object.
      .map(x => JSON.parseObject(x))
      // 4. Key by the event's calendar day so state accumulates distinct uids per day.
      .keyBy(x => DateUtil.getDateTime(x.getString("ts")))
      .process(new MyUvFunction())
      // 5. Print the running counts (TODO: replace with a Kafka sink for the dwm layer).
      .print("date_uv>>>>>")

    // 6. Launch the job.
    env.execute("uv_app")
  }

  /**
   * Keyed by day string; accumulates the set of distinct user ids ("common.uid")
   * seen for that day and emits the running UV count after every element.
   *
   * State: one ValueState holding the mutable Set of uids per keyed day, with a
   * 7-day TTL so state for old days is eventually cleaned up.
   */
  class MyUvFunction extends KeyedProcessFunction[String,JSONObject,Uv]{
    var dateState: ValueState[scala.collection.mutable.Set[String]] = _

    override def open(parameters: Configuration): Unit = {
      val valueStateDesc = new ValueStateDescriptor[scala.collection.mutable.Set[String]]("user-view",classOf[scala.collection.mutable.Set[String]])
      valueStateDesc.enableTimeToLive(StateTtlConfig.newBuilder(Time.days(7)).build())
      dateState = getRuntimeContext.getState(valueStateDesc)
    }

    override def processElement(value: JSONObject, ctx: KeyedProcessFunction[String, JSONObject, Uv]#Context, out: Collector[Uv]): Unit = {
      // BUGFIX: ValueState.value() returns null until the first update for a key,
      // so the original threw a NullPointerException on the first element of each
      // day. Fall back to a fresh empty set in that case.
      val set = Option(dateState.value()).getOrElse(scala.collection.mutable.Set.empty[String])
      set.add(value.getJSONObject("common").getString("uid"))
      dateState.update(set)
      // The current key is the day string; the set size is the distinct-uid count so far.
      out.collect(Uv(ctx.getCurrentKey, set.size))
    }
  }


  /**
   * Intended for a stream keyed by "common.mid": passes only the first qualifying
   * page view per key per day (entry pages only, i.e. no last_page_id), so that
   * downstream counting yields daily UV.
   *
   * State: the last date (yyyy-MM-dd) on which this key passed, with a 24h TTL
   * (refreshed on create/write) so yesterday's mark does not linger indefinitely.
   */
  class MyFilter extends RichFilterFunction[JSONObject]{

    var dateState: ValueState[String] = _
    // SimpleDateFormat is not thread-safe, but each parallel subtask gets its own
    // function instance, so a per-instance formatter is safe here.
    val sdf: SimpleDateFormat = new SimpleDateFormat("yyyy-MM-dd")

    override def open(parameters: Configuration): Unit = {
      val valueStateDesc = new ValueStateDescriptor[String]("date-state",classOf[String])
      // Expire the state 24 hours after creation/last write.
      val stateTtlConfig = StateTtlConfig.newBuilder(Time.hours(24)).setUpdateType(UpdateType.OnCreateAndWrite).build()
      valueStateDesc.enableTimeToLive(stateTtlConfig)
      dateState = getRuntimeContext.getState(valueStateDesc)
    }

    override def filter(value: JSONObject): Boolean = {
      val lastPageId = value.getJSONObject("page").getString("last_page_id")

      // Only a session's entry page (empty last_page_id) can be a first visit of the day.
      if (null == lastPageId || lastPageId.isEmpty) {
        val lastDate = dateState.value()
        // BUGFIX: the original called sdf.format(value.getString("ts")) — passing a
        // String to SimpleDateFormat.format throws IllegalArgumentException, since
        // format only accepts Date/Number. Convert the epoch-millis string first.
        val currDate = sdf.format(new java.util.Date(value.getString("ts").toLong))
        // Pass only if this key has not already passed today.
        if (currDate.equals(lastDate)) {
          false
        } else {
          dateState.update(currDate)
          true
        }
      } else {
        false
      }
    }
  }

}
