package dws

import com.alibaba.fastjson.{JSON, JSONObject}
import ods.SourceKafka
import org.apache.flink.api.common.functions.AggregateFunction
import org.apache.flink.streaming.api.TimeCharacteristic
import org.apache.flink.streaming.api.scala.{DataStream, StreamExecutionEnvironment}
import org.apache.flink.streaming.api.scala._
import org.apache.flink.streaming.api.scala.function.WindowFunction
import org.apache.flink.streaming.api.windowing.time.Time
import org.apache.flink.streaming.api.windowing.windows.TimeWindow
import org.apache.flink.util.Collector

/**
 * DWS-layer job: counts events per channel over 10-second tumbling
 * processing-time windows.
 *
 * Pipeline:
 *   Kafka topic "stage9_event_log" (JSON strings)
 *     -> extract (channel, uid) from the "attr" object
 *     -> keyBy channel
 *     -> 10s window, incremental count via AggregateFunction
 *     -> print (channel, count)
 *
 * NOTE(review): `timeWindow` uses the environment's default time
 * characteristic (processing time here, since no watermarks/timestamps
 * are assigned) — confirm event time is not required.
 */
object ChannelUserStatistics {
  def main(args: Array[String]): Unit = {
    val env: StreamExecutionEnvironment = StreamExecutionEnvironment.getExecutionEnvironment

    // ODS source: raw event-log records as JSON strings.
    val eventLog: DataStream[String] = env.addSource(SourceKafka.getSource("stage9_event_log"))

    // Extract (channel, uid). Guard against records missing the "attr"
    // object or either field: previously a malformed record would NPE and
    // fail the whole job; now such records are dropped.
    // NOTE(review): JSON.parseObject still throws on syntactically invalid
    // JSON — wrap in try/catch if the topic can carry non-JSON payloads.
    val channelAndUser: DataStream[(String, String)] = eventLog.flatMap((x, collect: Collector[(String, String)]) => {
      val json: JSONObject = JSON.parseObject(x)
      val attrJson: JSONObject = json.getJSONObject("attr")
      if (attrJson != null) {
        val channel: String = attrJson.getString("channel")
        val uid: String = attrJson.getString("uid")
        if (channel != null && uid != null) {
          collect.collect((channel, uid))
        }
      }
    })

    // Partition by channel so each window counts one channel's events.
    val keyed: KeyedStream[(String, String), String] = channelAndUser.keyBy(x => x._1)

    // Incremental per-window count; the WindowFunction only attaches the
    // key to the pre-aggregated result (input holds exactly one element).
    val result: DataStream[(String, Int)] = keyed.timeWindow(Time.seconds(10)).aggregate(new AggregateFunction[(String, String), Int, Int] {
      override def createAccumulator(): Int = 0

      // Each incoming (channel, uid) pair increments the count by one.
      override def add(value: (String, String), accumulator: Int): Int = accumulator + 1

      override def getResult(accumulator: Int): Int = accumulator

      // BUG FIX: was `???`, which throws NotImplementedError whenever Flink
      // merges partial accumulators (merging windows / certain restore
      // paths). Counts merge by summing.
      override def merge(a: Int, b: Int): Int = a + b
    }, new WindowFunction[Int, (String, Int), String, TimeWindow] {
      override def apply(key: String, window: TimeWindow, input: Iterable[Int], out: Collector[(String, Int)]): Unit = {
        out.collect((key, input.iterator.next()))
      }
    })

    result.print()

    env.execute()
  }
}
