package dws

import java.util

import com.alibaba.fastjson.{JSON, JSONArray, JSONObject}
import modes.{AdvertisementInfo, OrderDetails}
import ods.SourceKafka
import org.apache.flink.api.common.functions.AggregateFunction
import org.apache.flink.cep.functions.PatternProcessFunction
import org.apache.flink.cep.scala.{CEP, PatternStream}
import org.apache.flink.cep.scala.pattern.Pattern
import org.apache.flink.streaming.api.TimeCharacteristic
import org.apache.flink.streaming.api.scala.{DataStream, KeyedStream, StreamExecutionEnvironment}
import org.apache.flink.util.Collector
import org.apache.flink.streaming.api.scala._
import org.apache.flink.streaming.api.scala.function.WindowFunction
import org.apache.flink.streaming.api.windowing.time.Time
import org.apache.flink.streaming.api.windowing.windows.TimeWindow

object AdvertisementBlacklist {

  /**
   * Flags users who trigger three consecutive ad events for the same product
   * within 10 seconds (event time) and prints their userIds as blacklist
   * candidates.
   *
   * Pipeline: Kafka topic "stage9_event_log" -> parse JSON log line ->
   * extract "ad" events -> key by (userId, productId) -> CEP pattern of
   * three strictly consecutive events within 10s -> print matched userIds.
   */
  def main(args: Array[String]): Unit = {
    val env: StreamExecutionEnvironment = StreamExecutionEnvironment.getExecutionEnvironment
    // Configure event-time semantics BEFORE building the topology so every
    // operator (source included) is created under event-time configuration.
    env.setStreamTimeCharacteristic(TimeCharacteristic.EventTime)

    val eventLog: DataStream[String] = env.addSource(SourceKafka.getSource("stage9_event_log"))

    // Parse each raw log line; a single line may contain several events, so
    // flatMap with a Collector emits zero or more AdvertisementInfo records.
    val record: DataStream[AdvertisementInfo] = eventLog.flatMap((line: String, out: Collector[AdvertisementInfo]) => {
      val json: JSONObject = JSON.parseObject(line)
      val jsonArr: JSONArray = json.getJSONArray("lagou_event")
      val attr: JSONObject = json.getJSONObject("attr")
      val area: String = attr.getString("area")
      val userId: String = attr.getString("uid")
      // `elem` (not `x`) avoids shadowing the outer lambda parameter.
      jsonArr.forEach(elem => {
        val jsonEvent: JSONObject = JSON.parseObject(elem.toString)
        // Null-safe comparison: getString returns null when "name" is absent;
        // putting the literal on the left prevents a NullPointerException.
        if ("ad".equals(jsonEvent.getString("name"))) {
          val jsonAd: JSONObject = jsonEvent.getJSONObject("json")
          val productId: String = jsonAd.getString("product_id")
          // NOTE(review): getLong returns a boxed Long that may be null when
          // "time" is missing — unboxing to Long would then NPE. Assumes the
          // field is always present; confirm against the producer's schema.
          val timestamp: Long = jsonEvent.getLong("time")
          out.collect(AdvertisementInfo(productId, timestamp, userId, area))
        }
      })
    })

    // Drop records without a product id, assign ascending event-time
    // timestamps (assumes per-partition ordering from Kafka — TODO confirm),
    // and key the stream by (userId, productId).
    val keyed: KeyedStream[AdvertisementInfo, (String, String)] = record
      .filter(x => x.productId != null)
      .assignAscendingTimestamps(x => x.timestamp)
      .keyBy(x => (x.userId, x.productId))

    // CEP implementation: three strictly consecutive events (next/next) for
    // the same key within a 10-second window constitute a match. No .where()
    // condition is needed — every event on this stream is already an ad click.
    val pattern: Pattern[AdvertisementInfo, AdvertisementInfo] =
      Pattern.begin[AdvertisementInfo]("first").next("second").next("third").within(Time.seconds(10))
    val patterned: PatternStream[AdvertisementInfo] = CEP.pattern(keyed, pattern)
    val blacklist: DataStream[String] = patterned.process(new PatternProcessFunction[AdvertisementInfo, String] {
      override def processMatch(`match`: util.Map[String, util.List[AdvertisementInfo]],
                                ctx: PatternProcessFunction.Context,
                                out: Collector[String]): Unit = {
        // All three stages share the same key, so the first stage's first
        // event carries the offending userId.
        out.collect(`match`.get("first").get(0).userId)
      }
    })

    blacklist.print()


    // Window implementation (kept for reference): count ad events per
    // (user, product) in 10s tumbling windows and keep counts >= 10.
//    val windowed: WindowedStream[AdvertisementInfo, (String, String), TimeWindow] = keyed.timeWindow(Time.seconds(10), Time.seconds(10))
//    val aggregated: DataStream[((String, String), String, Int)] = windowed.aggregate(new AggregateFunction[AdvertisementInfo, Int, Int] {
//      override def createAccumulator(): Int = 0
//
//      override def add(value: AdvertisementInfo, accumulator: Int): Int = accumulator + 1
//
//      override def getResult(accumulator: Int): Int = accumulator
//
//      override def merge(a: Int, b: Int): Int = ???
//    }, new WindowFunction[Int, ((String, String), String, Int), (String, String), TimeWindow] {
//      override def apply(key: (String, String), window: TimeWindow, input: Iterable[Int], out: Collector[((String, String),String, Int)]): Unit = {
//        out.collect((key, s"${window.getStart}-${window.getEnd}", input.iterator.next()))
//      }
//    })
//    val result: DataStream[((String, String), String, Int)] = aggregated.filter(x => x._3 >= 10)
//    result.print()

    env.execute()
  }
}
