package com.bigdata

import java.util.Properties

import com.bigdata.bean.LoginEvent
import com.bigdata.common.constants.Constants
import com.bigdata.common.utils.KafkaCommonUtils
import org.apache.flink.api.common.serialization.SimpleStringSchema
import org.apache.flink.cep.scala.{CEP, PatternStream}
import org.apache.flink.cep.scala.pattern.Pattern
import org.apache.flink.core.fs.FileSystem.WriteMode
import org.apache.flink.streaming.api.TimeCharacteristic
import org.apache.flink.streaming.api.functions.timestamps.BoundedOutOfOrdernessTimestampExtractor
import org.apache.flink.streaming.api.scala.StreamExecutionEnvironment
import org.apache.flink.streaming.api.scala._
import org.apache.flink.streaming.api.windowing.time.Time
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer

object LoginDetect {

  /**
   * Flink streaming job: detects two strictly consecutive login failures for
   * the same user within 10 seconds (event time) using Flink CEP, prints each
   * match and writes it as a CSV line to `./output/loginFail.txt`.
   */
  def main(args: Array[String]): Unit = {
    val env: StreamExecutionEnvironment = StreamExecutionEnvironment.getExecutionEnvironment

    // The pattern window is measured in event time taken from LoginEvent.eventTime.
    env.setStreamTimeCharacteristic(TimeCharacteristic.EventTime)

    // NOTE(review): despite its name, createKafkaSink is used here as a *source*
    // of raw login lines from the login topic — confirm the helper's semantics
    // (or rename it) in KafkaCommonUtils.
    val logDataStream: DataStream[String] = KafkaCommonUtils.createKafkaSink(env, Constants.Topic_Login)

    // Parse "userId,ip,eventType,eventTime" CSV records into LoginEvent.
    // Assumes every record has exactly 4 well-formed fields; malformed lines
    // will fail the task — TODO confirm upstream guarantees or add filtering.
    val source: DataStream[LoginEvent] = logDataStream
      .map { line =>
        val items: Array[String] = line.split(",")
        LoginEvent(items(0).toLong, items(1), items(2), items(3).toLong)
      }
      // Emit watermarks tolerating up to 1 ms of out-of-order events.
      .assignTimestampsAndWatermarks(new BoundedOutOfOrdernessTimestampExtractor[LoginEvent](Time.milliseconds(1)) {
        override def extractTimestamp(element: LoginEvent): Long = element.eventTime
      })

    // Match pattern: a "fail" immediately followed by another "fail",
    // both occurring within a 10-second window.
    val loginFailPattern: Pattern[LoginEvent, LoginEvent] = Pattern.begin[LoginEvent]("begin")
      .where(_.eventType == "fail")
      .next("next")
      .where(_.eventType == "fail")
      .within(Time.seconds(10))

    // Apply the pattern independently per user.
    val patternStream: PatternStream[LoginEvent] = CEP.pattern(source.keyBy(_.userId), loginFailPattern)

    import scala.collection.Map
    // Extract the second failure of each match. For a completed match both
    // "begin" and "next" are guaranteed present and non-empty, so apply + head
    // is safe. (The previous getOrElse(..., null) fallback was misleading: a
    // null would have thrown NPE on the very next call anyway.)
    val loginFailDataStream: DataStream[(Long, String, String)] =
      patternStream.select { (pattern: Map[String, Iterable[LoginEvent]]) =>
        val second: LoginEvent = pattern("next").head
        (second.userId, second.ip, second.eventType)
      }

    loginFailDataStream.print().setParallelism(1)

    // Persist matches as "userId,ip,eventType" lines, overwriting prior runs.
    loginFailDataStream
      .map(event => s"${event._1},${event._2},${event._3}")
      .writeAsText("./output/loginFail.txt", WriteMode.OVERWRITE)
      .setParallelism(1)

    env.execute("Login fail detect Job")
  }
}
