package com.stqf.scala.flink

import java.util
import java.util.{Objects, Properties}
import com.fasterxml.jackson.databind.JsonNode
import com.stqf.scala.utils.ProUtils
import org.apache.flink.api.common.io.OutputFormat
import org.apache.flink.api.common.restartstrategy.RestartStrategies
import org.apache.flink.api.common.time.Time
import org.apache.flink.api.common.typeinfo.{TypeHint, TypeInformation}
import org.apache.flink.api.java.utils.ParameterTool
import org.apache.flink.configuration.Configuration
import org.apache.flink.streaming.api.scala.StreamExecutionEnvironment
import org.apache.flink.streaming.api.scala.function.AllWindowFunction
import org.apache.flink.streaming.api.windowing.windows.TimeWindow
import org.apache.flink.streaming.connectors.kafka.{FlinkKafkaConsumer, KafkaDeserializationSchema}
import org.apache.flink.util.Collector
import org.apache.kafka.clients.consumer.ConsumerRecord
import org.slf4j.{Logger, LoggerFactory}
import org.apache.flink.api.scala._

import scala.beans.BeanProperty

/** Aggregation element: an alarm-type label paired with its occurrence count. */
case class AtItem(at: String, count: Int)

/** A deserialized Kafka record: the source topic name plus the payload parsed into a JSON tree. */
case class Record(topic: String, node: JsonNode)

/**
 * Mutable word-count style holder. `@BeanProperty` generates the Java-bean
 * getters/setters that Flink's POJO serializer relies on.
 */
class WcItem(@BeanProperty var name: String, @BeanProperty var count: Int) {
  // Same "name : count" rendering as before, built by plain concatenation.
  override def toString: String = name + " : " + count
}

/**
 * Kafka deserialization schema producing [[Record]] values: the raw payload
 * bytes are parsed into a JSON tree via `ProUtils.parse` and paired with the
 * topic the message arrived on.
 */
class SimpleKds extends KafkaDeserializationSchema[Record] {

  // The stream is unbounded; no element ever marks its end.
  override def isEndOfStream(nextElement: Record): Boolean = false

  override def getProducedType: TypeInformation[Record] =
    TypeInformation.of(new TypeHint[Record] {})

  override def deserialize(record: ConsumerRecord[Array[Byte], Array[Byte]]): Record =
    Record(record.topic(), ProUtils.parse(record.value()))

}

/**
 * Logging sink: writes each record to the slf4j log at INFO level.
 *
 * Bug fix: `configure`, `open` and `close` were left as `???`, which throws
 * `scala.NotImplementedError`. Flink invokes all three as part of the sink
 * lifecycle, so the job crashed before a single record was written. They are
 * now explicit no-ops (this sink needs no configuration or resources).
 */
class OutputFormatImpl extends OutputFormat[Object] {
  val log: Logger = LoggerFactory.getLogger(this.getClass)

  // No configuration required for a pure logging sink.
  override def configure(parameters: Configuration): Unit = ()

  // No per-task resources to acquire.
  override def open(taskNumber: Int, numTasks: Int): Unit = ()

  override def writeRecord(record: Object): Unit = {
    log.info("OutputFormatImpl: ... {}", record)
  }

  // Nothing to release.
  override def close(): Unit = ()
}

/** Collapses all [[AtItem]] elements of one window into a single List emission. */
class AllWindowFunImpl extends AllWindowFunction[AtItem, List[AtItem], TimeWindow] {
  override def apply(window: TimeWindow, input: Iterable[AtItem], out: Collector[List[AtItem]]): Unit = {
    val batch = input.toList
    out.collect(batch)
  }
}

/**
 * Flink job: consumes JSON events from Kafka, counts them per "alarmType" in
 * 2-second processing-time windows, and prints the per-type counts.
 *
 * Kafka settings are now overridable from the command line
 * (`--group.id`, `--bootstrap.servers`, `--topic`); the previous hard-coded
 * values remain the defaults, so existing invocations behave identically.
 */
object RecvKafkaStreamingPro {

  val log: Logger = LoggerFactory.getLogger(this.getClass)

  // Explicit TypeInformation instances so the Scala API does not fall back to
  // generic/Kryo serialization for these element types.
  implicit val tpI: TypeInformation[Record] = TypeInformation.of(new TypeHint[Record] {})

  implicit val tpK: TypeInformation[String] = TypeInformation.of(new TypeHint[String] {})

  implicit val tpMt: TypeInformation[org.apache.flink.api.java.tuple.Tuple2[String, Long]] = TypeInformation.of(new TypeHint[org.apache.flink.api.java.tuple.Tuple2[String, Long]] {})

  implicit val tpMc: TypeInformation[AtItem] = TypeInformation.of(new TypeHint[AtItem] {})

  implicit val tpMb: TypeInformation[WcItem] = TypeInformation.of(new TypeHint[WcItem] {})

  implicit val tpApply: TypeInformation[List[AtItem]] = TypeInformation.of(new TypeHint[List[AtItem]] {})
  //implicit val tpI: TypeInformation[(String, JsonNode)] = TypeInformation.of(new TypeHint[(String, JsonNode)] {})

  def main(args: Array[String]): Unit = {
    val wcItem = new WcItem("hello", 1)
    log.debug("wcItem ... {},{}", wcItem.getName, wcItem.getCount)

    log.debug("... Hello Simple ... ")
    val params = ParameterTool.fromArgs(args)

    // Kafka connectivity: CLI-overridable, previous literals kept as defaults.
    val props = new Properties()
    props.put("group.id", params.get("group.id", "fik"))
    props.put("bootstrap.servers", params.get("bootstrap.servers", "192.168.2.96:9092"))

    val topics = util.Arrays.asList(params.get("topic", "os-mouse"))
    val simpleKds = new SimpleKds()
    val consumer = new FlinkKafkaConsumer[Record](topics, simpleKds, props)
    consumer.setStartFromEarliest()

    // Local environment with web UI for inspection during development.
    //val env = StreamExecutionEnvironment.getExecutionEnvironment
    val env = StreamExecutionEnvironment.createLocalEnvironmentWithWebUI(new Configuration())
    env.setParallelism(1)
    env.enableCheckpointing(15000L)
    env.getConfig.setGlobalJobParameters(params)
    env.setRestartStrategy(RestartStrategies.fixedDelayRestart(5, Time.milliseconds(10000L)))

    val stream = env.addSource(consumer).name("Source")

    /* 1. Tuple approach (requires the org.apache.flink.api.scala._ import). */
    val res = stream.map(item => {
      // BUG FIX: the Array was previously passed as ONE slf4j argument, so the
      // log line printed the array's toString and left the second {} unfilled.
      if (log.isDebugEnabled) log.debug(s"[${item.topic}] ... ${item.node.toString}")
      // Missing "alarmType" fields are bucketed under "Blank".
      val at = item.node.get("alarmType")
      val value = if (at == null) "Blank" else at.asText()
      org.apache.flink.api.java.tuple.Tuple2.of(value, 1)
      // NOTE: the previous Objects.nonNull filter was removed — the map above
      // always returns a non-null tuple, so the filter could never drop anything.
    }).name("Map")
      .keyBy(item => item.f0)
      // BUG FIX: timeWindowAll after keyBy discarded the keying and merged all
      // alarm types into one global count; a keyed window keeps per-type counts.
      .timeWindow(org.apache.flink.streaming.api.windowing.time.Time.seconds(2L))
      .sum(1)

    /* 2. Case-class approach (abandoned). */
    /*val res = stream.map(item => {
      println("[{}] ... {}", item.topic, item.node.textValue)
      val at = item.node.get("alarmType").asText()
      AtItem(at, 1)
    }).name("Map")
      .keyBy(_.at)
      .timeWindowAll(org.apache.flink.streaming.api.windowing.time.Time.seconds(2L))
      .sum("count")*/

    /* 3. POJO approach (starts successfully). */
    /*val res = stream.map(item => {
      println("[{}] ... {}", item.topic, item.node.textValue)
      val at = item.node.get("alarmType").asText()
      new WcItem(at, 1)
    }).name("Map")
      .keyBy(_.getName)
      .timeWindowAll(org.apache.flink.streaming.api.windowing.time.Time.seconds(2L))
      .reduce((_1, _2) => new WcItem(_1.name, _1.count + _2.count))*/

    res.print()

    env.execute("Simple")

  }


}
