package com.baishancloud.log.live.tengine

import cn.hutool.json.JSONUtil
import com.baishancloud.log.common.env.StreamEnv
import com.baishancloud.log.common.sink.SinkUtil
import com.baishancloud.log.common.source.SourceUtil
import org.apache.flink.api.java.utils.ParameterTool
import org.apache.flink.streaming.api.scala._
import org.apache.flink.streaming.api.windowing.assigners.TumblingProcessingTimeWindows
import org.apache.flink.streaming.api.windowing.time.Time
import org.slf4j.{Logger, LoggerFactory}

import java.io.Serializable

/** Marker class whose only purpose is to give the companion object's logger a
  * concrete `Class[_]` (see `classOf[StreamingTengineLive]` below). */
class StreamingTengineLive extends Serializable

/** Flink job: consumes two Kafka topics of Tengine access logs, parses each
  * record, aggregates per key over 1-minute processing-time tumbling windows
  * and writes the JSON results to StarRocks. */
object StreamingTengineLive extends Serializable {

  // Logger keyed on the companion marker class so log lines carry the job name.
  val LOGGER: Logger = LoggerFactory.getLogger(classOf[StreamingTengineLive])


  /** Job entry point.
    *
    * @param args command-line arguments parsed via Flink's `ParameterTool`;
    *             forwarded to the environment builder, sources, operators and sink.
    */
  def main(args: Array[String]): Unit = {
    val params = ParameterTool.fromArgs(args)
    println("输入参数为：" + params.toMap.toString)

    // Initialise the streaming environment from the supplied parameters.
    StreamEnv.builder(params).build()

    // Two Kafka sources (suffixes "1" and "2") merged into one raw stream.
    val left: DataStream[String] = SourceUtil.kafkaOnlyValue(params, "1")
    val right: DataStream[String] = SourceUtil.kafkaOnlyValue(params, "2")
    val raw = left.union(right)

    // Drop nulls, tokenise on spaces, keep only records with enough fields.
    // NOTE(review): the predicate is `>= 21` while the operator label says
    // "length>21" — confirm which of the two is the intended threshold.
    val tokens = raw
      .filter(_ != null).name("!=null").uid("5396f7e-d729-44bd-a89d-9a714c061c1b")
      .map(_.split(" ")).name("split").uid("000ede23-2931-45ff-bdb-123001c9a4b5")
      .filter(_.length >= 21).name("length>21").uid("daf68ff5-76b-4ba4-b110-a078e3626c1a")

    // Parse, key, window (1-minute processing time), aggregate, serialise and sink.
    tokens
      .process(new TengineRecordParse(params)).name("TengineRecordParse").uid("8acefe24-4ff-459f-856b-a8b1c5782d80")
      .keyBy(_.getKey)
      .window(TumblingProcessingTimeWindows.of(Time.minutes(1)))
      .process(new TengineWindowAgg(params)).name("uvAgg").uid("46bff63-76b5-4ba8-ab70-146aaaf3d482")
      .map(JSONUtil.toJsonStr(_)).name("toJsonStr").uid("961122a5-9762-4ff4-925b-bb9a0da4d88")
      .addSink(SinkUtil.starRocksJsonString(params)).setParallelism(params.getInt(sinkParallel, 1)).name("starRocks").uid("58ac2db1-2064-4ce-b8ef-531bd003bbe0")

    StreamEnv.execute(params)
  }

}