package com.baishancloud.log

import com.baishancloud.log.common.env.StreamEnv
import com.baishancloud.log.common.sink.SinkUtil
import com.baishancloud.log.common.source.SourceUtil
import org.apache.flink.api.java.utils.ParameterTool
import org.apache.flink.streaming.api.scala._
import org.apache.flink.streaming.api.windowing.assigners.TumblingProcessingTimeWindows
import org.apache.flink.streaming.api.windowing.time.Time
import org.slf4j.{Logger, LoggerFactory}

import java.io.Serializable

// Empty marker class: its only purpose is to provide a Class token for
// LoggerFactory.getLogger(classOf[...]) in the companion object; it holds
// no state and is never instantiated by this file.
class StreamingMiguICTAudit extends Serializable

object StreamingMiguICTAudit extends Serializable {
  val LOGGER: Logger = LoggerFactory.getLogger(classOf[StreamingMiguICTAudit])

  /**
   * Entry point of the Migu ICT audit-log streaming job.
   *
   * Pipeline: union two Kafka value-only sources, drop null records, split
   * pipe-delimited lines, keep rows with at least 15 fields, parse them,
   * pre-aggregate in mini-batches, sum line/traffic counters per key over a
   * tumbling processing-time window, convert to JSON and sink into StarRocks.
   *
   * NOTE: operator names/uids and the operator chain are part of the job's
   * savepoint compatibility and must not be changed casually.
   *
   * @param args command-line arguments parsed by Flink's ParameterTool
   */
  def main(args: Array[String]): Unit = {
    val parameterTool = ParameterTool.fromArgs(args)
    // Fix: the declared slf4j LOGGER was unused and the startup parameters
    // went to stdout via println; log them instead so they appear in the
    // job's log files. The message text itself is kept unchanged.
    LOGGER.info("输入参数为：{}", parameterTool.toMap.toString)
    StreamEnv.builder(parameterTool).build()
    //    StreamEnv.getStreamExecutionEnvironment.setParallelism(1)
    //    val source = SourceUtil.fileOnlyValue(parameterTool)
    // Two Kafka sources; the "1"/"2" suffixes presumably select different
    // topic/consumer configurations — confirm against SourceUtil.
    val source1: DataStream[String] = SourceUtil.kafkaOnlyValue(parameterTool, "1")
    val source2: DataStream[String] = SourceUtil.kafkaOnlyValue(parameterTool, "2")
    val source = source1.union(source2)
    source
      .filter(_ != null).name("!=null").uid("904e35ad-a39c-4ce8-b552-1e93f7b5a55d")
      // Records are pipe-delimited; rows with fewer than 15 fields are dropped.
      .map(_.split("\\|")).name("split").uid("f979f7ca-b891-406b-b930-1c2afdac6227")
      .filter(_.length >= 15).name("length>=15").uid("269d2e6b-893a-435b-9839-717b7acec0b6")
      .process(new MiguICTAuditLogParse(parameterTool)).name("MiguICTAuditLogParse").uid("74722b34-5c5f-4857-af80-f14078653be0")
      .process(new MiniBatchProcess).name("MiniBatch").uid("d59b8f21-8dc8-4eec-9a66-824ea128b8a5")
      .keyBy(_.getKey)
      // Tumbling processing-time window; size in minutes is read from the
      // aggWindowSize parameter and defaults to 5.
      .window(TumblingProcessingTimeWindows.of(Time.minutes(parameterTool.getLong(aggWindowSize, 5))))
      // Per-key aggregation: sum line counts and traffic within the window.
      .reduce((x, y) => MiguICTAuditLog(x.getKey, MiguICTAuditLogValue(x.line + y.line, x.traffic + y.traffic))).name("windowAgg").uid("59f67559-06ce-4109-8ac0-c40fb732934b")
      .map(new ResultParse()).name("toJson").uid("846ac2f1-0bd3-4c90-a030-144a2ff97c49")
      .addSink(SinkUtil.starRocksJsonString(parameterTool)).setParallelism(parameterTool.getInt(sinkParallel, 1)).name("starRocks").uid("28b83e33-cc78-4415-b385-02abcb391c19")
    //      .print("结果数据：")
    StreamEnv.execute(parameterTool)
  }
}

