package org.yuanzheng.function

import org.apache.flink.streaming.api.functions.{KeyedProcessFunction, ProcessFunction, RichProcessFunction}
import org.apache.flink.streaming.api.scala.OutputTag
import org.apache.flink.util.Collector
import org.yuanzheng.source.StationLog


/**
 * @author yuanzheng
 * @date 2020/6/18-8:42
 */
object TestSideOutputStream {

  import org.apache.flink.streaming.api.scala._

  // Side-output tag under which non-"success" call records are emitted.
  val notSuccessTag = new OutputTag[StationLog]("not_success")

  /**
   * Reads station call logs from the bundled `/station.log` resource,
   * keys the stream by call type, splits it into a main stream (records
   * whose callType is "success") and a side-output stream (everything
   * else), prints both, and runs the job.
   */
  def main(args: Array[String]): Unit = {
    val streamEnv: StreamExecutionEnvironment = StreamExecutionEnvironment.getExecutionEnvironment
    streamEnv.setParallelism(1)

    val path = getClass.getResource("/station.log").getPath
    // Read the data source: one comma-separated StationLog record per line.
    val stream: DataStream[StationLog] = streamEnv.readTextFile(path).map(line => {
      // `val` instead of `var` — the split result is never reassigned.
      val split = line.split(",")
      new StationLog(split(0).trim, split(1).trim, split(2).trim, split(3).trim, split(4).trim.toLong, split(5).trim.toLong)
    })

    // Route non-success records to the side output; collect the rest.
    val result = stream.keyBy(_.callType).process(new UserDefinedSideOutputStream(notSuccessTag))
    val value = result.getSideOutput(notSuccessTag)
    result.print("主流") // main stream
    value.print("侧流")  // side-output stream
    streamEnv.execute()
  }
}


/**
 * Splits the keyed StationLog stream by call outcome: records whose
 * callType equals "success" are collected into the main output, every
 * other record is emitted under the given side-output tag.
 *
 * @param notSuccessTag tag under which non-success records are routed
 */
class UserDefinedSideOutputStream(notSuccessTag: OutputTag[StationLog]) extends KeyedProcessFunction[String, StationLog, StationLog] {
  override def processElement(value: StationLog, ctx: KeyedProcessFunction[String, StationLog, StationLog]#Context, out: Collector[StationLog]): Unit = {
    value match {
      case ok if ok.callType.equals("success") => out.collect(ok)           // main stream
      case other                               => ctx.output(notSuccessTag, other) // side output
    }
  }
}

