package com.ddxz.flink

import org.apache.flink.api.scala._
import org.apache.flink.streaming.api.functions.ProcessFunction
import org.apache.flink.streaming.api.functions.co.{CoFlatMapFunction, CoMapFunction}
import org.apache.flink.streaming.api.scala.{ConnectedStreams, DataStream, OutputTag, StreamExecutionEnvironment}
import org.apache.flink.util.Collector

/**
 * Learning exercise for Flink DataStream basics: connected streams
 * (CoMapFunction / CoFlatMapFunction) and side-output routing.
 *
 * Explicit `main` instead of `extends App`: the App trait's delayed
 * initialization has ordering pitfalls and interacts badly with Flink's
 * closure serialization.
 */
object Learn001 {

  def main(args: Array[String]): Unit = {
    val env = StreamExecutionEnvironment.getExecutionEnvironment

    val stream1: DataStream[(String, Int)] = env.fromElements(("a", 3), ("d", 4), ("c", 2), ("c", 5), ("a", 5))
    val stream2: DataStream[(Int, Int)] = env.fromElements((1, 1), (2, 1), (3, 1), (4, 1), (5, 1))

    // Key both sides on their Int field (index 1 of stream1, index 0 of
    // stream2) so records with the same key are co-partitioned.
    // NOTE(review): index-based keyBy is deprecated in newer Flink; kept here
    // to preserve the original partitioning behavior exactly.
    val connectStream: ConnectedStreams[(String, Int), (Int, Int)] = stream1.connect(stream2).keyBy(1, 0)

    // CoMapFunction: maps each side of the connected stream into a common
    // (Int, String, Int) shape; sentinel values (-100, -1/"default") mark
    // which input a record came from.
    connectStream.map(new CoMapFunction[(String, Int), (Int, Int), (Int, String, Int)] {
      /** Records from stream1: key, label, -100 sentinel. */
      override def map1(value: (String, Int)): (Int, String, Int) =
        (value._2, value._1, -100)

      /** Records from stream2: -1/"default" sentinels plus the key. */
      override def map2(value: (Int, Int)): (Int, String, Int) =
        (-1, "default", value._1)
    }).print()

    // CoFlatMapFunction sharing mutable state between the two inputs:
    // flatMap2 only updates `tmp`, flatMap1 emits it. The result depends on
    // element arrival order, and a plain var is not checkpointed — fine for a
    // demo, not for fault-tolerant production state.
    connectStream.flatMap(new CoFlatMapFunction[(String, Int), (Int, Int), (Int, String, Int)] {
      var tmp: Int = 0

      override def flatMap1(value: (String, Int), out: Collector[(Int, String, Int)]): Unit =
        out.collect((value._2, value._1, tmp))

      // Side-effect only: remember the latest stream2 key, emit nothing.
      override def flatMap2(value: (Int, Int), out: Collector[(Int, String, Int)]): Unit =
        tmp = value._1
    }).print()

    //  stream1.join(stream2).where(in1 => in1._2).equalTo(in2 => in2).window().

    // split/select is deprecated (removed in Flink 1.13); side outputs are its
    // replacement. The original code called getSideOutput on a filtered
    // stream, which always yields an empty stream: side outputs only carry
    // elements explicitly emitted via ctx.output in a process function.
    // Tags are typed to the actual element type (Int, Int), not Int.
    val evenTag = OutputTag[(Int, Int)]("even")
    val oddTag = OutputTag[(Int, Int)]("odd")

    val routed: DataStream[(Int, Int)] = stream2.process(
      new ProcessFunction[(Int, Int), (Int, Int)] {
        override def processElement(value: (Int, Int),
                                    ctx: ProcessFunction[(Int, Int), (Int, Int)]#Context,
                                    out: Collector[(Int, Int)]): Unit =
          // Route by parity of the first field into the matching side output.
          if (value._1 % 2 == 0) ctx.output(evenTag, value)
          else ctx.output(oddTag, value)
      })

    // Same observable output as the original splitStream.select("even").print().
    routed.getSideOutput(evenTag).print()
    //  routed.getSideOutput(oddTag).print()

    env.execute("learn001")
  }
}
