package com.pw.study.flink.transfrom

import com.pw.study.flink.entities.WordBeanOne
import com.pw.study.flink.function.CustomRichMapFunction
import com.pw.study.flink.source.MyWordSource
import org.apache.flink.streaming.api.functions.co.RichCoFlatMapFunction
import org.apache.flink.streaming.api.scala._
import org.apache.flink.util.Collector

object FlatMapTFCon {
  /**
   * Demo job: connects two streams of different element types derived from the
   * same word source, key-partitions them by word, and co-flat-maps the pair.
   *
   * Fix over the original: the anonymous [[RichCoFlatMapFunction]] now emits
   * records through its `Collector` instead of only printing to stdout —
   * previously the `result` stream was always empty, so `result.print(...)`
   * could never produce output.
   */
  def main(args: Array[String]): Unit = {
    val env: StreamExecutionEnvironment = StreamExecutionEnvironment.getExecutionEnvironment
    // Single parallel subtask so the interleaved console output stays readable.
    env.setParallelism(1)
    // Input: custom word source (project-local).
    val ds: DataStream[String] = env.addSource(new MyWordSource())
    val words: DataStream[(String, Int)] = ds.map((_, 1))
    val w2: DataStream[(String, String)] = ds.map((_, "hello"))
    val dsMap: DataStream[WordBeanOne] = ds.map(new CustomRichMapFunction())

    // Connect the two differently-typed streams and co-partition both by word,
    // so matching keys from either side land in the same co-flat-map instance.
    val cc: ConnectedStreams[(String, Int), (String, String)] = words.connect(w2)
    val key: ConnectedStreams[(String, Int), (String, String)] = cc.keyBy(x => x._1, y => y._1)

    // NOTE(review): this stream has no sink attached and its result is
    // discarded; kept as-is for parity with the original — confirm whether
    // MyRichCoFlatMap relies on side effects, otherwise this line can go.
    key.flatMap(new MyRichCoFlatMap())

    val result: DataStream[String] = key.flatMap(new RichCoFlatMapFunction[(String, Int), (String, String), String] {
      // Called for every element of the first (word, count) stream.
      // FIX: collect instead of println so the element reaches the sink below.
      override def flatMap1(in1: (String, Int), collector: Collector[String]): Unit =
        collector.collect("word: " + in1._1 + " : " + in1._2)

      // Called for every element of the second (word, "hello") stream.
      override def flatMap2(in2: (String, String), collector: Collector[String]): Unit =
        collector.collect("map2: " + in2._1 + " : " + in2._2)
    })
    dsMap.print("map:")
    result.print("result: ")
    words.print("word: ")
    // Lazily-built pipeline only runs once execute() is called.
    env.execute("FlatMap: ")
  }
}
