package flink_p1

import org.apache.flink.api.scala.createTypeInformation
import org.apache.flink.streaming.api.functions.co.{CoFlatMapFunction, CoMapFunction}
import org.apache.flink.streaming.api.scala.{ConnectedStreams, DataStream, StreamExecutionEnvironment}
import org.apache.flink.util.Collector

object FlinkTest_07_Operator_connect {

  def main(args: Array[String]): Unit = {

    val env: StreamExecutionEnvironment = StreamExecutionEnvironment.getExecutionEnvironment

    // `connect` joins two streams WITHOUT requiring them to share an element type
    // (unlike `union`). The result is a ConnectedStreams[A, B] whose operators
    // take a pair of callbacks — one per input stream.

    /**
     * Demo: connect a String stream with an Int stream, then apply
     * CoMap and CoFlatMap to merge both inputs into a single String stream.
     */

    val textStream: DataStream[String] =
      env.fromCollection(List("aa ded", "bb fef", "cc fefe", "cc ff", "cd fg", "bb dd"))
    val numberStream: DataStream[Int] =
      env.fromCollection(List(1, 2, 3))

    val connected: ConnectedStreams[String, Int] = textStream.connect(numberStream)

    // CoMap: both callbacks must produce the same output type.
    // CoMapFunction's three type parameters are: first input, second input, output.
    val mapped: DataStream[String] = connected.map(new CoMapFunction[String, Int, String] {
      // Invoked for each element of the first (String) stream.
      override def map1(value: String): String = value

      // Invoked for each element of the second (Int) stream; stringify to match output type.
      override def map2(value: Int): String = value.toString
    })

    // CoFlatMap: like CoMap, but each callback may emit zero or more elements
    // through the Collector.
    val flatMapped: DataStream[String] = connected.flatMap(new CoFlatMapFunction[String, Int, String] {
      // Split each sentence of the first stream into words and emit each one.
      override def flatMap1(value: String, out: Collector[String]): Unit = {
        for (word <- value.split(" ")) {
          out.collect(word)
        }
      }

      // Emit each number of the second stream as its string form.
      override def flatMap2(value: Int, out: Collector[String]): Unit = {
        out.collect(value.toString)
      }
    })

    //    mapped.print()
    flatMapped.print()

    // Example use case: enrich a stream by looking up license-plate numbers
    // against a dynamically updated config stream of owner names.
    //TODO omitted

    env.execute("test1")

  }
}
