package com.burges.net.dataStream.codeRuler.transform.MultiDataStream

import org.apache.flink.streaming.api.functions.co.{CoFlatMapFunction, CoMapFunction}
import org.apache.flink.streaming.api.scala.{ConnectedStreams, StreamExecutionEnvironment, _}
import org.apache.flink.util.Collector

/**
  * Author:      BurgessLee
  * Created:     2020/1/26
  * Description: Demonstrates Flink's `connect` operator, which joins two
  *              streams of different element types into one ConnectedStreams
  *              processed by CoMapFunction / CoFlatMapFunction.
  */
object ConnectDemo {

	def main(args: Array[String]): Unit = {
		val env = StreamExecutionEnvironment.getExecutionEnvironment

		// Two sample streams with different element types.
		val tupleStream = env.fromElements(("a", 3), ("d", 4), ("c", 2), ("e", 5))
		val intStream = env.fromElements(1, 2, 3, 4)

		// connect() pairs the two differently-typed streams into one ConnectedStreams;
		// each side keeps its own type until a co-function merges them.
		val connected: ConnectedStreams[(String, Int), Int] = tupleStream.connect(intStream)

		// A ConnectedStreams cannot be printed directly — map both inputs
		// to a common output type first.
		val mapped = connected.map(new CoMapFunction[(String, Int), Int, (Int, String)] {
			// First input (tuple stream): swap the fields.
			override def map1(in1: (String, Int)): (Int, String) = (in1._2, in1._1)

			// Second input (int stream): pair the value with a placeholder label.
			override def map2(in2: Int): (Int, String) = (in2, "default")
		})
		mapped.print()

		val flatMapped = connected.flatMap(new CoFlatMapFunction[(String, Int), Int, (String, Int, Int)] {
			// Operator-local state shared between the two inputs
			// (plain field — not Flink managed state, not checkpointed).
			var number = 0

			// First input: emit the tuple together with the last value
			// observed on the second input.
			override def flatMap1(in1: (String, Int), collector: Collector[(String, Int, Int)]): Unit =
				collector.collect((in1._1, in1._2, number))

			// Second input: remember the value; emits nothing itself.
			override def flatMap2(in2: Int, collector: Collector[(String, Int, Int)]): Unit =
				number = in2
		})
		flatMapped.print()

		// Arrival order between the two inputs is nondeterministic, so the result
		// above may not be what a user expects; keyBy or broadcast fixes that.
		// keyBy routes records sharing the same key to the same operator instance
		// (first stream keyed on field 1, second on field 0).
		val keyedConnect: ConnectedStreams[(String, Int), Int] = tupleStream.connect(intStream).keyBy(1, 0)

		// broadcast replicates the second stream to every parallel operator
		// instance before the co-function runs.
		val broadcastConnect: ConnectedStreams[(String, Int), Int] = tupleStream.connect(intStream.broadcast)


		env.execute("streaming connect operator")
	}

}
