package cn.tedu.stream.transformation

import org.apache.flink.streaming.api.functions.source.SourceFunction
import org.apache.flink.streaming.api.scala.{ConnectedStreams, DataStream, StreamExecutionEnvironment}

/**
 * @author Amos
 * @date 2022/5/22
 */

object StreamConnectDemo {
  /**
   * Demo of `connect`: joins two streams of different element types into one
   * `ConnectedStreams`, which preserves each side's original type, then maps
   * both sides back into a single stream.
   */
  def main(args: Array[String]): Unit = {
    // Execution environment
    val env = StreamExecutionEnvironment.getExecutionEnvironment

    // Build two sources; this import provides the implicit TypeInformation
    // instances the Scala API needs.
    import org.apache.flink.api.scala._
    val source1 = env.addSource(new MySource)
    val source2 = env.addSource(new MySource)

    // connect keeps each stream's own element type: ConnectedStreams[String, Long]
    val connectedStream: ConnectedStreams[String, Long] =
      source1.map("source1:" + _).connect(source2)

    // Map each side to String explicitly. The original `map(x => x, y => y)`
    // inferred DataStream[Any] — an accidental type widening. Converting the
    // Long side with toString yields a properly typed stream; the printed
    // output is unchanged, since Longs render identically as text.
    val result: DataStream[String] = connectedStream.map(x => x, y => y.toString)
    result.print()
    env.execute()
  }

}

// Custom source: emits an unbounded sequence of Longs starting at 1,
// producing one element per second until cancelled.
class MySource extends SourceFunction[Long] {
  // Next value to emit.
  var count = 1L
  // Flink calls cancel() from a different thread than the one running run();
  // without @volatile the emit loop has no visibility guarantee for the flag
  // write and might never observe cancellation.
  @volatile var isRunning = true

  // Emit count, then increment, roughly once per second.
  override def run(ctx: SourceFunction.SourceContext[Long]): Unit = {
    while (isRunning) {
      ctx.collect(count)
      count += 1
      Thread.sleep(1000)
    }
  }

  // Called by the framework to stop the source: flips the flag so the
  // run() loop exits after its current iteration.
  override def cancel(): Unit = {
    isRunning = false
  }
}
