package com.gitee.transformation

import java.util.concurrent.TimeUnit

import org.apache.flink.streaming.api.functions.source.SourceFunction
import org.apache.flink.streaming.api.functions.source.SourceFunction.SourceContext
import org.apache.flink.streaming.api.scala.{ConnectedStreams, DataStream, StreamExecutionEnvironment,_}

/*
  `connect` supports joining two DataStreams of different element types; the type
  parameters of the returned ConnectedStreams[Long, String] are the element types
  of the two connected streams.
  Purpose: merge two streams whose element types differ into a single stream.
*/
object ConnectTest {

  /**
    * Builds and runs the demo job: connects a Long stream with a String stream,
    * then uses a two-function map (CoMap) to unify both sides to String so the
    * merged stream can be printed.
    */
  def main(args: Array[String]): Unit = {
    val senv: StreamExecutionEnvironment = StreamExecutionEnvironment.getExecutionEnvironment

    val longData: DataStream[Long] = senv.addSource(new MyLongSourceScala)
    val stringData: DataStream[String] = senv.addSource(new MyStringSourceScala)

    // connect keeps both element types; the map below supplies one function per side.
    val connData: ConnectedStreams[Long, String] = longData.connect(stringData)
    val result: DataStream[String] = connData.map(num => "Long_" + num, str => str)

    result.print()
    senv.execute()
  }

  /**
    * Custom source emitting an increasing sequence of Longs starting from 1,
    * one element per second.
    */
  class MyLongSourceScala extends SourceFunction[Long] {
    var count = 1L
    // cancel() is invoked by the framework from a different thread than run(),
    // so the stop flag must be @volatile for the emit loop to observe it.
    @volatile var isRunning = true

    /** Emits the current counter once per second until the source is cancelled. */
    override def run(ctx: SourceContext[Long]): Unit = {
      while (isRunning) {
        ctx.collect(count)
        count += 1
        TimeUnit.SECONDS.sleep(1)
      }
    }

    /** Called by the framework (from another thread) to stop the source. */
    override def cancel(): Unit = {
      isRunning = false
    }
  }

  /**
    * Custom source emitting the strings "str_1", "str_2", ... with an
    * increasing counter, one element per second.
    */
  class MyStringSourceScala extends SourceFunction[String] {
    var count = 1L
    // Same as MyLongSourceScala: the flag is written by cancel() and read by
    // run() on different threads, so it must be @volatile.
    @volatile var isRunning = true

    /** Emits "str_" + counter once per second until the source is cancelled. */
    override def run(ctx: SourceContext[String]): Unit = {
      while (isRunning) {
        ctx.collect("str_" + count)
        count += 1
        TimeUnit.SECONDS.sleep(1)
      }
    }

    /** Called by the framework (from another thread) to stop the source. */
    override def cancel(): Unit = {
      isRunning = false
    }
  }
}
