package com.wudl.flink.core

import org.apache.flink.streaming.api.functions.source.SourceFunction
import org.apache.flink.streaming.api.scala.StreamExecutionEnvironment

import scala.util.Random
/**
 * @ClassName : CustomerSource
 * @Description : Custom data source that generates random StationLog records
 * @Author :wudl
 * @Date: 2020-12-02 11:23
 */
object CustomerSource {

  def main(args: Array[String]): Unit = {
    // Single-parallelism environment so the printed output is easy to follow.
    val environment = StreamExecutionEnvironment.getExecutionEnvironment
    environment.setParallelism(1)

    // Brings in the implicit TypeInformation needed by addSource.
    import org.apache.flink.streaming.api.scala._

    // Attach the custom source and print every generated record to stdout.
    val logs: DataStream[StationLog] = environment.addSource(new MyCustomerSource)
    logs.print()

    environment.execute()
  }

  // Custom source implemented via the SourceFunction interface.
  /**
   * Custom source that emits a batch of five random [[StationLog]] records
   * every two seconds until the job is cancelled.
   */
  class MyCustomerSource extends SourceFunction[StationLog] {
    // Loop guard for the emit loop. Must be @volatile: cancel() is invoked by
    // Flink from a different thread than run(), and without the memory barrier
    // the loop may never observe the updated value and never terminate.
    @volatile var flag = true

    /**
     * Main source loop: emits five random call records per iteration, then
     * sleeps for two seconds. Keeps running until cancel() clears the flag.
     *
     * @param sourceContext context used to emit records downstream
     */
    override def run(sourceContext: SourceFunction.SourceContext[StationLog]):
    Unit = {
      val random = new Random()
      // Possible call outcomes for a generated record.
      val types = Array("fail", "busy", "barring", "success")
      while (flag) { // keep producing until the stream is cancelled
        (1 to 5).foreach { _ =>
          val callOut = "1860000%04d".format(random.nextInt(10000))
          val callIn = "1890000%04d".format(random.nextInt(10000))
          sourceContext.collect(
            new StationLog(
              "station_" + random.nextInt(10),
              callOut,
              callIn,
              // Index by the array's actual size instead of a magic constant.
              types(random.nextInt(types.length)),
              System.currentTimeMillis(),
              0
            )
          )
        }
        Thread.sleep(2000) // throttle: one batch every 2 seconds
      }
    }

    /** Signals the run() loop to stop; called by Flink on job cancellation. */
    override def cancel(): Unit = flag = false
  }


}
