package com.yeming.flink.practice.source

import org.apache.flink.streaming.api.functions.source.SourceFunction
import org.apache.flink.streaming.api.scala._

import scala.util.Random

/**
 * 自定义的基站数据流，需求：每隔两秒钟，生成十条随机的基站通话数据
 */
/**
 * Custom station-log source: every two seconds, emits ten random
 * call records until the job cancels the source.
 */
class MyCustomerSource extends SourceFunction[StationLog] {

  // cancel() is invoked by Flink from a different thread than run();
  // @volatile guarantees the emitting loop observes the flag change
  // and actually terminates (without it the write may never be seen).
  @volatile var flag = true

  /**
   * Main loop: while running, emits a batch of ten random StationLog
   * records, then sleeps two seconds before the next batch.
   *
   * @param sourceContext Flink context used to emit records downstream.
   */
  override def run(sourceContext: SourceFunction.SourceContext[StationLog]): Unit = {
    val r = new Random()
    val types = Array("fail", "refuse", "success", "busy")
    while (flag) {
      // Emit ten records directly; no need to build an intermediate collection.
      (1 to 10).foreach { _ =>
        val callOut = "1860000%04d".format(r.nextInt(10000)) // calling number
        val callIn = "1890000%04d".format(r.nextInt(10000))  // called number
        sourceContext.collect(
          new StationLog(
            "station_" + r.nextInt(10),
            callOut,
            callIn,
            types(r.nextInt(4)),
            System.currentTimeMillis(),
            r.nextInt(100).toLong // call duration in seconds — presumably; TODO confirm against StationLog
          )
        )
      }
      Thread.sleep(2000) // throttle: one batch every two seconds
    }
  }

  /** Stops the source by flipping the flag; run()'s loop then exits. */
  override def cancel(): Unit = {
    flag = false
  }
}

object CustomSource {

  /**
   * Entry point: wires the custom random station-log source into a
   * streaming job and prints every emitted record to stdout.
   */
  def main(args: Array[String]): Unit = {
    // Streaming environment, constrained to a single parallel task
    // so the printed output is easy to follow.
    val env = StreamExecutionEnvironment.getExecutionEnvironment
    env.setParallelism(1)

    // Custom source -> (no transformation) -> print sink.
    env
      .addSource(new MyCustomerSource)
      .print()

    env.execute("自定义Source")
  }
}
