package cn.dfun.sample.flink.apitest

import java.util.Properties

import org.apache.flink.api.common.serialization.SimpleStringSchema
import org.apache.flink.streaming.api.functions.source.SourceFunction
import org.apache.flink.streaming.api.scala._
import org.apache.flink.streaming.api.scala.StreamExecutionEnvironment
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer011

import scala.util.Random

/**
 * A single temperature-sensor reading.
 *
 * Marked `final`: case classes should not be extended — subclassing breaks
 * the generated `equals`/`hashCode`/`copy` contract.
 *
 * @param id          sensor identifier, e.g. "sensor_1"
 * @param timestamp   time of the reading (the sample data below uses epoch-second
 *                    values — NOTE(review): confirm whether consumers expect millis)
 * @param temperature measured temperature
 */
final case class SensorReading(id: String, timestamp: Long, temperature: Double)

// Demonstrates the common Flink source types. Kafka is the recommended
// production source; reading directly from MySQL or HDFS is discouraged.
object SourceTest {
  def main(args: Array[String]): Unit = {
    val env = StreamExecutionEnvironment.getExecutionEnvironment
    env.setParallelism(1)

    // 1. Source from an in-memory collection (bounded stream).
    val collectionStream = env.fromCollection(
      List(
        SensorReading("sensor_1", 1547718199, 35.8),
        SensorReading("sensor_6", 1547718201, 15.4),
        SensorReading("sensor_7", 1547718202, 6.7),
        SensorReading("sensor_10", 1547718205, 38.1)
      )
    )
    // Element types need not be uniform:
    // env.fromElements(1.0, 35, "hello")

    // 2. Source from a text file (bounded stream).
    val inputPath = "C:\\wor\\flink-sample\\src\\main\\resources\\sensor"
    val fileStream = env.readTextFile(inputPath)

    // 3. Source from Kafka (unbounded stream).
    val properties = new Properties()
    properties.setProperty("bootstrap.servers", "node-01:9092")
    properties.setProperty("group.id", "consumer-group")
    val kafkaStream = env.addSource(
      new FlinkKafkaConsumer011[String]("sensor", new SimpleStringSchema(), properties)
    )

    // 4. Custom source (see MySensorSource below).
    val customStream = env.addSource(new MySensorSource())
    customStream.print()
//    kafkaStream.print()
//    fileStream.print()
//    collectionStream.print()
    // A bounded stream finishes and the job exits on its own.
    // Parallel execution may emit records out of order.
    env.execute("source test")
  }
}

// A custom SourceFunction emitting simulated readings; the same pattern can
// be used to read from MySQL or similar external systems.
class MySensorSource() extends SourceFunction[SensorReading] {
  // Controls the emission loop in run(). Marked @volatile because cancel()
  // is invoked by Flink from a different thread than the one executing
  // run(); without the memory barrier the loop might never observe the
  // write and the source could fail to stop.
  @volatile var running: Boolean = true

  /** Requests the source to stop; run() exits at its next loop-condition check. */
  override def cancel(): Unit = running = false

  /**
   * Emits an unbounded stream of simulated sensor readings.
   *
   * Ten sensors start at a random temperature in [0, 100); every 500 ms each
   * reading takes a Gaussian (normally distributed) random-walk step and is
   * emitted with the current wall-clock time, until the source is cancelled.
   */
  override def run(ctx: SourceFunction.SourceContext[SensorReading]): Unit = {
    val rand = new Random()
    // Initial state: (sensorId, temperature) pairs for sensors 1..10.
    var curTemp = (1 to 10).map(i => ("sensor_" + i, rand.nextDouble() * 100))

    while (running) {
      // Random-walk update on top of the previous temperatures.
      curTemp = curTemp.map { case (id, temp) => (id, temp + rand.nextGaussian()) }
      val curTime = System.currentTimeMillis()
      curTemp.foreach { case (id, temp) => ctx.collect(SensorReading(id, curTime, temp)) }
      Thread.sleep(500)
    }
  }
}
