package com.hu.flink12.sensor

import java.util.Properties

import com.hu.entity.SensorReading
import org.apache.flink.api.common.serialization.SimpleStringSchema
import org.apache.flink.api.java.utils.ParameterTool
import org.apache.flink.streaming.api.scala._
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer
import org.apache.flink.util.Preconditions
import org.apache.kafka.clients.consumer.ConsumerConfig
import org.apache.kafka.common.serialization.StringDeserializer

/**
 * @Author: hujianjun
 * @Create Date: 2021/1/26 16:01
 * @Describe: Reads sensor data from Kafka
 */

object ReadSensorKafka {
  /**
   * Entry point: consumes comma-separated sensor records ("id,timestamp,temperature")
   * from a Kafka topic, parses each into a [[SensorReading]], and prints the stream.
   *
   * Required argument: `--topic <name>`.
   * Optional arguments (defaults preserve the previously hard-coded values):
   *  - `--bootstrap.servers` (default "localhost:9092")
   *  - `--group.id`          (default "consumer-group-1")
   */
  def main(args: Array[String]): Unit = {
    val parameters = ParameterTool.fromArgs(args)
    // fromArgs never returns null, so a null-check is meaningless; what actually
    // matters is that the topic (dereferenced unguarded below) was supplied.
    Preconditions.checkArgument(parameters.has("topic"), "missing required argument: --topic")

    val env = StreamExecutionEnvironment.getExecutionEnvironment

    val properties = new Properties()
    // Connection settings are overridable from the command line; the defaults
    // keep the original behaviour for existing invocations.
    properties.setProperty(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG,
      parameters.get("bootstrap.servers", "localhost:9092"))
    properties.setProperty(ConsumerConfig.GROUP_ID_CONFIG,
      parameters.get("group.id", "consumer-group-1"))
    properties.setProperty(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, classOf[StringDeserializer].getName)
    properties.setProperty(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, classOf[StringDeserializer].getName)
    // Start from the latest offset when no committed offset exists for the group.
    properties.setProperty(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "latest")

    val inputStream = env.addSource(
      new FlinkKafkaConsumer[String](parameters.get("topic"), new SimpleStringSchema(), properties))

    // A record like "sensor_1,1547718199,35.8" splits into 3 fields, so the
    // temperature lives at index 2 — the original arr(3) was out of bounds
    // and would throw ArrayIndexOutOfBoundsException on every record.
    val dataStream = inputStream.map(data => {
      val arr = data.split(",")
      SensorReading(arr(0).trim, arr(1).trim.toLong, arr(2).trim.toDouble)
    })

    dataStream.print().setParallelism(1)

    env.execute("读取传感器kafka数据")
  }
}
