package com.atguigu.sink

import java.util.Properties

import com.atguigu.api.SensorReading
import org.apache.flink.api.common.serialization.SimpleStringSchema
import org.apache.flink.streaming.api.scala._
import org.apache.flink.streaming.connectors.kafka.{FlinkKafkaConsumer011, FlinkKafkaProducer011}

/**
 * @ClassName KafkaSinkTest
 * @Description Kafka sink example: consumes sensor CSV records from one Kafka topic and writes the parsed results to another
 * @Author Mr Yang
 * @Date 2020/8/31 22:01
 * @Version 1.0
 */
object KafkaSinkTest {

  // Kafka broker address, shared by the consumer source and the producer sink
  // (previously duplicated as two separate string literals).
  private val KafkaBrokers = "localhost:9092"

  /**
   * Entry point: consumes CSV lines ("id,timestamp,temperature") from the
   * Kafka topic "helloKafka", parses each line into a [[SensorReading]]
   * rendered as a String, and writes the result to the Kafka topic "sinkTest"
   * as well as stdout.
   */
  def main(args: Array[String]): Unit = {
    import scala.util.Try

    // Obtain the execution environment (local or remote is decided at runtime).
    val env = StreamExecutionEnvironment.getExecutionEnvironment
    // Global parallelism of 1 keeps the demo's output ordering deterministic.
    env.setParallelism(1)

    // Source: consume raw String records from Kafka.
    val properties = new Properties()
    properties.setProperty("bootstrap.servers", KafkaBrokers)
    properties.setProperty("group.id", "consumer-group")
    properties.setProperty("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer")
    properties.setProperty("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer")
    properties.setProperty("auto.offset.reset", "latest")

    val inputStream = env.addSource(
      new FlinkKafkaConsumer011[String]("helloKafka", new SimpleStringSchema(), properties))

    // Transform: parse "id,timestamp,temperature" into a SensorReading and
    // stringify it for serialization. Malformed lines are dropped via
    // Try(...).toOption instead of crashing the whole job, as the original
    // unguarded map would on a bad record.
    val dataStream = inputStream.flatMap(
      line => {
        val fields = line.split(",")
        Try(SensorReading(fields(0).trim, fields(1).trim.toLong, fields(2).trim.toDouble).toString).toOption
      }
    )

    // Sink: publish the parsed records back to Kafka.
    dataStream.addSink(new FlinkKafkaProducer011[String](KafkaBrokers, "sinkTest", new SimpleStringSchema()))

    dataStream.print()
    env.execute("kafka sink test")
  }
}
