package com.desheng.bigdata.flink.stream.sink

import java.util.Properties

import com.desheng.bigdata.flink.deserialization.WorkerDeSerializationSchema
import org.apache.flink.api.common.serialization.SimpleStringSchema
import org.apache.flink.streaming.api.scala._
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer010

/**
  * Custom-source example: reading a stream from Kafka.
  * This program plays the role of a Kafka consumer.
  */
object _02SourceFunctionFromKafka {

    def main(args: Array[String]): Unit = {
        val environment = StreamExecutionEnvironment.getExecutionEnvironment

        // Kafka consumer configuration for the 0.10 connector.
        val consumerProps = new Properties()
        consumerProps.put("bootstrap.servers", "bigdata01:9092,bigdata02:9092,bigdata03:9092")
        consumerProps.put("group.id", "flink-kafka-group-2")
        consumerProps.put("auto.offset.reset", "earliest")
        consumerProps.put("enable.auto.commit", "true")

        // Attach the Kafka consumer as the source of the stream.
        val workerStream: DataStream[Worker] =
            environment.addSource(createKafkaConsumer("worker", consumerProps))

        workerStream.print()

        // getClass inside the object resolves to this object's runtime class,
        // so the job name is identical to the original interpolated form.
        environment.execute(getClass.getSimpleName)
    }

    /**
      * Builds a Kafka 0.10 consumer that deserializes each record into a [[Worker]].
      *
      * @param topic Kafka topic to subscribe to
      * @param props Kafka consumer configuration (bootstrap servers, group id, ...)
      * @return a Flink source function backed by the given topic
      */
    def createKafkaConsumer(topic: String, props: Properties): FlinkKafkaConsumer010[Worker] =
        new FlinkKafkaConsumer010[Worker](topic, new WorkerDeSerializationSchema(), props)
}
