package flink_p1

import org.apache.flink.api.common.serialization.SimpleStringSchema
import org.apache.flink.api.common.typeinfo.TypeInformation
import org.apache.flink.streaming.api.scala.{DataStream, StreamExecutionEnvironment, createTuple2TypeInformation, createTypeInformation}
import org.apache.flink.streaming.connectors.kafka.{FlinkKafkaConsumer, KafkaDeserializationSchema}
import org.apache.kafka.clients.consumer.{ConsumerConfig, ConsumerRecord}
import org.apache.kafka.common.serialization.StringDeserializer

import java.util.Properties

/**
 * Demo Flink job that reads messages from the Kafka topic "flink-test".
 *
 * Two consumer variants are shown:
 *   1. a (key, value) consumer built from a custom [[KafkaDeserializationSchema]]
 *      (constructed for illustration, not attached to the pipeline), and
 *   2. a value-only consumer using [[SimpleStringSchema]], which is the one
 *      actually wired into the stream and printed to stdout.
 */
object FlinkTest_05_kafka {

  def main(args: Array[String]): Unit = {

    val env: StreamExecutionEnvironment = StreamExecutionEnvironment.getExecutionEnvironment

    // Consumer configuration. Use the ConsumerConfig constants consistently
    // instead of mixing raw literals and constants.
    val prop = new Properties()
    prop.setProperty(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "127.0.0.1:9092")
    prop.setProperty(ConsumerConfig.GROUP_ID_CONFIG, "flink-group1")
    prop.setProperty(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, classOf[StringDeserializer].getName)
    prop.setProperty(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, classOf[StringDeserializer].getName)

    // Variant 1: read both key and value as a (String, String) tuple.
    // NOTE: this consumer is built for demonstration only and is never added
    // as a source below — only the value-only consumer feeds the pipeline.
    val flinkKafkaConsumer: FlinkKafkaConsumer[(String, String)] = new FlinkKafkaConsumer[(String, String)]("flink-test",
      new KafkaDeserializationSchema[(String, String)] {

        // The stream is unbounded; no record marks end-of-stream.
        override def isEndOfStream(t: (String, String)): Boolean = false

        override def deserialize(consumerRecord: ConsumerRecord[Array[Byte], Array[Byte]]): (String, String) = {
          // Kafka records frequently carry a null key, and tombstone records
          // carry a null value — guard both to avoid NullPointerException.
          val key = Option(consumerRecord.key()).map(new String(_, "UTF-8")).getOrElse("")
          val value = Option(consumerRecord.value()).map(new String(_, "UTF-8")).getOrElse("")
          (key, value)
        }

        // Tell Flink the element type produced by this schema.
        override def getProducedType: TypeInformation[(String, String)] = {
          createTuple2TypeInformation(createTypeInformation[String], createTypeInformation[String])
        }
      }, prop)

    // Variant 2: read only the record value as a UTF-8 string.
    val kafkaConsumer_onlyvalue = new FlinkKafkaConsumer[String]("flink-test", new SimpleStringSchema(), prop)

    // Wire the value-only consumer into the pipeline and print each record.
    val kafkaStream: DataStream[String] = env.addSource(kafkaConsumer_onlyvalue)
    kafkaStream.print()

    // Triggers lazy job-graph construction and runs the job.
    env.execute("kafka test app")
  }

}
