package com.flink.com.source

import java.util.Properties

import org.apache.flink.api.common.serialization.SimpleStringSchema
import org.apache.flink.streaming.api.scala.StreamExecutionEnvironment
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer
import org.apache.kafka.common.serialization.StringDeserializer

/**
 * Reads data from Kafka as a Flink streaming source.
 */
object KafkaSource {

  /**
   * Entry point: builds a Flink streaming job that consumes string records
   * from the Kafka topic `flink_topic` and prints them to stdout.
   *
   * @param args command-line arguments (unused)
   */
  def main(args: Array[String]): Unit = {
    // 1. Set up the streaming execution environment.
    val env: StreamExecutionEnvironment = StreamExecutionEnvironment.getExecutionEnvironment
    env.setParallelism(3)

    // 2. Bring Flink's Scala implicits (TypeInformation derivation) into scope.
    import org.apache.flink.streaming.api.scala._

    // 3. Kafka consumer configuration.
    val props = new Properties()
    props.setProperty("bootstrap.servers", "node01.com:6667")
    props.setProperty("group.id", "fink01")
    // NOTE: key/value deserializer properties are ignored by FlinkKafkaConsumer —
    // deserialization is handled by the SimpleStringSchema passed below.
    props.setProperty("key.deserializer", classOf[StringDeserializer].getName)
    props.setProperty("value.deserializer", classOf[StringDeserializer].getName)
    // Start from the latest offset when no committed offset exists for the group.
    props.setProperty("auto.offset.reset", "latest")

    // 4. Create the Kafka source and attach it to the environment.
    val stream = env.addSource(
      new FlinkKafkaConsumer[String]("flink_topic", new SimpleStringSchema(), props)
    )

    // 5. Sink: print each record to stdout (side effect; sink handle not needed).
    stream.print()

    // 6. Launch the job with an explicit name for easier identification in the UI.
    env.execute("KafkaSource")
  }

}
