package com.ww.flink

import org.apache.flink.api.common.serialization.SimpleStringSchema
import org.apache.flink.streaming.api.scala._
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer
import org.apache.kafka.common.serialization.{StringDeserializer, StringSerializer}

import java.util.Properties


/**
 * Minimal Flink streaming job: consume string records from the Kafka topic
 * "flink-kafka", split each record on tab characters, and print the tokens.
 */
object Flink_try00 {
  def main(args: Array[String]): Unit = {

    // Kafka connection settings.
    // NOTE: the original set key.serializer/value.serializer, which are
    // *producer* configs — a consumer takes key.deserializer/value.deserializer.
    // FlinkKafkaConsumer installs its own ByteArrayDeserializer internally,
    // but group.id is required for offset tracking and was missing.
    val properties = new Properties()
    properties.setProperty("bootstrap.servers", "192.168.102.19:9092")
    properties.setProperty("group.id", "flink-try00-consumer")
    properties.setProperty("key.deserializer", classOf[StringDeserializer].getName)
    properties.setProperty("value.deserializer", classOf[StringDeserializer].getName)

    // Streaming execution environment.
    val env: StreamExecutionEnvironment = StreamExecutionEnvironment.getExecutionEnvironment

    // Source: topic "flink-kafka", each record decoded as a UTF-8 string.
    val kafkaStream: DataStream[String] = env.addSource(
      new FlinkKafkaConsumer[String]("flink-kafka", new SimpleStringSchema(), properties))

    // Split each record on tab characters into individual tokens.
    val tokens: DataStream[String] = kafkaStream.flatMap(_.split("\t"))

    // The original left the stream unused with no sink attached, so the job
    // had no observable output. Print to stdout so the pipeline does something.
    tokens.print()

    // Name the job so it is identifiable in the Flink UI.
    env.execute("Flink_try00")
  }
}
