package day1

import org.apache.flink.api.common.eventtime.WatermarkStrategy
import org.apache.flink.api.common.serialization.SimpleStringSchema
import org.apache.flink.configuration.{Configuration, RestOptions}
import org.apache.flink.connector.kafka.source.KafkaSource
import org.apache.flink.connector.kafka.source.enumerator.initializer.OffsetsInitializer
import org.apache.flink.streaming.api.scala._
import org.apache.kafka.clients.consumer.OffsetResetStrategy

object _02_WordCountStreaming {

  /**
   * Streaming word count demo: consumes whitespace-separated text from the
   * "flinkdemo" Kafka topic and prints a running count per word to stdout.
   *
   * Runs on a local mini-cluster with the Flink web UI enabled
   * (http://localhost:9898) so the job graph can be inspected while it runs.
   */
  def main(args: Array[String]): Unit = {

    // Pin the local Flink web UI to a fixed port instead of a random one.
    val conf = new Configuration()
    conf.setInteger(RestOptions.PORT, 9898)

    val env = StreamExecutionEnvironment.createLocalEnvironmentWithWebUI(conf)

    // NOTE(review): the original list repeated "bigdata002:9092" twice —
    // likely a typo for "bigdata003:9092". Deduplicated here (harmless for
    // Kafka bootstrap); confirm the third broker's hostname against the cluster.
    val kafkaSource = KafkaSource.builder[String]()
      .setBootstrapServers("bigdata001:9092,bigdata002:9092")
      .setTopics("flinkdemo")
      .setGroupId("idea-demo")
      // Resume from the group's committed offsets; fall back to LATEST
      // when no committed offset exists for a partition.
      .setStartingOffsets(OffsetsInitializer.committedOffsets(OffsetResetStrategy.LATEST))
      .setValueOnlyDeserializer(new SimpleStringSchema())
      .build()

    env.fromSource(kafkaSource, WatermarkStrategy.noWatermarks(), "kafka_source")
      .flatMap(_.split(" "))   // one record per whitespace-separated token
      .map(word => (word, 1))  // pair each word with an initial count of 1
      .keyBy(_._1)             // partition the stream by the word itself
      .sum(1)                  // keyed running sum of the count field
      .print()

    // The DataStream API is lazy: nothing executes until execute() is called.
    env.execute("wc scala streaming.....")

  }

}
