package com.z.demo

import com.z.demo.filter.NullFilter
import org.apache.flink.api.common.serialization.SimpleStringSchema
import org.apache.flink.streaming.api.scala._
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer

import java.io.{File, FileInputStream}
import java.util.Properties
/**
 * @Author wenz.ma
 * @Date 2021/10/26 13:48
 * @Desc Minimal Kafka word-count job: consumes String records from a Kafka
 *       topic, filters out null/empty records, splits lines on spaces, and
 *       prints a running count per word.
 */
object KafkaWordCount {
  def main(args: Array[String]): Unit = {
    val env = StreamExecutionEnvironment.getExecutionEnvironment
    // Kafka topic to consume from.
    val topic = "test-topic"

    // Load Kafka consumer properties from config.txt.
    // Close the stream in all cases — the original leaked the FileInputStream
    // (never closed, and lost entirely if load() threw).
    val props = new Properties()
    val in = new FileInputStream("config.txt")
    try props.load(in)
    finally in.close()

    // Source: consume String records from Kafka using the loaded properties.
    val inputStream = env.addSource(new FlinkKafkaConsumer[String](topic, new SimpleStringSchema(), props))

    val resultStream = inputStream
      .filter(NullFilter)    // drop null/empty records via the project's custom filter
      .flatMap(_.split(" ")) // tokenize each line on single spaces
      .map((_, 1))           // pair each word with an initial count of 1
      .keyBy(_._1)           // partition the stream by the word itself
      .sum(1)                // running sum of the count field (tuple index 1)
    resultStream.print("kafka print") // sink: print each update with a prefix label

    env.execute("kafka word count") // job name shown in the Flink dashboard
  }
}
