package com.codejiwei.sample

import com.codejiwei.sample.Kafka2Kafka.sourceDS
import org.apache.flink.api.common.serialization.SimpleStringSchema
import org.apache.flink.api.scala.createTypeInformation
import org.apache.flink.streaming.api.scala.{DataStream, StreamExecutionEnvironment}
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer
import org.apache.kafka.clients.consumer.ConsumerConfig

import java.util.Properties

/**
 * Author: jiwei01
 * Date: 2022/8/26 13:07
 * Package: com.codejiwei.sample
 * Description: Flink streaming sample that consumes string records from a
 *              Kafka topic, maintains a running count per distinct record
 *              value, and prints the counts to stdout.
 */
object KafkaRecordsLagSample {

  /**
   * Entry point. Consumes string records from the Kafka topic
   * "consumerRecordsLag_sample_source", keeps a running count per distinct
   * record value, and prints each updated count to stdout.
   *
   * Uses an explicit `main` instead of `extends App` to avoid the
   * DelayedInit initialization-order pitfalls of the App trait.
   */
  def main(args: Array[String]): Unit = {
    val env: StreamExecutionEnvironment = StreamExecutionEnvironment.getExecutionEnvironment
    env.setParallelism(2)

    // Kafka consumer configuration.
    // NOTE(review): broker address and group id are hard-coded for this
    // sample; externalize them (config file / args) for real deployments.
    val properties = new Properties()
    properties.setProperty(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "172.28.16.37:9092")
    properties.setProperty(ConsumerConfig.GROUP_ID_CONFIG, "zlink-sample")

    // NOTE(review): this local `sourceDS` shadows the `Kafka2Kafka.sourceDS`
    // imported at the top of the file; that import appears unused here.
    val sourceDS: DataStream[String] = env.addSource(
      new FlinkKafkaConsumer[String]("consumerRecordsLag_sample_source", new SimpleStringSchema(), properties))

    // Word-count style running aggregation: pair each record with 1, key by
    // the record value, and sum the counts. `keyBy(_._1)` replaces the
    // deprecated index-based `keyBy(0)` (removed from the newer DataStream
    // API); `sum(1)` still addresses the tuple's count field by position.
    val processDS: DataStream[String] = sourceDS
      .map(x => (x, 1))
      .keyBy(_._1)
      .sum(1)
      .map(_.toString)

    processDS.print()

    // Blocks until the job terminates (effectively forever for an unbounded
    // Kafka source).
    env.execute("KafkaRecordsLagSample")
  }

}
