package com.codejiwei.sample

import org.apache.flink.api.common.eventtime.WatermarkStrategy
import org.apache.flink.api.common.serialization.SimpleStringSchema
import org.apache.flink.api.scala.createTypeInformation
import org.apache.flink.connector.kafka.sink.{KafkaRecordSerializationSchema, KafkaSink}
import org.apache.flink.connector.kafka.source.KafkaSource
import org.apache.flink.connector.kafka.source.enumerator.initializer.OffsetsInitializer
import org.apache.flink.streaming.api.scala.{DataStream, StreamExecutionEnvironment}
import org.apache.flink.streaming.connectors.kafka.{FlinkKafkaConsumer, FlinkKafkaProducer}

/**
 * Author: jiwei01
 * Date: 2022/8/23 14:26
 * Package: com.codejiwei.sample
 * Description:
 */
/**
 * Kafka-to-Kafka streaming word-count job.
 *
 * Reads raw strings from a source topic, maintains a running count per
 * distinct string value, and writes each updated `(value, count)` tuple
 * (as its `toString` form) to a sink topic.
 */
object Kafka2Kafka {

  // Connection / topic configuration grouped in one place so environments
  // can be swapped without hunting through the pipeline wiring.
  private val BootstrapServers = "172.29.148.56:9092"
  private val SourceTopic      = "sample_jiwei01_source"
  private val SinkTopic        = "sample_jiwei01_sink"
  private val ConsumerGroup    = "zlink-sample"

  // Explicit main() instead of `extends App`: the App trait's delayed
  // initialization has well-known ordering pitfalls for non-trivial programs.
  def main(args: Array[String]): Unit = {
    val env: StreamExecutionEnvironment = StreamExecutionEnvironment.getExecutionEnvironment
    env.setParallelism(2)

    // Source: consume the topic from the earliest offset, values as UTF-8 strings.
    val source: KafkaSource[String] = KafkaSource.builder[String]()
      .setBootstrapServers(BootstrapServers)
      .setTopics(SourceTopic)
      .setGroupId(ConsumerGroup)
      .setStartingOffsets(OffsetsInitializer.earliest())
      .setValueOnlyDeserializer(new SimpleStringSchema())
      .build()

    // No watermarks: the job is purely count-based, event time is unused.
    val sourceDS: DataStream[String] =
      env.fromSource(source, WatermarkStrategy.noWatermarks(), "Kafka Source 01")

    // Running count per distinct input string. Key selector `_._1` replaces
    // the deprecated positional keyBy(0) (removed in newer Flink versions).
    val processDS: DataStream[String] = sourceDS
      .map(x => (x, 1))
      .keyBy(_._1)
      .sum(1)
      .map(_.toString)

    // Sink: write the stringified tuples to the output topic.
    val sink: KafkaSink[String] = KafkaSink.builder[String]()
      .setBootstrapServers(BootstrapServers)
      .setRecordSerializer(KafkaRecordSerializationSchema.builder()
        .setTopic(SinkTopic)
        .setValueSerializationSchema(new SimpleStringSchema())
        .build())
      .build()

    processDS.sinkTo(sink).name("Kafka Sink 01")

    env.execute("Kafka2kafkaSample")
  }
}
