package com.chief.sink.kafka;


import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.connector.kafka.source.KafkaSource;
import org.apache.flink.connector.kafka.source.enumerator.initializer.OffsetsInitializer;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaProducer;
import org.apache.flink.streaming.connectors.kafka.KafkaSerializationSchema;
import org.apache.kafka.clients.producer.ProducerRecord;

import java.nio.charset.StandardCharsets;
import java.util.Properties;

public class KafkaSinkSource {

    /**
     * Flink streaming job: consumes strings from Kafka topic {@code inTest},
     * prefixes each record with {@code "kafka map res is "}, and writes the
     * result to Kafka topic {@code outTest} with exactly-once semantics.
     *
     * <p>NOTE(review): submits to a remote cluster, so the referenced jar must
     * be a fat jar (or the Kafka connector jars must already be installed on
     * the Flink cluster).
     *
     * @param args unused
     * @throws Exception if job submission or execution fails
     */
    public static void main(String[] args) throws Exception {

        // Remote execution environment; the jar below is shipped to the cluster
        // and must bundle all non-provided dependencies.
        StreamExecutionEnvironment environment = StreamExecutionEnvironment.createRemoteEnvironment(
                "hadoop52", 8081, "E:\\bigdata-start\\flink\\flinkCode\\target\\flink-1.0-SNAPSHOT.jar");

        // Source: read topic "inTest" from the earliest available offset.
        KafkaSource<String> source = KafkaSource.<String>builder()
                .setBootstrapServers("hadoop53:9092,hadoop54:9092,hadoop55:9092")
                .setTopics("inTest")
                .setStartingOffsets(OffsetsInitializer.earliest())
                .setValueOnlyDeserializer(new SimpleStringSchema())
                .build();

        DataStream<String> kafka = environment.fromSource(source, WatermarkStrategy.noWatermarks(), "Kafka Source");

        // Transform: prefix every incoming record.
        SingleOutputStreamOperator<String> map =
                kafka.map((MapFunction<String, String>) value -> "kafka map res is " + value);

        // Sink configuration.
        Properties producerProps = new Properties();
        producerProps.setProperty("bootstrap.servers", "hadoop53:9092,hadoop54:9092,hadoop55:9092");
        // EXACTLY_ONCE relies on Kafka transactions. FlinkKafkaProducer defaults
        // transaction.timeout.ms to 1 hour, which exceeds the broker default
        // transaction.max.timeout.ms of 15 minutes and makes the producer fail
        // at startup. Cap it within the broker's default limit.
        producerProps.setProperty("transaction.timeout.ms", "900000"); // 15 minutes

        // Serializer: route every record to topic "outTest" as UTF-8 bytes
        // (value only, no key).
        KafkaSerializationSchema<String> serializationSchema = (element, timestamp) ->
                new ProducerRecord<>(
                        "outTest", // target topic
                        element.getBytes(StandardCharsets.UTF_8)); // record value

        FlinkKafkaProducer<String> myProducer = new FlinkKafkaProducer<>(
                "outTest", // default topic (the serialization schema also sets it explicitly)
                serializationSchema,
                producerProps,
                FlinkKafkaProducer.Semantic.EXACTLY_ONCE);

        map.addSink(myProducer);

        environment.execute("kafka sink");
    }
}
