package com.zyh.flink.day02.sink;

import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.core.JsonProcessingException;
import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.databind.ObjectMapper;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaProducer;
import org.apache.flink.streaming.connectors.kafka.KafkaSerializationSchema;
import org.apache.flink.util.Collector;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.clients.producer.ProducerRecord;

import javax.annotation.Nullable;
import java.nio.charset.StandardCharsets;
import java.util.Properties;

/**
 * Streaming word count that reads whitespace-delimited text from a socket
 * ("hadoop10":9999), keeps a running per-word count, and sinks every updated
 * count to the Kafka topic "topic-flink-sink" as a JSON-encoded Tuple2,
 * using exactly-once (transactional) delivery.
 */
public class SinkToKafkaTest {
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment environment = StreamExecutionEnvironment.getExecutionEnvironment();
        DataStreamSource<String> hadoop10 = environment.socketTextStream("hadoop10", 9999);

        // Split each line on runs of whitespace, emit (word, 1) pairs,
        // then maintain a running sum per word.
        SingleOutputStreamOperator<Tuple2<String, Integer>> result = hadoop10.flatMap(new FlatMapFunction<String, Tuple2<String, Integer>>() {
            @Override
            public void flatMap(String s, Collector<Tuple2<String, Integer>> collector) throws Exception {
                String[] words = s.split("\\s+");
                for (String word : words) {
                    collector.collect(Tuple2.of(word, 1));
                }
            }
        }).keyBy(t -> t.f0)
                .sum(1);

        Properties props = new Properties();
        props.setProperty(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "kafka24:9092");
        // EXACTLY_ONCE uses Kafka transactions. Flink's default transaction
        // timeout (1h) exceeds the broker default transaction.max.timeout.ms
        // (15min), which makes the producer fail at startup unless the
        // timeout is lowered explicitly. 900000 ms = 15 minutes.
        props.setProperty(ProducerConfig.TRANSACTION_TIMEOUT_CONFIG, "900000");

        FlinkKafkaProducer<Tuple2<String, Integer>> producer = new FlinkKafkaProducer<>("topic-flink-sink", new KafkaSerializationSchema<Tuple2<String, Integer>>() {
            // ObjectMapper is not Serializable, so it must be transient and
            // created lazily on the task manager. The original code never
            // initialized this field at all, causing an NPE on the first record.
            private transient ObjectMapper objectMapper;

            @Override
            public ProducerRecord<byte[], byte[]> serialize(Tuple2<String, Integer> wordCount, @Nullable Long timestamp) {
                if (objectMapper == null) {
                    objectMapper = new ObjectMapper();
                }
                try {
                    // Encode explicitly as UTF-8 instead of relying on the
                    // platform default charset.
                    byte[] value = objectMapper.writeValueAsString(wordCount).getBytes(StandardCharsets.UTF_8);
                    return new ProducerRecord<>("topic-flink-sink", value);
                } catch (JsonProcessingException e) {
                    // Fail loudly instead of printing the stack trace and then
                    // dereferencing the still-null json string (second NPE in
                    // the original).
                    throw new IllegalStateException("Failed to serialize record to JSON: " + wordCount, e);
                }
            }
        }, props, FlinkKafkaProducer.Semantic.EXACTLY_ONCE);

        result.addSink(producer);
        environment.execute("sinkToKafkaJob");
    }
}
