package com.shujia.flink.sink;

import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.api.common.typeinfo.Types;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.connector.base.DeliveryGuarantee;
import org.apache.flink.connector.kafka.sink.KafkaRecordSerializationSchema;
import org.apache.flink.connector.kafka.sink.KafkaSink;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;

import java.util.ArrayList;
import java.util.List;

/**
 * Demo: write a word-count stream to Kafka using Flink's {@link KafkaSink}.
 *
 * <p>Builds a bounded stream from a local collection (the job finishes once all
 * elements are processed — mainly for learning/testing), counts occurrences per
 * word, formats each result as {@code "word,count"}, and sinks it to topic "t2".
 */
public class Demo02KafkaSink {
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        // Program to the interface (List), not the concrete ArrayList.
        List<String> words = new ArrayList<>();
        words.add("java");
        words.add("java");
        words.add("java");
        words.add("scala");
        words.add("scala");
        words.add("python");

        // Bounded source: emits the collection elements, then terminates.
        DataStreamSource<String> listDS = env.fromCollection(words);

        // Build the KafkaSink: serialize each record's value as a plain UTF-8 string
        // and write to topic "t2" on the given brokers.
        KafkaSink<String> kafkaSink = KafkaSink.<String>builder()
                .setBootstrapServers("master:9092,node1:9092,node2:9092") // broker addresses
                .setRecordSerializer(
                        KafkaRecordSerializationSchema
                                .<String>builder()
                                .setValueSerializationSchema(new SimpleStringSchema())
                                .setTopic("t2")
                                .build()
                )
                // Optional: EXACTLY_ONCE needs Kafka transactions (transactional id prefix,
                // checkpointing enabled). Default is AT_LEAST_ONCE.
                // Note: the method is setDeliveryGuarantee — the original commented-out
                // call used a misspelled setDeliverGuarantee and would not compile.
//                .setDeliveryGuarantee(DeliveryGuarantee.EXACTLY_ONCE)
                .build();

        // Word count: (word, 1) -> keyBy word -> running sum -> "word,count" string.
        // Types.TUPLE(...) is required because the lambda's Tuple2 type is erased.
        listDS.map(w -> Tuple2.of(w, 1), Types.TUPLE(Types.STRING, Types.INT))
                .keyBy(t2 -> t2.f0)
                .sum(1)
                .map(t2 -> t2.f0 + "," + t2.f1)
                .sinkTo(kafkaSink);

        env.execute();
    }
}
