package com.atguigu.flink.chapter02_DataStreamAPI.sink;

import com.alibaba.fastjson.JSON;
import com.atguigu.flink.pojo.WaterSensor;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaProducer;
import org.apache.flink.streaming.connectors.kafka.KafkaSerializationSchema;
import org.apache.kafka.clients.producer.ProducerRecord;

import javax.annotation.Nullable;
import java.nio.charset.StandardCharsets;
import java.util.Properties;

/**
 * Created by Smexy on 2022/10/24
 *
 *      Emits ProducerRecord<key = sensor id (UTF-8 bytes), value = JSON-serialized WaterSensor>
 */
public class Demo21_KafkaSink2
{
    /**
     * Reads "id,ts,vc" lines from a socket, parses each into a WaterSensor,
     * and writes it to Kafka topic "topicC" as a JSON value keyed by sensor id.
     *
     * @param args unused
     * @throws Exception if the Flink job fails — propagated instead of being
     *                   swallowed so failures are visible to the caller/JVM
     */
    public static void main(String[] args) throws Exception {

        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        Properties properties = new Properties();
        properties.put("bootstrap.servers", "hadoop103:9092");

        /*
            FlinkKafkaProducer(
                String defaultTopic,                               // fallback topic when the serializer does not choose one
                KafkaSerializationSchema<IN> serializationSchema,  // turns each element into a ProducerRecord<byte[], byte[]>
                Properties producerConfig,                         // Kafka producer settings
                FlinkKafkaProducer.Semantic semantic)              // delivery guarantee

            Checkpointing is not enabled here, so EXACTLY_ONCE cannot be used;
            AT_LEAST_ONCE is the strongest available semantic.
         */
        FlinkKafkaProducer<WaterSensor> flinkKafkaProducer = new FlinkKafkaProducer<WaterSensor>(
            "没用",
            new KafkaSerializationSchema<WaterSensor>()
            {
                // Serialize one WaterSensor into a ProducerRecord:
                // value = JSON-encoded sensor, key = sensor id (both UTF-8 bytes).
                // Note: Kafka tools display raw byte[]; JSON keeps it human-readable.
                @Override
                public ProducerRecord<byte[], byte[]> serialize(WaterSensor element, @Nullable Long timestamp) {
                    byte[] value = JSON.toJSONString(element).getBytes(StandardCharsets.UTF_8);
                    byte[] key = element.getId().getBytes(StandardCharsets.UTF_8);
                    return new ProducerRecord<>("topicC", key, value);
                }
            },
            properties,
            FlinkKafkaProducer.Semantic.AT_LEAST_ONCE
        );

        env
           .socketTextStream("hadoop103", 8888)
           .map(new MapFunction<String, WaterSensor>()
           {
               // Parse one CSV line "id,ts,vc" into a WaterSensor.
               @Override
               public WaterSensor map(String value) throws Exception {
                   String[] data = value.split(",");
                   // Fail fast with a descriptive message instead of an opaque
                   // ArrayIndexOutOfBoundsException on malformed input.
                   if (data.length < 3) {
                       throw new IllegalArgumentException("Expected 'id,ts,vc' but got: " + value);
                   }
                   return new WaterSensor(
                       data[0],
                       Long.valueOf(data[1]),
                       Integer.valueOf(data[2])
                   );
               }
           })
           .addSink(flinkKafkaProducer);

        // Previously wrapped in try/catch with printStackTrace(), which hid
        // job failures; let the exception propagate via `throws Exception`.
        env.execute();
    }
}
