package com.bw.app.dwd;

import com.alibaba.fastjson.JSON;
import org.apache.flink.api.common.serialization.SerializationSchema;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaProducer;
import org.apache.flink.streaming.connectors.kafka.KafkaSerializationSchema;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.clients.producer.ProducerRecord;

import javax.annotation.Nullable;
import java.nio.charset.StandardCharsets;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Date;
import java.util.Properties;


/**
 * Flink job that publishes a small fixed set of {@code WaterSensor} readings
 * to the Kafka topic {@code topic_sensor} as JSON strings, using
 * exactly-once (transactional) producer semantics.
 */
public class Test {

    /** Kafka broker list used for the producer connection. */
    private static final String BOOTSTRAP_SERVERS_CONFIG =
            "hadoop102:9092,hadoop103:9092,hadoop104:9092";

    /** Kafka topic the sensor readings are written to. */
    private static final String TOPIC = "topic_sensor";

    public static void main(String[] args) throws Exception {
        // Sample payload: WaterSensor(id, epoch-millis timestamp, vc).
        ArrayList<WaterSensor> waterSensors = new ArrayList<>();
        waterSensors.add(new WaterSensor("sensor_1", 1607527992000L, 20));
        waterSensors.add(new WaterSensor("sensor_1", 1607527994000L, 50));
        waterSensors.add(new WaterSensor("sensor_1", 1607527996000L, 50));
        waterSensors.add(new WaterSensor("sensor_2", 1607527993000L, 10));
        waterSensors.add(new WaterSensor("sensor_2", 1607527995000L, 30));

        Properties properties = new Properties();
        properties.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, BOOTSTRAP_SERVERS_CONFIG);
        // EXACTLY_ONCE uses Kafka transactions. Flink's default producer
        // transaction timeout (1 hour) exceeds the broker default
        // transaction.max.timeout.ms (15 minutes), which makes the producer
        // fail at startup unless the timeout is lowered explicitly.
        properties.put(ProducerConfig.TRANSACTION_TIMEOUT_CONFIG, String.valueOf(15 * 60 * 1000));

        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(2);

        env
                .fromCollection(waterSensors)
                .map(JSON::toJSONString)
                .addSink(new FlinkKafkaProducer<>(
                        TOPIC,
                        // Typed schema (no raw types): element is the JSON string
                        // produced by the map() above.
                        new KafkaSerializationSchema<String>() {
                            @Override
                            public ProducerRecord<byte[], byte[]> serialize(
                                    String element, @Nullable Long timestamp) {
                                // Encode explicitly as UTF-8; getBytes() without a
                                // charset depends on the platform default encoding.
                                return new ProducerRecord<>(
                                        TOPIC, null, element.getBytes(StandardCharsets.UTF_8));
                            }
                        },
                        properties,
                        FlinkKafkaProducer.Semantic.EXACTLY_ONCE));

        env.execute();
    }
}
