package com.atguigu.flink.charkoer05;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONArray;
import com.alibaba.fastjson.JSONObject;
import com.atguigu.flink.been.WaterSensor;
import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.client.program.StreamContextEnvironment;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaProducer;
import org.apache.flink.streaming.connectors.kafka.KafkaSerializationSchema;
import org.apache.flink.util.Collector;
import org.apache.kafka.clients.producer.ProducerRecord;

import javax.annotation.Nullable;
import java.util.ArrayList;
import java.util.Properties;

import static java.nio.charset.StandardCharsets.UTF_8;

/**
 * Streaming word count that reads lines from a socket (hadoop162:9999),
 * counts words, and writes the running totals to Kafka topic {@code s1}
 * as UTF-8 records of the form {@code word_count}.
 *
 * <p>Requires a reachable socket source and Kafka broker at hadoop162:9092;
 * {@code main} blocks in {@code env.execute()} until the job is cancelled.
 */
public class FlinkUseKafkaSink {

    /** All records are routed to this topic by the serialization schema. */
    private static final String TOPIC = "s1";

    public static void main(String[] args) throws Exception {

        // Use the public factory; StreamContextEnvironment is a client-internal class.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        // Fully parameterized schema (the original used raw types). The topic set
        // here on each ProducerRecord is what Kafka actually receives — it always
        // overrides the default topic passed to the FlinkKafkaProducer constructor,
        // so both must agree to avoid confusion.
        KafkaSerializationSchema<Tuple2<String, Integer>> schema =
                new KafkaSerializationSchema<Tuple2<String, Integer>>() {
                    @Override
                    public ProducerRecord<byte[], byte[]> serialize(
                            Tuple2<String, Integer> element, @Nullable Long timestamp) {
                        // Encode as "word_count" with an explicit charset.
                        return new ProducerRecord<>(
                                TOPIC, (element.f0 + "_" + element.f1).getBytes(UTF_8));
                    }
                };

        Properties prop = new Properties();
        prop.setProperty("bootstrap.servers", "hadoop162:9092");

        env.socketTextStream("hadoop162", 9999)
                .flatMap(new FlatMapFunction<String, Tuple2<String, Integer>>() {
                    @Override
                    public void flatMap(String value, Collector<Tuple2<String, Integer>> out) throws Exception {
                        // Split each input line on single spaces and emit (word, 1) pairs.
                        for (String word : value.split(" ")) {
                            out.collect(Tuple2.of(word, 1));
                        }
                    }
                }).keyBy(t -> t.f0)
                .sum(1) // running count per word
                .addSink(new FlinkKafkaProducer<Tuple2<String, Integer>>(
                        TOPIC, // default topic; kept consistent with the schema above
                        schema,
                        prop,
                        // AT_LEAST_ONCE: duplicates possible on failure/restart,
                        // but no records are lost.
                        FlinkKafkaProducer.Semantic.AT_LEAST_ONCE
                ));

        env.execute();
    }
}
