package com.hkbigdata.source;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.hkbigdata.bean.Sensor;
import com.hkbigdata.bean.WaterSensor;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.functions.IterationRuntimeContext;
import org.apache.flink.api.common.functions.JoinFunction;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.functions.RuntimeContext;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.connector.kafka.source.KafkaSource;
import org.apache.flink.connector.kafka.source.enumerator.initializer.OffsetsInitializer;
import org.apache.flink.connector.kafka.source.reader.deserializer.KafkaRecordDeserializer;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.sink.RichSinkFunction;
import org.apache.flink.streaming.api.windowing.assigners.TumblingProcessingTimeWindows;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.common.serialization.StringDeserializer;

import javax.xml.crypto.Data;
import java.util.Properties;

/**
 * Flink job: reads change-log JSON records for the "sensor" table from Kafka,
 * parses them into {@code Sensor} beans, window-joins two parsed streams on id,
 * and sends the result to a sink.
 *
 * @author liuanbo (2194550857@qq.com)
 * @since 2024-04-01
 */
public class Flink03_Source_Kafka {

    public static void main(String[] args) throws Exception {
        // 1. Set up the streaming environment (single parallelism for easy local debugging).
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);

        // 2. Kafka consumer configuration.
        Properties properties = new Properties();
        properties.setProperty("bootstrap.servers", "hadoop102:9092,hadoop103:9092,hadoop104:9092");
        properties.setProperty("group.id", "kafka_flink");
        properties.setProperty("auto.offset.reset", "latest"); // alternative: "earliest"

        // 3. Read raw JSON strings from the "sensor" topic.
        DataStreamSource<String> stringDataStreamSource =
                env.addSource(new FlinkKafkaConsumer<>("sensor", new SimpleStringSchema(), properties));

        stringDataStreamSource.print();

        // Both streams need the same JSON -> Sensor parsing, so share one MapFunction
        // implementation instead of two duplicated anonymous classes.
        SingleOutputStreamOperator<Sensor> map1 = stringDataStreamSource.map(new JsonToSensorMapper());
        SingleOutputStreamOperator<Sensor> map2 = stringDataStreamSource.map(new JsonToSensorMapper());

        // Window-join the two streams on sensor id over a 3-second processing-time window.
        DataStream<String> apply = map1.keyBy(Sensor::getId)
                .join(map2.keyBy(Sensor::getId))
                .where(Sensor::getId)
                .equalTo(Sensor::getId)
                .window(TumblingProcessingTimeWindows.of(Time.seconds(3)))
                .apply(new JoinFunction<Sensor, Sensor, String>() {
                    @Override
                    public String join(Sensor first, Sensor second) throws Exception {
                        // Emit a non-null record: returning null from a JoinFunction
                        // can fail record serialization downstream in Flink.
                        return "joined id=" + first.getId();
                    }
                });

        // Placeholder sink. Only invoke(value, context) needs to be overridden;
        // the other RichSinkFunction methods already delegate to super by default.
        apply.addSink(new RichSinkFunction<String>() {
            @Override
            public void invoke(String value, Context context) throws Exception {
                // TODO: write the joined record to an external system.
            }
        });

        env.execute();
    }

    /**
     * Parses a change-log JSON record such as
     * {@code {"database":"test","data":{"id":1,"amount":42.0},"type":"insert","table":"sensor"}}
     * into a {@code Sensor} bean.
     *
     * <p>Records that are not inserts into the {@code sensor} table (or that lack a
     * {@code data} payload) yield an empty {@code Sensor}, matching the original behavior
     * of always returning a non-null bean.
     */
    private static class JsonToSensorMapper implements MapFunction<String, Sensor> {
        @Override
        public Sensor map(String value) throws Exception {
            JSONObject jsonObject = JSON.parseObject(value);
            String table = jsonObject.getString("table");
            // BUGFIX: the event type is stored under the "type" key. The original code
            // called getString("insert"), which always returned null and then threw an
            // NPE on type.equals(...) for every record whose table matched.
            String type = jsonObject.getString("type");
            JSONObject data = jsonObject.getJSONObject("data");
            Sensor sensor = new Sensor();
            // Constant-first equals() avoids NPEs when either field is missing.
            if ("sensor".equals(table) && "insert".equals(type) && data != null) {
                sensor.setId(data.getInteger("id"));
                sensor.setAmount(data.getDouble("amount"));
            }
            return sensor;
        }
    }
}
