package com.atguigu.edu.realtime.app.dwd.db;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.atguigu.edu.realtime.app.func.BroadcastFunction;
import com.atguigu.edu.realtime.beans.TableProcess;
import com.atguigu.edu.realtime.utils.MyKafkaUtil;
import com.ververica.cdc.connectors.mysql.source.MySqlSource;
import com.ververica.cdc.connectors.mysql.table.StartupOptions;
import com.ververica.cdc.debezium.JsonDebeziumDeserializationSchema;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.api.common.state.MapStateDescriptor;
import org.apache.flink.streaming.api.datastream.*;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;
import org.apache.flink.streaming.connectors.kafka.KafkaSerializationSchema;
import org.apache.flink.util.Collector;
import org.apache.kafka.clients.producer.ProducerRecord;

import javax.annotation.Nullable;

import java.nio.charset.StandardCharsets;

/**
 * Dynamic routing job for the DWD layer: consumes the Maxwell change-log
 * stream from the {@code topic_db} Kafka topic, filters out bootstrap
 * control records, joins each record against a broadcast configuration
 * table (captured via MySQL CDC from {@code edu_config.table_process}),
 * and writes matching records to their configured sink Kafka topics.
 *
 * @author 邢家俊
 * @since 2023-05-05
 * @version 1.0
 **/
public class BaseDbApp {
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(4);

        // 1. Main stream: raw Maxwell change-log records from Kafka.
        FlinkKafkaConsumer<String> kafkaConsumer =
                MyKafkaUtil.getKafkaConsumer("topic_db", "BaseDbApp_group");
        DataStreamSource<String> streamSourceDS = env.addSource(kafkaConsumer);

        // 2. Parse each record and drop Maxwell bootstrap control/snapshot records.
        //    Malformed JSON is logged and skipped — rethrowing here (as the code
        //    previously did) would let a single dirty record fail the whole job.
        SingleOutputStreamOperator<JSONObject> filterDS = streamSourceDS.flatMap(
                new FlatMapFunction<String, JSONObject>() {
                    @Override
                    public void flatMap(String value, Collector<JSONObject> out) {
                        try {
                            JSONObject jsonObj = JSON.parseObject(value);
                            String type = jsonObj.getString("type");
                            if (!"bootstrap-insert".equals(type)
                                    && !"bootstrap-start".equals(type)
                                    && !"bootstrap-complete".equals(type)) {
                                out.collect(jsonObj);
                            }
                        } catch (Exception e) {
                            // Dirty data: skip rather than kill the job on one bad record.
                            System.err.println("BaseDbApp: dropping malformed record: " + value);
                        }
                    }
                });
        // Sample record shape:
        // {"database":"edu","xid":74996,"data":{"coupon_reduce":0.00,"create_time":"2023-05-06 10:11:16","session_id":"8d73a5a2-f5c8-490f-851e-a548d03c94d4","expire_time":"2023-05-06 10:26:16","order_status":"1002","update_time":"2023-05-06 10:11:17","origin_amount":600.00,"out_trade_no":"515199566275462","user_id":4,"province_id":27,"trade_body":"Java8新特性等3件商品","final_amount":600.00,"id":46767},"old":{"order_status":"1001"}
        // ,"commit":true,"type":"update","table":"order_info","ts":1683339076}
        // filterDS.print("111111111111");

        // 3. Config stream: MySQL CDC source capturing the routing table.
        //    NOTE(review): credentials are hard-coded — move to configuration/secrets.
        MySqlSource<String> sourceFunction = MySqlSource.<String>builder()
                .hostname("hadoop101")
                .port(3306)
                .databaseList("edu_config") // set captured database
                .tableList("edu_config.table_process") // set captured table
                .username("root")
                .password("000000")
                // initial(): take a full snapshot of existing config rows, then stream changes
                .startupOptions(StartupOptions.initial())
                .deserializer(new JsonDebeziumDeserializationSchema()) // converts SourceRecord to String
                .build();
        DataStreamSource<String> streamSource =
                env.fromSource(sourceFunction, WatermarkStrategy.noWatermarks(), "Mysql_cdc_source");
        streamSource.print("222222222");

        // 4. Broadcast the config stream; the descriptor keys broadcast state by
        //    "sourceTable:sourceType" (see BroadcastFunction) mapped to TableProcess.
        MapStateDescriptor<String, TableProcess> mapStateDescriptorDS =
                new MapStateDescriptor<>("mysql_ds", String.class, TableProcess.class);
        BroadcastStream<String> broadcastDS = streamSource.broadcast(mapStateDescriptorDS);

        // 5. Connect main + broadcast streams; BroadcastFunction tags each matching
        //    record with its destination topic under the "sink_table" key.
        BroadcastConnectedStream<JSONObject, String> connectDS = filterDS.connect(broadcastDS);
        SingleOutputStreamOperator<JSONObject> connectProcessDS =
                connectDS.process(new BroadcastFunction(mapStateDescriptorDS));

        // 6. Route each record to the Kafka topic named by "sink_table", stripping
        //    the routing key before serialization. UTF-8 is specified explicitly so
        //    the payload encoding does not depend on the JVM's platform charset.
        connectProcessDS.addSink(MyKafkaUtil.getKafkaProducerBySchema(
                new KafkaSerializationSchema<JSONObject>() {
                    @Override
                    public ProducerRecord<byte[], byte[]> serialize(JSONObject element,
                                                                    @Nullable Long timestamp) {
                        // Assumes BroadcastFunction always sets "sink_table"; a null
                        // topic would fail inside ProducerRecord — TODO confirm upstream.
                        String sinkTable = element.getString("sink_table");
                        element.remove("sink_table");
                        return new ProducerRecord<>(
                                sinkTable,
                                element.toJSONString().getBytes(StandardCharsets.UTF_8));
                    }
                }));

        env.execute();
    }
}