package realtime.app.dwd.db;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONException;
import com.alibaba.fastjson.JSONObject;
import com.ververica.cdc.connectors.mysql.source.MySqlSource;
import com.ververica.cdc.connectors.mysql.table.StartupOptions;
import com.ververica.cdc.debezium.JsonDebeziumDeserializationSchema;

import java.nio.charset.StandardCharsets;

import javax.annotation.Nullable;

import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.api.common.state.MapStateDescriptor;
import org.apache.flink.runtime.state.hashmap.HashMapStateBackend;
import org.apache.flink.streaming.api.CheckpointingMode;
import org.apache.flink.streaming.api.datastream.*;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.connectors.kafka.KafkaSerializationSchema;
import org.apache.flink.types.MapValue;
import org.apache.flink.util.Collector;
import org.apache.kafka.clients.producer.ProducerRecord;

import realtime.bean.TableProcess;
import realtime.func.DwdTableProcessFunction;
import realtime.util.MyKafkaUtil;

/**
 * DWD-layer dispatcher job.
 *
 * <p>Reads raw change-log records from the Kafka topic {@code topic_db}, drops records
 * that are not parseable as JSON, connects the main stream with a broadcast of the
 * {@code gmall_config.table_process} configuration table (consumed via MySQL CDC), and
 * routes each matched record's {@code data} payload to the Kafka topic named by its
 * {@code sink_table} field.
 */
public class BaseDBApp {
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);
        // NOTE(review): checkpointing is currently disabled; re-enable for exactly-once
        // guarantees before running in production:
        //   env.enableCheckpointing(5000, CheckpointingMode.EXACTLY_ONCE);
        //   env.getCheckpointConfig().setCheckpointTimeout(10000);
        //   env.setStateBackend(new HashMapStateBackend());

        // Required so checkpoint/state files on HDFS are written as "root" once the
        // state backend above is enabled.
        System.setProperty("HADOOP_USER_NAME", "root");

        // Main stream: raw change-log rows published by the ODS layer.
        DataStreamSource<String> kafkaStream =
                env.addSource(MyKafkaUtil.getFlinkKafkaConsumer("topic_db", "base_db_app"));

        // Parse each record into a JSONObject; non-JSON ("dirty") records are logged
        // and dropped instead of failing the job.
        SingleOutputStreamOperator<JSONObject> jsonStream =
                kafkaStream.flatMap(new FlatMapFunction<String, JSONObject>() {
                    @Override
                    public void flatMap(String value, Collector<JSONObject> out) throws Exception {
                        if (value != null) {
                            try {
                                out.collect(JSON.parseObject(value));
                            } catch (JSONException e) {
                                // TODO(review): route dirty data to a side output or a real
                                // logger instead of stdout.
                                System.out.println("不能解析为JSON格式的脏数据为：" + value);
                            }
                        }
                    }
                });

        // Config stream: CDC source over the table_process routing table, starting from
        // the latest binlog position (existing rows are not replayed).
        MySqlSource<String> configSource = MySqlSource.<String>builder()
                .hostname("hadoop102")
                .username("root")
                // TODO(review): credentials are hard-coded; move to external configuration.
                .password("123456")
                .port(3306)
                .databaseList("gmall_config")
                .tableList("gmall_config.table_process")
                .deserializer(new JsonDebeziumDeserializationSchema())
                .startupOptions(StartupOptions.latest())
                .build();

        DataStreamSource<String> configStream =
                env.fromSource(configSource, WatermarkStrategy.noWatermarks(), "mysql");

        // Broadcast the routing configuration to every parallel instance of the
        // process function; keyed by source table name, valued by its routing rule.
        MapStateDescriptor<String, TableProcess> mapStateDescriptor =
                new MapStateDescriptor<>("mapState", String.class, TableProcess.class);
        BroadcastStream<String> broadcast = configStream.broadcast(mapStateDescriptor);

        BroadcastConnectedStream<JSONObject, String> connectedStream = jsonStream.connect(broadcast);

        // Emits only records whose source table has a routing rule; each output carries
        // "sink_table" (target topic) and "data" (payload) fields.
        SingleOutputStreamOperator<JSONObject> process =
                connectedStream.process(new DwdTableProcessFunction(mapStateDescriptor));

        // Sink: topic chosen per record from its "sink_table" field. Encode explicitly
        // as UTF-8 — the no-arg getBytes() depends on the JVM's platform charset.
        // NOTE(review): assumes "sink_table" and "data" are always present on records
        // emitted by DwdTableProcessFunction — a missing "data" would NPE here; confirm.
        process.addSink(MyKafkaUtil.getFlinkKafkaProducer(new KafkaSerializationSchema<JSONObject>() {
            @Override
            public ProducerRecord<byte[], byte[]> serialize(JSONObject element, @Nullable Long timestamp) {
                return new ProducerRecord<>(
                        element.getString("sink_table"),
                        element.getString("data").getBytes(StandardCharsets.UTF_8));
            }
        }));

        env.execute("base_db_app");
    }
}
