package com.zyx.flinkdemo.cdc.stream;

import com.alibaba.fastjson.JSONObject;
import com.alibaba.ververica.cdc.connectors.mysql.MySQLSource;
import com.alibaba.ververica.cdc.connectors.mysql.table.StartupOptions;
import com.alibaba.ververica.cdc.debezium.DebeziumDeserializationSchema;
import com.alibaba.ververica.cdc.debezium.DebeziumSourceFunction;
import com.zyx.flinkdemo.pojo.User;
import io.debezium.data.Envelope;
import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.apache.flink.api.common.typeinfo.Types;
import org.apache.flink.runtime.state.hashmap.HashMapStateBackend;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.util.Collector;
import org.apache.kafka.connect.data.Field;
import org.apache.kafka.connect.data.Struct;
import org.apache.kafka.connect.source.SourceRecord;

/**
 * @author Yaxi.Zhang
 * @since 2021/6/24 11:01
 * desc: Deserializes the raw change records captured by CDC into JSON strings.
 */
public class StreamCdcDiserializeDemo {
    public static void main(String[] args) throws Exception {
        // 1. Set up the streaming execution environment with a single parallel task.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);

        // Enable checkpointing: 5s interval, 5s timeout, state kept on the heap,
        // checkpoint data persisted to HDFS.
        env.enableCheckpointing(5000L);
        env.getCheckpointConfig().setCheckpointTimeout(5000L);
        env.setStateBackend(new HashMapStateBackend());
        env.getCheckpointConfig().setCheckpointStorage("hdfs://bigdata021:8020/dev/cdctest");

        // 2. Build a MySQL CDC source that emits each change record as a JSON string,
        // using the custom deserializer defined below.
        DebeziumSourceFunction<String> mysqlCdcSource = MySQLSource
                .<String>builder()
                .hostname("localhost")
                .port(3306)
                .username("root")
                .password("123321")
                .serverTimeZone("Asia/Shanghai")
                .databaseList("db_test")
                .tableList("db_test.myuser")
                .startupOptions(StartupOptions.initial())
                .deserializer(new MyDeserializationSchema())
                .build();
        DataStreamSource<String> cdcStream = env.addSource(mysqlCdcSource);

        // 3. Keep only records that carry an "after" row image (i.e. the post-change
        // state) and render each one as a User.
        SingleOutputStreamOperator<String> afterStream = cdcStream.flatMap(new FlatMapFunction<String, String>() {
            @Override
            public void flatMap(String cdcJsonStr, Collector<String> out) throws Exception {
                String afterJson = JSONObject.parseObject(cdcJsonStr).getString("after");
                if (afterJson == null) {
                    return; // no "after" image on this record — skip it
                }
                out.collect(JSONObject.parseObject(afterJson, User.class).toString());
            }
        });

        // 4. Print both the raw CDC stream and the extracted "after" stream.
        cdcStream.print("stream >>> ");
        afterStream.print("after >>> ");

        // 5. Submit the job.
        env.execute("cdc_diserialize");
    }

    public static class MyDeserializationSchema implements DebeziumDeserializationSchema<String> {

        /**
         * Converts one Debezium {@link SourceRecord} into a flat JSON string holding:
         * the database and table name (derived two ways, from the "source" struct and
         * from the topic), the "before"/"after" row images, the operation code
         * (derived two ways, via {@link Envelope} and via the "op" field) and the
         * primary-key fields.
         *
         * @param sourceRecord the raw change record emitted by Debezium
         * @param collector    receives exactly one JSON string per record
         */
        @Override
        public void deserialize(SourceRecord sourceRecord, Collector<String> collector) throws Exception {
            // Result JSON assembled from the pieces of the change record.
            JSONObject result = new JSONObject();
            // Envelope struct of the record.
            Struct value = (Struct) sourceRecord.value();

            // Database and table name, way one: from the "source" metadata struct.
            Struct source = value.getStruct("source");
            result.put("dbByValue", source.getString("db"));
            result.put("tblByValue", source.getString("table"));

            // Way two: from the topic. For table change events the topic looks like
            // "<serverName>.<db>.<table>"; guard against shorter topics (e.g. the
            // server-level topic) which previously caused ArrayIndexOutOfBoundsException.
            String[] topicParts = sourceRecord.topic().split("\\.");
            if (topicParts.length >= 3) {
                result.put("dbByTopic", topicParts[1]);
                result.put("tblByTopic", topicParts[2]);
            }

            // Row images: only present fields are emitted ("after" is absent on
            // deletes, "before" is absent on inserts/snapshot reads).
            Struct after = value.getStruct("after");
            if (after != null) {
                result.put("after", getJsonStr(after));
            }
            Struct before = value.getStruct("before");
            if (before != null) {
                result.put("before", getJsonStr(before));
            }

            // Operation type, way one: via the Envelope helper.
            result.put("opByEnvelope", Envelope.operationFor(sourceRecord).code());
            // Way two: straight from the envelope's "op" field.
            result.put("opByValue", value.getString("op"));

            // Primary-key fields. The record key is null for tables without a primary
            // key; the unguarded call previously threw NullPointerException there.
            Struct pk = (Struct) sourceRecord.key();
            if (pk != null) {
                result.put("pk", getJsonStr(pk));
            }

            collector.collect(result.toJSONString());
        }

        @Override
        public TypeInformation<String> getProducedType() {
            return Types.STRING;
        }

        /** Serializes every field of the given struct into a JSON object string. */
        private String getJsonStr(Struct struct) {
            JSONObject jsonObj = new JSONObject();
            for (Field field : struct.schema().fields()) {
                jsonObj.put(field.name(), struct.get(field));
            }
            return jsonObj.toJSONString();
        }
    }
}
