package com.atbeijin.cdc;

import com.alibaba.fastjson.JSONObject;
import com.alibaba.ververica.cdc.connectors.mysql.MySQLSource;
import com.alibaba.ververica.cdc.connectors.mysql.table.StartupOptions;
import com.alibaba.ververica.cdc.debezium.DebeziumDeserializationSchema;
import com.alibaba.ververica.cdc.debezium.DebeziumSourceFunction;
import io.debezium.data.Envelope;
import org.apache.flink.api.common.typeinfo.BasicTypeInfo;
import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.util.Collector;
import org.apache.kafka.connect.data.Field;
import org.apache.kafka.connect.data.Struct;
import org.apache.kafka.connect.source.SourceRecord;

import java.util.List;
import java.util.Locale;

/**
 * Flink CDC demo with a custom Debezium deserialization schema: change events
 * read from the MySQL binlog are flattened into compact JSON strings
 * ({@code database}/{@code table}/{@code type}/{@code data}) instead of the
 * default verbose {@code SourceRecord#toString()} form.
 */
public class FlinkCDC03_CustomSchema {

    /**
     * Bootstraps a local Flink job that captures MySQL binlog changes via the
     * CDC source, deserializes each change record with {@link MySchema}, and
     * prints the resulting JSON strings to stdout.
     *
     * @param args unused command-line arguments
     * @throws Exception if the Flink job fails to build or execute
     */
    public static void main(String[] args) throws Exception {
        // 1. Prepare the stream execution environment. The local-with-web-UI
        // variant (instead of getExecutionEnvironment) also starts the Flink
        // dashboard so the running job can be inspected in a browser.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.createLocalEnvironmentWithWebUI(new Configuration());
        env.setParallelism(1);

        // 2. Build the MySQL CDC source. StartupOptions.initial() first takes
        // a consistent snapshot of the monitored table, then switches over to
        // streaming the binlog from the snapshot position.
        DebeziumSourceFunction<String> sourceFunction =
                MySQLSource.<String>builder()
                        .hostname("hadoop202")
                        .port(3306)
                        .username("root")
                        .password("123456")
                        .databaseList("gmall1116_realtime")
                        .tableList("gmall1116_realtime.test")
                        .startupOptions(StartupOptions.initial())
                        .deserializer(new MySchema())
                        .build();

        // 3. Attach the source, print every deserialized event, and run.
        DataStreamSource<String> mySqlDS = env.addSource(sourceFunction);

        mySqlDS.print(">>>>");
        env.execute();
    }

    /**
     * Custom deserialization schema: converts a Debezium {@link SourceRecord}
     * into a flat JSON string with the fields {@code database}, {@code table},
     * {@code type} (insert/update/delete), and {@code data} (the "after" row
     * image; empty for deletes).
     */
    public static class MySchema implements DebeziumDeserializationSchema<String>{
        // Example of the raw record this method receives:  SourceRecord{sourcePartition={server=mysql_binlog_source}, sourceOffset={ts_sec=1620895948, file=mysql-bin.000016, pos=5346, row=1, server_id=1, event=2}} ConnectRecord{topic='mysql_binlog_source.gmall1116_realtime.test', kafkaPartition=null, key=Struct{id=55}, keySchema=Schema{mysql_binlog_source.gmall1116_realtime.test.Key:STRUCT}, value=Struct{before=Struct{id=55,name=qqrfff,age=566},after=Struct{id=55,name=qqrfff,age=5660},source=Struct{version=1.4.1.Final,connector=mysql,name=mysql_binlog_source,ts_ms=1620895948000,db=gmall1116_realtime,table=test,server_id=1,file=mysql-bin.000016,pos=5497,row=0,thread=7},op=u,ts_ms=1620895978920}, valueSchema=Schema{mysql_binlog_source.gmall1116_realtime.test.Envelope:STRUCT}, timestamp=null, headers=ConnectHeaders(headers=)}
        @Override
        public void deserialize(SourceRecord sourceRecord, Collector<String> collector) throws Exception {
            // The record value is a Struct carrying "before"/"after" row
            // images plus "source" metadata (db, table, binlog position, ...).
            Struct valueStruct = (Struct)sourceRecord.value();
            Struct sourceStruct = valueStruct.getStruct("source");
            String dbName = sourceStruct.getString("db");
            String tableName = sourceStruct.getString("table");

            // Operation type. Locale.ROOT makes the lowercasing deterministic
            // regardless of the JVM default locale (e.g. the Turkish dotless-i
            // would otherwise break the "create" comparison below).
            Envelope.Operation operation = Envelope.operationFor(sourceRecord);
            String type = operation.toString().toLowerCase(Locale.ROOT);
            // Normalize Debezium's "create" to the conventional "insert".
            if ("create".equals(type)) {
                type = "insert";
            }

            // Copy every column of the "after" row image into dataJsonObj.
            // For delete events "after" is null and "data" stays empty.
            JSONObject dataJsonObj = new JSONObject();
            Struct afterStruct = valueStruct.getStruct("after");
            if(afterStruct!=null){
                List<Field> fieldList = afterStruct.schema().fields();
                for (Field field : fieldList) {
                    dataJsonObj.put(field.name(), afterStruct.get(field));
                }
            }

            // Wrap database, table, operation type, and row data into one
            // envelope JSON object and emit it downstream.
            JSONObject resJsonObj = new JSONObject();
            resJsonObj.put("database", dbName);
            resJsonObj.put("table", tableName);
            resJsonObj.put("type", type);
            resJsonObj.put("data", dataJsonObj);

            collector.collect(resJsonObj.toJSONString());
        }

        /** Tells Flink the element type produced by this schema: String. */
        @Override
        public TypeInformation<String> getProducedType() {
            return BasicTypeInfo.STRING_TYPE_INFO;
        }
    }
}
