package flink;

import bean.TableProcess;
import com.alibaba.fastjson.JSONObject;
import com.alibaba.ververica.cdc.connectors.mysql.MySQLSource;
import com.alibaba.ververica.cdc.connectors.mysql.table.StartupOptions;
import com.alibaba.ververica.cdc.debezium.DebeziumSourceFunction;
import function.HbaseSinkFunction;
import function.KafkaSinkFunction;
import function.TableProcessFunction;
import org.apache.flink.api.common.state.MapStateDescriptor;
import org.apache.flink.streaming.api.datastream.*;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.util.OutputTag;

public class FlinkCDCMetaData {

    /**
     * Entry point for the CDC routing job.
     *
     * <p>Reads the configuration ("metadata") table and the business tables via
     * Flink CDC, broadcasts the configuration to all tasks, splits the business
     * stream into a Kafka branch (main output) and an HBase branch (side
     * output), and writes each branch to its sink.
     *
     * @param args unused command-line arguments
     * @throws Exception if job construction or execution fails
     */
    public static void main(String[] args) throws Exception {
        // 1. Set up the execution environment.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);

        // 2. Build a CDC source for the configuration (metadata) table.
        //    StartupOptions.initial() takes a full snapshot first, so existing
        //    configuration rows are loaded before incremental changes arrive.
        // NOTE(review): credentials are hard-coded; move them to external configuration.
        DebeziumSourceFunction<String> metaSourceFunction = MySQLSource.<String>builder()
                .hostname("192.168.1.10")
                .port(3306)
                .username("root")
                .password("123456")
                .databaseList("flinkcdc")
                // Without tableList, every table in the listed databases is
                // consumed; entries must use the db.table form.
                .tableList("flinkcdc.metadata")
                .deserializer(new CustomerDeserialization())
                .startupOptions(StartupOptions.initial())
                .build();
        DataStreamSource<String> metaStream = env.addSource(metaSourceFunction);

        // 3. Build a CDC source for all business tables in the "flink" database.
        //    StartupOptions.latest() skips the snapshot and reads only new changes.
        DebeziumSourceFunction<String> dataSourceFunction = MySQLSource.<String>builder()
                .hostname("192.168.1.10")
                .port(3306)
                .username("root")
                .password("123456")
                .databaseList("flink")
                .deserializer(new CustomerDeserialization())
                .startupOptions(StartupOptions.latest())
                .build();
        // Renamed from "DataStreamSource", which shadowed the imported class name.
        DataStreamSource<String> dataStream = env.addSource(dataSourceFunction);

        // 4. Broadcast the configuration stream so every parallel task of the
        //    process function sees the full table-process mapping.
        MapStateDescriptor<String, TableProcess> mapStateDescriptor =
                new MapStateDescriptor<>("map-state", String.class, TableProcess.class);
        BroadcastStream<String> broadcastStream = metaStream.broadcast(mapStateDescriptor);

        // 5. Connect the business stream with the broadcast stream and split it:
        //    the main output is bound for Kafka, the side output for HBase.
        OutputTag<JSONObject> hbaseTag = new OutputTag<JSONObject>("hbase-tag") {};
        BroadcastConnectedStream<String, String> connectedStream = dataStream.connect(broadcastStream);

        SingleOutputStreamOperator<JSONObject> kafka =
                connectedStream.process(new TableProcessFunction(hbaseTag, mapStateDescriptor));
        // Typed side output (was a raw DataStream, which lost type safety).
        DataStream<JSONObject> hbase = kafka.getSideOutput(hbaseTag);

        // 6. Print both branches for debugging.
        kafka.print("kafka>>>>");
        hbase.print("hbase>>>>");

        // 7. Write each branch to its sink.
        kafka.addSink(KafkaSinkFunction.getKafkaProducer());
        hbase.addSink(new HbaseSinkFunction());

        // 8. Submit the job.
        env.execute("FlinkCDCWithCustomerDeserialization");
    }

}
