package com.bw.yk02.app;

import com.alibaba.fastjson.JSONObject;
import com.alibaba.ververica.cdc.connectors.mysql.MySQLSource;
import com.alibaba.ververica.cdc.connectors.mysql.table.StartupOptions;
import com.alibaba.ververica.cdc.debezium.DebeziumSourceFunction;
import com.bw.yk02.fnc.HbaseSinkFunction;
import com.bw.yk02.fnc.MySQLSourceTest;
import com.bw.yk02.util.MyKafkaUtil;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.ProcessFunction;
import org.apache.flink.util.Collector;
import org.apache.flink.util.OutputTag;

import java.util.Arrays;
import java.util.HashSet;
import java.util.Properties;
import java.util.Set;


/**
 * Flink job that tails a MySQL binlog via the Ververica CDC connector,
 * publishes every change record to Kafka ("topic-db"), and routes records
 * belonging to dimension tables to an HBase sink through a side output.
 */
public class FlinkDemo01 {

    /**
     * Dimension tables whose change records are side-output to HBase.
     * Matched by exact table name (the previous comma-joined-string
     * {@code contains()} check also matched partial names such as "info").
     */
    private static final Set<String> DIM_TABLES = new HashSet<>(Arrays.asList(
            "base_category3", "sku_info", "user_info", "base_province",
            "activity_info", "cart_info", "favor_info"));

    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        // Parallelism 1 preserves the binlog event order end-to-end.
        env.setParallelism(1);

        // Have Debezium emit DECIMAL columns as doubles rather than encoded byte strings.
        Properties properties = new Properties();
        properties.setProperty("decimal.handling.mode", "double");

        DebeziumSourceFunction<String> mysqlDS = MySQLSource.<String>builder()
                .hostname("192.168.18.105")
                .port(3306)
                .databaseList("gmall2021")
                .username("root")
                .password("root") // NOTE(review): credentials belong in external config, not source
                .deserializer(new MySQLSourceTest())
                .startupOptions(StartupOptions.initial()) // full snapshot first, then tail the binlog
                .debeziumProperties(properties)
                .build();

        DataStreamSource<String> mysqlDataSource = env.addSource(mysqlDS);

        mysqlDataSource.print();

        // Every change record goes to Kafka.
        mysqlDataSource.addSink(MyKafkaUtil.getKafkaProducer("topic-db"));

        // Side-output tag for dimension-table records (anonymous subclass keeps generic type info).
        OutputTag<String> outputTag = new OutputTag<String>("tag") {};
        SingleOutputStreamOperator<String> dimDS = mysqlDataSource.process(
                new ProcessFunction<String, String>() {
                    @Override
                    public void processElement(String s,
                                               ProcessFunction<String, String>.Context context,
                                               Collector<String> collector) throws Exception {
                        JSONObject jsonObject = JSONObject.parseObject(s);
                        if (jsonObject == null) {
                            return;
                        }
                        // Null-safe exact match: the old code NPE'd via contains(null)
                        // when a record carried no "table" field.
                        String table = jsonObject.getString("table");
                        if (table != null && DIM_TABLES.contains(table)) {
                            context.output(outputTag, s);
                        }
                    }
                });

        dimDS.getSideOutput(outputTag).addSink(new HbaseSinkFunction());

        env.execute("FlinkDemo01");
    }
}
