package demo.cdc;

import com.alibaba.fastjson.JSONObject;
import com.ververica.cdc.connectors.mysql.source.MySqlSource;
import com.ververica.cdc.debezium.JsonDebeziumDeserializationSchema;
import com.we.flink.utils.WeKafkaPartitioner;

import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.functions.RichMapFunction;
import org.apache.flink.api.common.serialization.SerializationSchema;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.connector.kafka.sink.KafkaRecordSerializationSchema;
import org.apache.flink.connector.kafka.sink.KafkaSink;
import org.apache.flink.connector.kafka.sink.KafkaSinkBuilder;
import org.apache.flink.connector.kafka.sink.TopicSelector;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.kafka.connect.json.JsonConverterConfig;

import java.nio.charset.StandardCharsets;
import java.util.HashMap;

/**
 * Demo Flink job: streams MySQL change-data-capture events (via the Ververica
 * Debezium-based MySQL source) into Kafka. Each change event is flattened to
 * its "after" row image, tagged with the originating database/table, keyed by
 * the row's {@code uid}, and routed to a per-table topic ({@code "AAA_" + table}).
 */
public class MysqlCdcDemo {
    public static void main(String[] args) throws Exception {

        // Render Debezium DECIMAL/NUMERIC columns as plain JSON numbers
        // instead of the default base64-encoded byte representation.
        HashMap<String, Object> customConfig = new HashMap<>();
        customConfig.put(JsonConverterConfig.DECIMAL_FORMAT_CONFIG, "numeric");

        // NOTE(review): host and credentials are hard-coded; move them to
        // job configuration / a secret store before any non-demo use.
        MySqlSource<String> mysqlSource =
                MySqlSource.<String>builder()
                        .hostname("172.16.2.153")
                        .port(3306)
                        .scanNewlyAddedTableEnabled(true)
                        .databaseList("haohuan_db_dev")
                        // tableList(...) is varargs; the original passed ONE string
                        // "a, b" whose embedded space breaks matching of the second
                        // table. Pass each fully-qualified table name separately.
                        .tableList(
                                "haohuan_db_dev.user_amount_retrial_audit",
                                "haohuan_db_dev.tie_card_risk_tag")
                        .username("admin")
                        .password("admin@admin.com")
                        .deserializer(new JsonDebeziumDeserializationSchema(false, customConfig))
                        .build();

        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        DataStreamSource<String> cdcInput =
                env.fromSource(mysqlSource, WatermarkStrategy.noWatermarks(), "Haohuan_Db_CDC");

        // Flatten each change event to its "after" image, tagged with source
        // db/table so the sink can route by table.
        SingleOutputStreamOperator<String> out =
                cdcInput
                        // DELETE events carry a null "after" image and would NPE in
                        // the map below — drop them here. NOTE(review): confirm that
                        // deletes should be ignored rather than forwarded downstream.
                        .filter(value -> JSONObject.parseObject(value).getJSONObject("after") != null)
                        .map(
                                new RichMapFunction<String, String>() {
                                    @Override
                                    public String map(String value) {
                                        JSONObject event = JSONObject.parseObject(value);
                                        JSONObject after = event.getJSONObject("after");
                                        JSONObject source = event.getJSONObject("source");
                                        after.put("db_name", source.getString("db"));
                                        after.put("table_name", source.getString("table"));
                                        return after.toString();
                                    }
                                });

        KafkaSinkBuilder<String> stringKafkaSinkBuilder =
                KafkaSink.<String>builder()
                        .setBootstrapServers(
                                "172.16.1.41:9092,172.16.1.42:9092,172.16.1.43:9092,172.16.1.44:9092")
                        .setRecordSerializer(
                                KafkaRecordSerializationSchema.builder()
                                        .setTopicSelector(new jkTopicSelector())
                                        // Key each record by "uid" so all changes for one
                                        // user land in the same partition.
                                        .setKeySerializationSchema(
                                                new SerializationSchema<String>() {
                                                    @Override
                                                    public byte[] serialize(String element) {
                                                        Long uid =
                                                                JSONObject.parseObject(element)
                                                                        .getLong("uid");
                                                        // Explicit charset: bare getBytes() uses the
                                                        // platform default, which varies per host.
                                                        return String.valueOf(uid)
                                                                .getBytes(StandardCharsets.UTF_8);
                                                    }
                                                })
                                        .setValueSerializationSchema(new SimpleStringSchema())
                                        .setPartitioner(new WeKafkaPartitioner())
                                        .build());

        out.sinkTo(stringKafkaSinkBuilder.build());

        env.execute("test");
    }

    /**
     * Routes each flattened record to the topic {@code "AAA_" + table_name},
     * where {@code table_name} was injected by the map step above.
     *
     * <p>NOTE(review): the lowercase class name violates UpperCamelCase; it is
     * kept as-is for compatibility ({@code JkTopicSelector} would be idiomatic).
     */
    public static class jkTopicSelector implements TopicSelector<String> {

        @Override
        public String apply(String input) {
            JSONObject record = JSONObject.parseObject(input);
            String tableName = record.getString("table_name");
            return "AAA_" + tableName;
        }
    }
}
