package com.atlocal.app;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.atlocal.base.impl.FlinkBaseApiImpl;
import com.atlocal.fun.FilterJsonFunction;
import com.atlocal.fun.MyDebeziumDeserializationSchema;
import com.ververica.cdc.connectors.base.options.StartupOptions;
import com.ververica.cdc.connectors.oracle.OracleSource;
import org.apache.flink.api.java.utils.ParameterTool;
import org.apache.flink.connector.base.DeliveryGuarantee;
import org.apache.flink.connector.kafka.sink.KafkaRecordSerializationSchema;
import org.apache.flink.connector.kafka.sink.KafkaSink;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.functions.source.SourceFunction;
import org.apache.kafka.clients.producer.ProducerRecord;

import javax.annotation.Nullable;
import java.nio.charset.StandardCharsets;
import java.util.Properties;


/**
 * @ClassName OracleToClickhouseByCdcApi
 * @Description TODO
 * @Author kongjiangjiang
 * @Date 2023-04-27 23:02
 * @Version 1.0
 **/
@SuppressWarnings("all")
public class OracleToClickhouseByCdcApi extends FlinkBaseApiImpl {

    // --tableInfo  zssys.web_ply_ci&c_pk_id@c_app_no#zssys.web_ply_base&c_app_no   参数案例

    /*
    --tableInfo  zssys.web_ply_ci&c_pk_id@c_app_no#zssys.web_ply_base&c_app_no
    --hostname 10.16.55.38
    --port 1521
    --schema zssys
    --database V7RAC
    --username flinkuser
    --password Flink_0426
    --kafkaserver 10.30.64.202:9092,10.30.64.203:9092,10.30.64.204:9092
     */
    public static void main(String[] args) throws Exception {
        ParameterTool parameter = ParameterTool.fromArgs(args);
        String tableInfo = parameter.get("tableInfo").trim();
        String hostname = parameter.get("hostname").trim();
        Integer port = parameter.getInt("port");
        String schema = parameter.get("schema").trim();
        String database = parameter.get("database").trim();
        String username = parameter.get("username").trim();
        String password = parameter.get("password").trim();
        String kafkaserver = parameter.get("kafkaserver").trim();


        String[] splits = tableInfo.split("#");
        StringBuilder stringBuilder = new StringBuilder();
        for (int i = 0; i < splits.length; i++) {
            //获取表名
            String[] splitTableName = splits[i].trim().split("&");
            if (i < splits.length - 1) {
                stringBuilder.append(splitTableName[0]).append(",");
            } else {
                stringBuilder.append(splitTableName[0]);
            }
        }


        System.setProperty("javax.xml.parsers.DocumentBuilderFactory", "com.sun.org.apache.xerces.internal.jaxp.DocumentBuilderFactoryImpl");
        Properties properties = new Properties();
        properties.put("database.tablename.case.insensitive", "false");//11g数据库适配
        properties.setProperty("flink.xml.input.streaming.xinclude-aware", "false");
        properties.setProperty("database.connection.adapter", "logminer");
        // 要同步快，这个配置必须加，不然非常慢
        properties.setProperty("log.mining.strategy", "online_catalog");
        properties.setProperty("log.mining.continuous.mine", "true");
        SourceFunction<String> sourceFunction = OracleSource.<String>builder()
                .hostname(hostname)
                .port(port)
                .database(database) // monitor XE database
                .schemaList(schema) // monitor inventory schema
                .tableList(stringBuilder.toString()) // monitor products table
                .username(username)
                .password(password)
//                .startupOptions(StartupOptions.initial())
                .startupOptions(StartupOptions.initial())
//                .deserializer(new JsonDebeziumDeserializationSchema()) // converts SourceRecord to JSON String
                .deserializer(new MyDebeziumDeserializationSchema(args)) // 自定义解析器
                .debeziumProperties(properties)
                .build();


        DataStreamSource<String> stringDataStreamSource = env.addSource(sourceFunction);
        //判断数据是否符号标志json格式
        SingleOutputStreamOperator<String> filterStream = stringDataStreamSource.filter(new FilterJsonFunction());

        KafkaSink<String> kafkaSink = KafkaSink
                .<String>builder()
                .setBootstrapServers(kafkaserver)
                .setRecordSerializer(new KafkaRecordSerializationSchema<String>() {
                    @Nullable
                    @Override
                    public ProducerRecord<byte[], byte[]> serialize(String element, KafkaSinkContext context, Long timestamp) {
                        JSONObject json = JSON.parseObject(element);
                        String topic = "paimon_" + json.getString("tableName");
                        return new ProducerRecord<>(topic, element.getBytes(StandardCharsets.UTF_8));
                    }
                }).setDeliveryGuarantee(DeliveryGuarantee.AT_LEAST_ONCE)
                .build();
        //数据写到kafka
        filterStream.sinkTo(kafkaSink);
        env.execute(String.valueOf(OracleToClickhouseByCdcApi.class));
    }

}
