package com.changan;

import com.changan.funcs.StreamSplitProcessor;
import com.changan.model.AppArgs;
import com.changan.parser.CliTool;
import com.ververica.cdc.connectors.mysql.source.MySqlSource;
import com.ververica.cdc.connectors.mysql.table.StartupOptions;
import com.ververica.cdc.debezium.JsonDebeziumDeserializationSchema;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.restartstrategy.RestartStrategies;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.api.common.time.Time;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaProducer;
import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
import org.apache.flink.util.OutputTag;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.util.Arrays;
import java.util.HashMap;
import java.util.Map;
import java.util.Properties;
import java.util.stream.Collectors;

/**
 * Flink job that captures MySQL binlog changes (via Flink CDC / Debezium),
 * splits the single changelog stream into one side output per captured table,
 * and sinks each table's changes to its own Kafka topic. Tables and topics are
 * matched by position in their comma-separated argument lists.
 */
public class Application {

    private static final String CDC_PREFIX = "cdc_";
    // NOTE(review): CDC_PREFIX and the two *_VIEW_SUFFIX constants are not
    // referenced anywhere in this class — confirm whether they can be removed.
    private static final String STREAM_DATA_VIEW_SUFFIX = "_data_stream_view";
    private static final String STREAM_SCHEMA_VIEW_SUFFIX = "_schema_stream_view";
    private static final String OUTPUT_TAG_PREFIX = "output_";
    private static final String OUTPUT_TAG_SUFFIX = "_ops";
    private static final String PIPELINE_KEY = "pipeline.name";

    private static final String STREAMING_JOB_SUFFIX = "_binlog_syncing";
    private static final Logger LOG = LoggerFactory.getLogger(Application.class);

    /**
     * Entry point. Builds the MySQL CDC source, registers one side-output tag
     * per table, splits the stream, and wires each side output to the Kafka
     * topic at the same index.
     *
     * @param args command-line arguments, parsed into {@link AppArgs}
     * @throws Exception if argument parsing fails or the Flink job fails
     */
    public static void main(String[] args) throws Exception {
        AppArgs appArgs = CliTool.getJcommander(AppArgs.class, args);

        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.enableCheckpointing(3000L);
        // Restart at most 3 times, waiting 10 seconds between attempts.
        env.setRestartStrategy(RestartStrategies.fixedDelayRestart(3, Time.seconds(10)));
        StreamTableEnvironment streamTableEnv = getStreamTableEnv(env);
        Configuration configuration = streamTableEnv.getConfig().getConfiguration();
        configuration.setString(PIPELINE_KEY, appArgs.getJobName());

        String databaseName = appArgs.getDatabaseName();
        String[] toSyncTables = appArgs.getSourceTableName().split(",");

        // Fail fast when the topic list cannot cover every table; previously
        // this surfaced later as an ArrayIndexOutOfBoundsException in the
        // sink-wiring loop.
        int topicCount = appArgs.getKafkaTopic().split(",").length;
        if (topicCount < toSyncTables.length) {
            throw new IllegalArgumentException(
                    "Need at least " + toSyncTables.length + " Kafka topic(s) but only "
                            + topicCount + " configured.");
        }

        // Fully-qualified "db.table" list for the CDC source, e.g. "db.t1,db.t2".
        String tableList = Arrays.stream(toSyncTables)
                .map(table -> databaseName + "." + table)
                .collect(Collectors.joining(","));

        MySqlSource<String> mySqlSource = MySqlSource.<String>builder()
                .hostname(appArgs.getHost())
                .port(Integer.parseInt(appArgs.getPort()))
                .databaseList(databaseName) // set captured database
                .tableList(tableList) // set captured tables
                .username(appArgs.getUserName())
                .password(appArgs.getPassword())
                .serverId(appArgs.getServerId())
                .debeziumProperties(getDebeziumProperties())
                .deserializer(new JsonDebeziumDeserializationSchema()) // converts SourceRecord to JSON String
                .startupOptions(getStartUpMode(appArgs))
                .serverTimeZone("Asia/Shanghai")
                .build();

        // One OutputTag per table, keyed by the tag name. The anonymous
        // subclass keeps the generic type information available at runtime.
        Map<String, OutputTag<String>> outputTags = new HashMap<>();
        for (String table : toSyncTables) {
            String outputTagName = buildOutputTagName(databaseName, table);
            OutputTag<String> outTag = new OutputTag<String>(outputTagName) {
            };
            // Fixed: the map is keyed by the tag name, so log that as the key
            // (the original logged the bare table name instead).
            LOG.info("Add a outputTag, key: {}, value: {}", outputTagName, outTag);
            outputTags.put(outputTagName, outTag);
        }

        SingleOutputStreamOperator<String> processedStream = env
                .fromSource(mySqlSource, WatermarkStrategy.noWatermarks(), "MySQL Source")
                .setParallelism(1)
                // Split into multiple streams: schema changes stay on the main
                // stream, each table's changelog goes to its side output.
                .process(new StreamSplitProcessor(outputTags)).name("SplitStreamByTableNameProcessor")
                .setParallelism(1);

        // Wire each table's side output to the Kafka topic at the same index.
        for (int i = 0; i < toSyncTables.length; i++) {
            String absoluteTableName = databaseName + "_dot_" + toSyncTables[i];
            OutputTag<String> outputTag =
                    outputTags.get(buildOutputTagName(databaseName, toSyncTables[i]));
            processedStream.getSideOutput(outputTag)
                    .addSink(getFlinkKafkaProducer(appArgs, i)).name(absoluteTableName).setParallelism(1);
        }
        env.execute(appArgs.getJobName() + STREAMING_JOB_SUFFIX);
    }

    /** Builds the side-output tag name for one table: {@code output_<db>_dot_<table>_ops}. */
    private static String buildOutputTagName(String databaseName, String table) {
        return OUTPUT_TAG_PREFIX + databaseName + "_dot_" + table + OUTPUT_TAG_SUFFIX;
    }

    /**
     * Creates a Kafka producer writing to the i-th topic of the comma-separated
     * topic list in the arguments.
     *
     * @param startArguments parsed CLI arguments carrying Kafka address/topics
     * @param i              zero-based index into the topic list
     * @return a string-serializing Kafka producer for the selected topic
     * @throws IllegalArgumentException if {@code i} is null or out of range
     */
    public static FlinkKafkaProducer<String> getFlinkKafkaProducer(AppArgs startArguments, Integer i) {
        String[] topics = startArguments.getKafkaTopic().split(",");
        // Guard added: the original indexed the array unchecked and failed with
        // an uninformative ArrayIndexOutOfBoundsException.
        if (i == null || i < 0 || i >= topics.length) {
            throw new IllegalArgumentException(
                    "Topic index " + i + " out of range; " + topics.length + " topic(s) configured.");
        }
        return new FlinkKafkaProducer<>(
                startArguments.getKafkaAddress(),
                topics[i],
                new SimpleStringSchema()
        );
    }

    /**
     * Creates a streaming-mode table environment (Blink planner) bound to the
     * given execution environment.
     */
    public static StreamTableEnvironment getStreamTableEnv(StreamExecutionEnvironment env) {
        EnvironmentSettings streamSetting =
                EnvironmentSettings.newInstance().useBlinkPlanner().inStreamingMode().build();
        return StreamTableEnvironment.create(env, streamSetting);
    }

    /**
     * Debezium pass-through configuration: custom date/time converters plus
     * snapshot and numeric-handling tweaks.
     *
     * @return properties handed to the CDC source's embedded Debezium engine
     */
    public static Properties getDebeziumProperties() {
        Properties properties = new Properties();
        properties.setProperty("converters", "dateConverters");
        // Adjust if the converter class moves to a different package.
        properties.setProperty("dateConverters.type", "com.changan.schema.MySqlDateTimeConverter");
        properties.setProperty("dateConverters.database.type", "mysql");
        properties.setProperty("dateConverters.format.date", "yyyy-MM-dd");
        properties.setProperty("dateConverters.format.time", "HH:mm:ss");
        properties.setProperty("dateConverters.format.datetime", "yyyy-MM-dd HH:mm:ss");
        properties.setProperty("dateConverters.format.timestamp", "yyyy-MM-dd HH:mm:ss");
        properties.setProperty("dateConverters.format.timestamp.zone", "UTC+8");
        // Skip the global read/write lock during snapshot — it can impact
        // online traffic.
        // NOTE(review): this key carries a "debezium." prefix while the others
        // do not — confirm which form the CDC connector actually honors.
        properties.setProperty("debezium.snapshot.locking.mode", "none");
        properties.setProperty("snapshot.mode", "when_needed");
        properties.setProperty("bigint.unsigned.handling.mode", "long");
        properties.setProperty("decimal.handling.mode", "string");

        return properties;
    }

    /**
     * Maps the CLI startup-mode string to CDC {@link StartupOptions}.
     * Unset or unrecognized values fall back to {@code initial}.
     */
    private static StartupOptions getStartUpMode(AppArgs appArgs) {
        String mode = appArgs.getStartUpMode();
        if (mode == null) {
            return StartupOptions.initial();
        }
        switch (mode) {
            case "initial":
                return StartupOptions.initial();
            case "earliest":
                return StartupOptions.earliest();
            case "latest":
                return StartupOptions.latest();
            default:
                // Fixed: the old message claimed no option was set even when an
                // unrecognized value was supplied.
                LOG.info("Unrecognized startup mode '{}', falling back to default: initial.", mode);
                return StartupOptions.initial();
        }
    }
}
