package com.ververica.cdc.guass.source.hybrid;

import com.ververica.cdc.guass.Constants;
import com.ververica.cdc.guass.sink.jdbc.JdbcConnectionOptions;
import com.ververica.cdc.guass.source.kafka.KafkaConnectionOptions;
import com.ververica.cdc.guass.source.kafka.GaussKafkaSourceFunction;
import com.ververica.cdc.guass.source.kafka.table.ChangeEventToRowDataSourceFunction;
import com.ververica.cdc.guass.source.kafka.table.PhysicalColumn;
import org.apache.flink.api.common.functions.RuntimeContext;
import org.apache.flink.api.connector.source.Source;
import org.apache.flink.table.connector.ChangelogMode;
import org.apache.flink.table.connector.source.DynamicTableSource;
import org.apache.flink.table.connector.source.ScanTableSource;
import org.apache.flink.table.connector.source.SourceFunctionProvider;
import org.apache.flink.table.connector.source.SourceProvider;
import org.apache.flink.table.data.RowData;
import org.apache.flink.types.RowKind;
import java.util.List;

/**
 * A {@link ScanTableSource} for the GaussDB CDC connector that supports two read modes:
 *
 * <ul>
 *   <li>parallel read — backed by a {@link HybridParallelSource} (new {@code Source} API),
 *       selected when {@code enableParallelRead} is {@code true};</li>
 *   <li>incremental read — backed by a Kafka-based {@code SourceFunction} pipeline
 *       ({@link GaussKafkaSourceFunction} wrapped in a
 *       {@link ChangeEventToRowDataSourceFunction}).</li>
 * </ul>
 *
 * <p>The produced changelog contains INSERT, UPDATE_AFTER and DELETE rows (upsert-style,
 * no UPDATE_BEFORE).
 */
public class HybridTableSource implements ScanTableSource {

    /** Whether to use the parallel {@code Source}-API path instead of the SourceFunction path. */
    private final boolean enableParallelRead;
    /** JDBC connection options used by the parallel (snapshot-capable) source. */
    private final JdbcConnectionOptions jdbcOptions;
    /** Kafka connection options used for reading change events. */
    private final KafkaConnectionOptions kafkaOptions;
    /** Physical schema of the table, used to convert change events to {@link RowData}. */
    private final List<PhysicalColumn> physicalColumns;
    /** Primary key column names of the captured table. */
    private final List<String> primaryKeys;
    /** Fully qualified name of the captured table. */
    private final String tableName;

    public HybridTableSource(
            boolean enableParallelRead,
            JdbcConnectionOptions jdbcOptions,
            KafkaConnectionOptions kafkaOptions,
            List<PhysicalColumn> physicalColumns,
            List<String> primaryKeys,
            String tableName) {
        this.enableParallelRead = enableParallelRead;
        this.jdbcOptions = jdbcOptions;
        this.kafkaOptions = kafkaOptions;
        this.physicalColumns = physicalColumns;
        this.primaryKeys = primaryKeys;
        this.tableName = tableName;
    }

    @Override
    public ChangelogMode getChangelogMode() {
        // Upsert-style changelog: UPDATE_BEFORE is intentionally omitted.
        return ChangelogMode.newBuilder()
                .addContainedKind(RowKind.INSERT)
                .addContainedKind(RowKind.UPDATE_AFTER)
                .addContainedKind(RowKind.DELETE)
                .build();
    }

    @Override
    public ScanRuntimeProvider getScanRuntimeProvider(ScanContext scanContext) {
        // Choose the runtime provider based on the configured read mode.
        if (enableParallelRead) {
            return SourceProvider.of(createParallelSource());
        } else {
            // 'false' => the source is unbounded (continuous streaming read).
            return SourceFunctionProvider.of(createIncrementalSourceFunction(), false);
        }
    }

    /** Builds the parallel (new Source API) reader combining JDBC snapshot and Kafka stream. */
    private Source<RowData, ?, ?> createParallelSource() {
        return new HybridParallelSource(
                jdbcOptions,
                kafkaOptions,
                physicalColumns,
                primaryKeys,
                tableName
        );
    }

    /**
     * Builds the non-parallel incremental reader: a Kafka consumer for change events,
     * wrapped in a converter that maps each change event to {@link RowData} using the
     * table's physical columns.
     */
    private ChangeEventToRowDataSourceFunction createIncrementalSourceFunction() {
        GaussKafkaSourceFunction gaussKafkaSourceFunction = new GaussKafkaSourceFunction(
                kafkaOptions.getTopic(),
                kafkaOptions.getBootstrapServers(),
                kafkaOptions.getGroupId(),
                tableName,
                kafkaOptions.getScanStartupMode()
        );
        return new ChangeEventToRowDataSourceFunction(gaussKafkaSourceFunction, physicalColumns);
    }

    @Override
    public DynamicTableSource copy() {
        // Shallow copy is sufficient: all fields are treated as immutable configuration.
        return new HybridTableSource(
                enableParallelRead,
                jdbcOptions,
                kafkaOptions,
                physicalColumns,
                primaryKeys,
                tableName
        );
    }

    @Override
    public String asSummaryString() {
        return Constants.GAUSS_CDC_CONNECTOR_NAME;
    }
}