package com.ververica.cdc.guass.source.hybrid;

import com.ververica.cdc.guass.Constants;
import com.ververica.cdc.guass.sink.jdbc.JdbcConnectionOptions;
import com.ververica.cdc.guass.source.kafka.KafkaConnectionOptions;
import com.ververica.cdc.guass.source.kafka.table.PhysicalColumn;
import org.apache.flink.configuration.ConfigOption;
import org.apache.flink.configuration.ConfigOptions;
import org.apache.flink.configuration.ReadableConfig;
import org.apache.flink.table.catalog.ResolvedSchema;
import org.apache.flink.table.connector.source.DynamicTableSource;
import org.apache.flink.table.factories.DynamicTableSourceFactory;
import org.apache.flink.table.factories.FactoryUtil;

import java.util.*;
import java.util.stream.Collectors;

/**
 * Table factory for the hybrid (snapshot + Kafka CDC) source connector.
 *
 * <p>Registered under the identifier {@link Constants#GAUSS_CDC_CONNECTOR_NAME}. It reads the
 * JDBC connection options (for the initial snapshot phase) and the Kafka options (for the
 * incremental change-log phase) from the table's WITH clause and builds a
 * {@link HybridTableSource} from them plus the resolved physical schema and primary key.
 */
public class HybridTableFactory implements DynamicTableSourceFactory {

    /** Value of the {@code 'connector'} option that selects this factory. */
    private static final String IDENTIFIER = Constants.GAUSS_CDC_CONNECTOR_NAME;

    private static final ConfigOption<String> TOPIC = ConfigOptions.key("topic")
            .stringType()
            .noDefaultValue()
            .withDescription("Kafka topic to consume from.");

    private static final ConfigOption<String> BOOTSTRAP_SERVERS = ConfigOptions.key("properties.bootstrap.servers")
            .stringType()
            .noDefaultValue()
            .withDescription("Kafka bootstrap server address.");

    private static final ConfigOption<String> GROUP_ID = ConfigOptions.key("properties.group.id")
            .stringType()
            .noDefaultValue()
            .withDescription("Kafka consumer group id.");

    private static final ConfigOption<String> TABLE_NAME = ConfigOptions.key("table-name")
            .stringType()
            .noDefaultValue()
            .withDescription("The name of the table to monitor.");

    private static final ConfigOption<String> SCAN_STARTUP_MODE = ConfigOptions.key("scan.startup.mode")
            .stringType()
            .defaultValue("latest")
            .withDescription("The startup mode for the Kafka consumer (earliest, latest, specific-offsets, timestamp).");

    private static final ConfigOption<Boolean> ENABLE_PARALLEL_READ = ConfigOptions.key("enable-parallel-read")
            .booleanType()
            .defaultValue(false)
            .withDescription("Enable parallel read.");

    private static final ConfigOption<String> JDBC_URL = ConfigOptions.key("url")
            .stringType()
            .noDefaultValue()
            .withDescription("JDBC URL for the database connection.");

    private static final ConfigOption<String> JDBC_USER = ConfigOptions.key("username")
            .stringType()
            .noDefaultValue()
            .withDescription("Username for database connection.");

    private static final ConfigOption<String> JDBC_PASSWORD = ConfigOptions.key("password")
            .stringType()
            .noDefaultValue()
            .withDescription("Password for database connection.");

    /**
     * Creates the hybrid source from the table's resolved schema and WITH-clause options.
     *
     * @param context factory context carrying the catalog table and its options
     * @return a {@link HybridTableSource} configured for both the JDBC snapshot phase and the
     *     Kafka change-log phase
     */
    @Override
    public DynamicTableSource createDynamicTableSource(Context context) {
        final FactoryUtil.TableFactoryHelper helper = FactoryUtil.createTableFactoryHelper(this, context);
        // Fail fast on missing required options or unrecognized option keys before
        // reading any configuration values.
        helper.validate();

        final ReadableConfig config = helper.getOptions();
        boolean enableParallelRead = config.get(ENABLE_PARALLEL_READ);
        String tableName = config.get(TABLE_NAME);

        // JDBC connection used for the initial snapshot read.
        JdbcConnectionOptions jdbcOptions = new JdbcConnectionOptions.JdbcConnectionOptionsBuilder()
                .withUrl(config.get(JDBC_URL))
                .withUsername(config.get(JDBC_USER))
                .withPassword(config.get(JDBC_PASSWORD))
                .build();

        // Kafka connection used for the incremental change-log read.
        KafkaConnectionOptions kafkaOptions = new KafkaConnectionOptions(
                config.get(BOOTSTRAP_SERVERS),
                config.get(TOPIC),
                config.get(GROUP_ID),
                config.get(SCAN_STARTUP_MODE)
        );

        // Resolve the schema once; it supplies both the physical columns and the primary key.
        ResolvedSchema schema = context.getCatalogTable().getResolvedSchema();
        List<PhysicalColumn> physicalColumns = schema.getColumns().stream()
                .map(column -> new PhysicalColumn(column.getName(), column.getDataType()))
                .collect(Collectors.toList());

        // Copy the primary-key column names; empty list when no primary key is declared.
        List<String> primaryKeys = schema.getPrimaryKey()
                .<List<String>>map(pk -> new ArrayList<>(pk.getColumns()))
                .orElse(Collections.emptyList());

        return new HybridTableSource(
                enableParallelRead,
                jdbcOptions,
                kafkaOptions,
                physicalColumns,
                primaryKeys,
                tableName
        );
    }

    @Override
    public String factoryIdentifier() {
        return IDENTIFIER;
    }

    /** Options that must be present in the WITH clause; validated by {@code helper.validate()}. */
    @Override
    public Set<ConfigOption<?>> requiredOptions() {
        Set<ConfigOption<?>> options = new HashSet<>();
        options.add(TOPIC);
        options.add(BOOTSTRAP_SERVERS);
        options.add(GROUP_ID);
        options.add(TABLE_NAME);
        options.add(JDBC_URL);
        options.add(JDBC_USER);
        options.add(JDBC_PASSWORD);
        return options;
    }

    /** Options that may be omitted; their declared defaults apply. */
    @Override
    public Set<ConfigOption<?>> optionalOptions() {
        Set<ConfigOption<?>> options = new HashSet<>();
        options.add(SCAN_STARTUP_MODE);
        options.add(ENABLE_PARALLEL_READ);
        return options;
    }
}