package com.ververica.cdc.guass.source.kafka.table;

import com.ververica.cdc.guass.Constants;
import org.apache.flink.api.common.ExecutionConfig;
import org.apache.flink.configuration.ConfigOption;
import org.apache.flink.configuration.ConfigOptions;
import org.apache.flink.configuration.ReadableConfig;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.catalog.ResolvedSchema;
import org.apache.flink.table.connector.source.DynamicTableSource;
import org.apache.flink.table.factories.DynamicTableSourceFactory;
import org.apache.flink.table.factories.FactoryUtil;
import org.apache.kafka.clients.admin.AdminClient;
import org.apache.kafka.clients.admin.DescribeTopicsResult;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.util.*;
import java.util.stream.Collectors;

/**
 * Flink {@link DynamicTableSourceFactory} for the Gauss Kafka CDC connector.
 *
 * <p>Resolves the table's DDL options ({@code topic}, bootstrap servers, group id,
 * monitored table name, and an optional scan startup mode), extracts the physical
 * columns from the resolved catalog schema, and builds a
 * {@link GaussKafkaDynamicTableSource}.
 */
public class GaussKafkaTableFactory implements DynamicTableSourceFactory {

    // Static: one logger per class rather than per factory instance.
    private static final Logger LOG = LoggerFactory.getLogger(GaussKafkaTableFactory.class);

    /** Connector identifier matched against the DDL's {@code 'connector' = ...} value. */
    private static final String IDENTIFIER = Constants.GAUSS_KAFKA_CDC_CONNECTOR_NAME;

    private static final ConfigOption<String> TOPIC = ConfigOptions.key("topic")
            .stringType()
            .noDefaultValue()
            .withDescription("Kafka topic to consume from.");

    private static final ConfigOption<String> BOOTSTRAP_SERVERS = ConfigOptions.key("properties.bootstrap.servers")
            .stringType()
            .noDefaultValue()
            .withDescription("Kafka bootstrap server address.");

    private static final ConfigOption<String> GROUP_ID = ConfigOptions.key("properties.group.id")
            .stringType()
            .noDefaultValue()
            .withDescription("Kafka consumer group id.");

    private static final ConfigOption<String> TABLE_NAME = ConfigOptions.key("table-name")
            .stringType()
            .noDefaultValue()
            .withDescription("The name of the table to monitor.");

    // FIX: key was "scan.startup.ode" — typo for the conventional "scan.startup.mode",
    // which is what users following the standard Kafka-connector option naming will write.
    private static final ConfigOption<String> SCAN_STARTUP_MODE = ConfigOptions.key("scan.startup.mode")
            .stringType()
            .noDefaultValue()
            .withDescription("The startup mode for the Kafka consumer.");

    /**
     * Builds the dynamic table source from the validated DDL options and the
     * physical columns of the resolved catalog schema.
     *
     * @param context factory context carrying the catalog table and its options
     * @return a configured {@link GaussKafkaDynamicTableSource}
     */
    @Override
    public DynamicTableSource createDynamicTableSource(Context context) {

        final FactoryUtil.TableFactoryHelper helper = FactoryUtil.createTableFactoryHelper(this, context);
        // Fail fast on missing required options or unrecognized keys — the standard
        // Flink factory idiom; previously validation was never invoked.
        helper.validate();

        final ReadableConfig config = helper.getOptions();
        String topic = config.get(TOPIC);
        String bootstrapServers = config.get(BOOTSTRAP_SERVERS);
        String groupId = config.get(GROUP_ID);
        String tableName = config.get(TABLE_NAME);
        // Optional; null when absent — downstream source decides the default behavior.
        String scanStartupMode = config.get(SCAN_STARTUP_MODE);

        // Only physical (non-computed, non-metadata) columns are forwarded to the source.
        ResolvedSchema physicalSchema = context.getCatalogTable().getResolvedSchema();
        List<PhysicalColumn> physicalColumns = physicalSchema.getColumns().stream()
                .map(column -> new PhysicalColumn(column.getName(), column.getDataType()))
                .collect(Collectors.toList());

        LOG.info("Creating Gauss Kafka table source for topic '{}', table '{}'.", topic, tableName);

        return new GaussKafkaDynamicTableSource(
                physicalColumns,
                topic,
                bootstrapServers,
                groupId,
                tableName,
                scanStartupMode
        );
    }

    @Override
    public String factoryIdentifier() {
        return IDENTIFIER;
    }

    @Override
    public Set<ConfigOption<?>> requiredOptions() {
        Set<ConfigOption<?>> options = new HashSet<>();
        options.add(TOPIC);
        options.add(BOOTSTRAP_SERVERS);
        options.add(GROUP_ID);
        options.add(TABLE_NAME);
        return options;
    }

    @Override
    public Set<ConfigOption<?>> optionalOptions() {
        // FIX: SCAN_STARTUP_MODE was declared in neither requiredOptions() nor
        // optionalOptions(), so FactoryUtil validation would reject it as an
        // unconsumed option even though createDynamicTableSource reads it.
        Set<ConfigOption<?>> options = new HashSet<>();
        options.add(SCAN_STARTUP_MODE);
        return options;
    }
}