package com.gitee.melin.bee.core.jdbc.dialect.impl;

import static com.gitee.melin.bee.core.jdbc.enums.SaslMchanism.AWS_MSK_IAM;
import static com.gitee.melin.bee.core.jdbc.enums.SaslMchanism.GSSAPI;
import static com.gitee.melin.bee.core.jdbc.enums.SaslMchanism.PLAIN;
import static com.gitee.melin.bee.core.jdbc.enums.SaslMchanism.SCRAM_SHA_256;
import static com.gitee.melin.bee.core.jdbc.enums.SaslMchanism.SCRAM_SHA_512;

import com.gitee.melin.bee.core.jdbc.QueryResult;
import com.gitee.melin.bee.core.jdbc.dialect.JdbcDialect;
import com.gitee.melin.bee.core.jdbc.enums.DataSourceType;
import com.gitee.melin.bee.core.jdbc.relational.*;
import com.google.common.collect.Lists;
import java.sql.Statement;
import java.util.*;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.TimeUnit;
import java.util.stream.Collectors;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.exception.ExceptionUtils;
import org.apache.kafka.clients.admin.AdminClient;
import org.apache.kafka.clients.admin.Config;
import org.apache.kafka.common.Node;
import org.apache.kafka.common.config.ConfigResource;
import org.apache.kafka.common.errors.ClusterAuthorizationException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * {@link JdbcDialect} implementation backed by the Kafka {@link AdminClient}.
 *
 * <p>Only metadata-style operations are supported (cluster probe, topic listing);
 * all SQL-oriented operations throw {@link UnsupportedOperationException}.
 *
 * <p>Not thread-safe beyond the guarantees of the underlying {@link AdminClient}.
 */
public class KafkaDialect implements JdbcDialect {

    private static final Logger LOG = LoggerFactory.getLogger(KafkaDialect.class);

    /** Connection settings this dialect was created with; exposed for subclasses. */
    protected ConnectionInfo connectionInfo;

    /** Shared admin client; remains {@code null} when constructed with a null ConnectionInfo. */
    private AdminClient admin;

    /**
     * Creates the dialect and eagerly opens an {@link AdminClient}.
     *
     * @param connectionInfo connection settings; when {@code null} the instance is created
     *                       without a client (every client-backed method will then NPE)
     */
    public KafkaDialect(ConnectionInfo connectionInfo) {
        if (connectionInfo == null) {
            return;
        }

        // FIX: the connectionInfo field was declared but never assigned, so
        // subclasses reading it always saw null.
        this.connectionInfo = connectionInfo;

        Properties properties = buildProperties(connectionInfo);
        admin = AdminClient.create(properties);
    }

    /** Returns the underlying {@link AdminClient}, or {@code null} if none was created. */
    @Override
    public Object getClient() {
        return this.admin;
    }

    @Override
    public DataSourceType getDataSourceType() {
        // FIX: previously returned DataSourceType.ELASTICSEARCH — a copy-paste bug
        // in a Kafka dialect.
        return DataSourceType.KAFKA;
    }

    /**
     * Probes the cluster by describing its nodes and reading one broker's config.
     *
     * <p>If the caller lacks cluster-describe permission
     * ({@link ClusterAuthorizationException}), the connection is still considered
     * good and a version-less {@link MetaDataSource} is returned.
     *
     * @param connectionInfo settings used to build a short-lived AdminClient
     * @return product name ("Kafka") and, when readable, the broker protocol version
     * @throws RuntimeException when no broker responds or any other error occurs
     */
    @Override
    public MetaDataSource testConnection(ConnectionInfo connectionInfo) {
        Properties properties = buildProperties(connectionInfo);
        try (AdminClient admin = AdminClient.create(properties)) {
            // NOTE(review): 100 SECONDS looks long for a connectivity probe given
            // request.timeout.ms=30000 above — was 100ms or 10s intended? Kept as-is.
            Collection<Node> nodes = admin.describeCluster().nodes().get(100, TimeUnit.SECONDS);
            if (nodes.isEmpty()) {
                throw new RuntimeException("No brokers available to obtain default settings");
            }

            // Any single broker is enough to read the cluster-wide protocol version.
            String nodeId = nodes.iterator().next().idString();
            Set<ConfigResource> resources =
                    Collections.singleton(new ConfigResource(ConfigResource.Type.BROKER, nodeId));
            Map<ConfigResource, Config> configs =
                    admin.describeConfigs(resources).all().get();
            Config config = configs.values().iterator().next();
            MetaDataSource metaDataSource = new MetaDataSource();
            metaDataSource.setDatabaseProductName("Kafka");
            metaDataSource.setDatabaseProductVersion(
                    config.get("inter.broker.protocol.version").value());
            return metaDataSource;
        } catch (Exception e) {
            Throwable root = ExceptionUtils.getRootCause(e);
            // instanceof is null-safe, so no separate null check is needed.
            if (root instanceof ClusterAuthorizationException) {
                // Credentials are valid but lack describe permission: treat as reachable.
                LOG.warn("test kafka connection failure: {}", root.getMessage());
                MetaDataSource metaDataSource = new MetaDataSource();
                metaDataSource.setDatabaseProductName("Kafka");
                return metaDataSource;
            } else {
                throw new RuntimeException("test kafka connection failed: " + e.getMessage(), e);
            }
        }
    }

    /**
     * Builds AdminClient properties from the connection info.
     *
     * <p>User-supplied properties are applied first so that the mandatory entries
     * (bootstrap servers, security protocol) always win.
     *
     * @throws IllegalArgumentException for an unsupported protocol
     */
    private Properties buildProperties(ConnectionInfo connectionInfo) {
        Properties properties = new Properties();
        properties.put("request.timeout.ms", "30000");
        properties.put("sasl.login.read.timeout.ms", "3000");

        if (connectionInfo.getProperties() != null) {
            connectionInfo.getProperties().forEach(properties::put);
        }

        properties.put("bootstrap.servers", connectionInfo.getEndpoint());
        String protocol = connectionInfo.getProtocol().toUpperCase();

        if (StringUtils.equalsIgnoreCase(protocol, "SASL_PLAINTEXT") && connectionInfo.getSaslMchanism() != null) {
            properties.put("security.protocol", protocol);
            buildSaslProperties(properties, connectionInfo);
        } else if (StringUtils.equalsIgnoreCase(protocol, "SSL")) {
            properties.put("security.protocol", protocol);
            buildSslProperties(properties, connectionInfo);
        } else if (StringUtils.equalsIgnoreCase(protocol, "SASL_SSL")) {
            properties.put("security.protocol", protocol);
            buildSaslProperties(properties, connectionInfo);
            buildSslProperties(properties, connectionInfo);
        } else if (!StringUtils.equalsIgnoreCase(protocol, "None")) {
            throw new IllegalArgumentException(
                    "not support protocol: " + protocol + ", SaslMchanism: " + connectionInfo.getSaslMchanism());
        }
        return properties;
    }

    /**
     * Populates SASL properties for the configured mechanism.
     *
     * <p>An explicit JAAS config string, when present, overrides everything else.
     * Otherwise the JAAS config is synthesized per mechanism:
     * AWS_MSK_IAM, PLAIN, SCRAM-SHA-256/512, or GSSAPI (Kerberos).
     *
     * @throws IllegalArgumentException for an unsupported mechanism or missing
     *                                  Kerberos files/principal
     */
    private void buildSaslProperties(Properties properties, ConnectionInfo connectionInfo) {
        properties.put("sasl.mechanism", connectionInfo.getSaslMchanism().toString());

        if (StringUtils.isNotBlank(connectionInfo.getJaasConfig())) {
            properties.put("sasl.jaas.config", connectionInfo.getJaasConfig());
        } else {

            if (AWS_MSK_IAM == connectionInfo.getSaslMchanism()) {
                properties.put("sasl.mechanism", "AWS_MSK_IAM");
                properties.put("sasl.jaas.config", "software.amazon.msk.auth.iam.IAMLoginModule required;");
                properties.put(
                        "sasl.client.callback.handler.class", "software.amazon.msk.auth.iam.IAMClientCallbackHandler");
                return;
            }

            // Login module class depends on the mechanism.
            String className = "";
            if (PLAIN == connectionInfo.getSaslMchanism()) {
                className = "org.apache.kafka.common.security.plain.PlainLoginModule";
            } else if (SCRAM_SHA_256 == connectionInfo.getSaslMchanism()
                    || SCRAM_SHA_512 == connectionInfo.getSaslMchanism()) {
                className = "org.apache.kafka.common.security.scram.ScramLoginModule";
            } else if (GSSAPI == connectionInfo.getSaslMchanism()) {
                className = "com.sun.security.auth.module.Krb5LoginModule";
            } else {
                throw new IllegalArgumentException("not support saslMchanism: " + connectionInfo.getSaslMchanism());
            }

            if (GSSAPI == connectionInfo.getSaslMchanism()) {
                if (connectionInfo.getKrb5File() == null) {
                    throw new IllegalArgumentException("SaslMchanism: GSSAPI, krb5 file can not null");
                }
                if (connectionInfo.getKeytabFile() == null) {
                    throw new IllegalArgumentException("SaslMchanism: GSSAPI, keytab file can not null");
                }

                // NOTE(review): this mutates JVM-global state and affects every
                // Kerberos consumer in the process.
                System.setProperty("java.security.krb5.conf", connectionInfo.getKrb5File());

                if (StringUtils.isBlank(connectionInfo.getServicePrincipal())) {
                    String msg = "protocol: "
                            + connectionInfo.getProtocol()
                            + ", SaslMchanism: GSSAPI. Service principal can not blank: sasl.kerberos.service.name";
                    throw new IllegalArgumentException(msg);
                }

                properties.put("sasl.kerberos.service.name", connectionInfo.getServicePrincipal());
                properties.put(
                        "sasl.jaas.config",
                        String.format(
                                "%s required\n"
                                        + "useKeyTab=true\n"
                                        + "storeKey=true\n"
                                        + "useTicketCache=false\n"
                                        + "keyTab=\"%s\"\n"
                                        + "principal=\"%s\";",
                                className, connectionInfo.getKeytabFile(), connectionInfo.getPrincipal()));
            } else {
                properties.put(
                        "sasl.jaas.config",
                        String.format(
                                "%s required " + "username=\"%s\"   " + "password=\"%s\";",
                                className, connectionInfo.getUsername(), connectionInfo.getPassword()));
            }
        }
    }

    /**
     * Populates SSL key/trust store properties; only non-blank settings are applied.
     * Hostname verification is disabled by default unless the caller configured
     * {@code ssl.endpoint.identification.algorithm} explicitly.
     */
    private void buildSslProperties(Properties properties, ConnectionInfo connectionInfo) {
        if (StringUtils.isNotBlank(connectionInfo.getSslKeyPassword())) {
            properties.put("ssl.key.password", connectionInfo.getSslKeyPassword());
        }

        if (StringUtils.isNotBlank(connectionInfo.getSslTruststoreLocation())) {
            properties.put("ssl.truststore.location", connectionInfo.getSslTruststoreLocation());
        }
        if (StringUtils.isNotBlank(connectionInfo.getSslTruststorePassword())) {
            properties.put("ssl.truststore.password", connectionInfo.getSslTruststorePassword());
        }

        if (StringUtils.isNotBlank(connectionInfo.getSslKeystoreLocation())) {
            properties.put("ssl.keystore.location", connectionInfo.getSslKeystoreLocation());
        }
        if (StringUtils.isNotBlank(connectionInfo.getSslKeystorePassword())) {
            properties.put("ssl.keystore.password", connectionInfo.getSslKeystorePassword());
        }

        if (!properties.containsKey("ssl.endpoint.identification.algorithm")) {
            // Empty string disables server hostname verification.
            properties.put("ssl.endpoint.identification.algorithm", "");
        }
    }

    @Override
    public DatabaseVersion getDatabaseVersion() {
        throw new UnsupportedOperationException();
    }

    @Override
    public List<String> getDatabases() {
        throw new UnsupportedOperationException();
    }

    @Override
    public List<String> getSchemas() {
        throw new UnsupportedOperationException();
    }

    /**
     * Lists topic names; {@code schemaName} is ignored (Kafka has no schemas).
     *
     * @throws RuntimeException wrapping any client failure or the 3s timeout
     */
    @Override
    public List<String> getTableNames(String schemaName) {
        try {
            Set<String> topicNames = admin.listTopics().names().get(3, TimeUnit.SECONDS);
            return Lists.newArrayList(topicNames);
        } catch (Exception e) {
            throw new RuntimeException(e);
        }
    }

    /**
     * Lists topics as {@link SimpleMetaTable}s; {@code schemaName} is ignored.
     *
     * @throws RuntimeException wrapping any client failure or the 3s timeout
     */
    @Override
    public List<SimpleMetaTable> getSimpleTables(String schemaName) {
        try {
            Set<String> topicNames = admin.listTopics().names().get(3, TimeUnit.SECONDS);
            return topicNames.stream()
                    .map(name -> {
                        SimpleMetaTable table = new SimpleMetaTable();
                        table.setTableName(name);
                        return table;
                    })
                    .collect(Collectors.toList());
        } catch (Exception e) {
            throw new RuntimeException(e);
        }
    }

    @Override
    public MetaTable getSchemaTable(String schemaName, String tableName, boolean containColumn) {
        throw new UnsupportedOperationException();
    }

    @Override
    public List<MetaTable> getSchemaTables(String schemaName) {
        throw new UnsupportedOperationException();
    }

    @Override
    public LinkedHashSet<String> getPrimaryKeys(String schemaName, String tableName) {
        throw new UnsupportedOperationException();
    }

    @Override
    public List<MetaColumn> getSchemaColumns(String schemaName, String tableName) {
        throw new UnsupportedOperationException();
    }

    @Override
    public String getCreateTableScript(BeeTableType type, String schemaName, String tableName) {
        throw new UnsupportedOperationException();
    }

    @Override
    public Statement getQueryStatement() {
        throw new UnsupportedOperationException();
    }

    @Override
    public CompletableFuture<QueryResult> asyncQuery(String sql) {
        throw new UnsupportedOperationException();
    }

    @Override
    public CompletableFuture<QueryResult> asyncQuery(String schema, String sql) {
        throw new UnsupportedOperationException();
    }

    @Override
    public CompletableFuture<QueryResult> asyncQuery(String sql, int maxRecords) {
        throw new UnsupportedOperationException();
    }

    @Override
    public CompletableFuture<QueryResult> asyncQuery(String schema, String sql, int maxRecords) {
        throw new UnsupportedOperationException();
    }

    @Override
    public QueryResult query(String sql) {
        throw new UnsupportedOperationException();
    }

    @Override
    public QueryResult query(String schema, String sql) {
        throw new UnsupportedOperationException();
    }

    @Override
    public QueryResult query(String sql, int maxRecords) {
        throw new UnsupportedOperationException();
    }

    @Override
    public QueryResult query(String schema, String sql, int maxRecords) {
        throw new UnsupportedOperationException();
    }

    @Override
    public long queryCount(String sql, Object... params) {
        throw new UnsupportedOperationException();
    }

    @Override
    public List<Map<String, Object>> queryForList(String sql, Object... params) {
        throw new UnsupportedOperationException();
    }

    @Override
    public Boolean execute(String sql) {
        throw new UnsupportedOperationException();
    }

    @Override
    public Boolean execute(String schema, String sql) {
        throw new UnsupportedOperationException();
    }

    @Override
    public CompletableFuture<Boolean> asyncExecute(String sql) {
        throw new UnsupportedOperationException();
    }

    @Override
    public CompletableFuture<Boolean> asyncExecute(String schema, String sql) {
        throw new UnsupportedOperationException();
    }

    @Override
    public Integer executeUpdate(String sql, Object... params) {
        throw new UnsupportedOperationException();
    }

    @Override
    public Integer executeUpdate(SchemaIdentifier schema, String sql, Object... params) {
        // NOTE(review): inconsistent with every sibling method, which throws
        // UnsupportedOperationException. Kept returning null in case callers
        // depend on it — confirm and align.
        return null;
    }

    @Override
    public CompletableFuture<Integer> asyncExecuteUpdate(String sql, Object... params) {
        throw new UnsupportedOperationException();
    }

    @Override
    public CompletableFuture<Integer> asyncExecuteUpdate(SchemaIdentifier schema, String sql, Object... params) {
        throw new UnsupportedOperationException();
    }

    /** Closes the shared admin client, if one was created. */
    @Override
    public void close() throws Exception {
        if (admin != null) {
            admin.close();
        }
    }
}
