package com.central.dataManage.common.datasourcePlugin;

import com.alibaba.druid.sql.ast.SQLStatement;
import com.alibaba.druid.sql.dialect.mysql.ast.statement.MySqlCreateTableStatement;
import com.central.common.constant.CommonConstant;
import com.central.common.model.ColumnInfo;
import com.central.common.model.DatasourceInfo;
import com.central.common.model.IndexInfo;
import com.central.common.model.TableInfo;
import com.central.dataManage.common.utils.PropertyUtils;
import com.central.dataManage.model.DatabaseInfo;
import com.mysql.cj.xdevapi.SqlStatement;
import com.mysql.cj.xdevapi.SqlStatementImpl;
import io.itit.itf.okhttp.FastHttpClient;
import io.itit.itf.okhttp.util.FileUtil;
import org.apache.commons.lang3.StringUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.hdfs.client.HdfsAdmin;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.kafka.clients.admin.AdminClient;
import org.apache.kafka.clients.admin.KafkaAdminClient;
import org.apache.kafka.clients.admin.ListTopicsResult;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.Producer;
import org.apache.kafka.common.config.SaslConfigs;

import java.io.File;
import java.io.InputStream;
import java.net.URI;
import java.sql.Connection;
import java.sql.SQLException;
import java.util.*;
import java.util.concurrent.ExecutionException;
import java.util.function.Predicate;
import java.util.stream.Collectors;

import static com.central.dataManage.common.Constants.STORE_KEYTAB_DIR;
import static com.central.dataManage.common.Constants.STORE_KEYTAB_DIR_DEFAULT;

/**
 * Kafka datasource plugin. Supplies Kafka producer/admin clients (with optional
 * Kerberos/SASL authentication via downloaded keytab + krb5.conf) and implements
 * topic-based database/table deletion; JDBC-style metadata operations are
 * unsupported and throw.
 *
 * @author Tindy
 * @date 2021/7/20
 */
public class KafkaDatasourcePlugin extends BaseDatasourcePlugin {

    public KafkaDatasourcePlugin(DatasourceInfo datasource) {
        super(datasource);
    }

    /**
     * Builds a fresh exception for JDBC-style operations Kafka does not support.
     * A new instance is created per throw so the stack trace points at the actual
     * call site (the previous shared field captured only the construction site and
     * a single throwable instance is unsafe to rethrow repeatedly).
     */
    private IllegalArgumentException unsupported() {
        return new IllegalArgumentException("kafka数据源不支持该操作");
    }

    @Override
    public String getJdbcUrl() {
        throw unsupported();
    }

    @Override
    public Connection getConn() throws SQLException {
        throw unsupported();
    }

    /** @return true when the datasource carries both a keytab and a krb5.conf URL. */
    private boolean isKerberosEnabled() {
        return StringUtils.isNotBlank(getDatasource().getKeytabUrl())
                && StringUtils.isNotBlank(getDatasource().getKrb5ConfUrl());
    }

    /**
     * Applies SASL/GSSAPI (Kerberos) settings to {@code props}, downloading the
     * keytab and krb5.conf on first use.
     *
     * <p>Caller MUST hold {@code CommonConstant.KRB5CONF_LOCK}: the
     * {@code java.security.krb5.conf} system property is JVM-global, so setting it
     * and creating the client must happen atomically with respect to other plugins.
     *
     * @param props client properties to mutate
     * @throws Exception if downloading the keytab/krb5.conf fails
     */
    private void applyKerberosProps(Properties props) throws Exception {
        System.clearProperty("java.security.krb5.conf");
        System.setProperty("java.security.krb5.conf", getKrb5ConfPath(getDatasource()));
        // Forward slashes keep the JAAS config parseable on Windows paths.
        String keytabPath = getKeyTabPath(getDatasource()).replace("\\", "/");
        props.put("security.protocol", "SASL_PLAINTEXT");
        props.put("sasl.mechanism", "GSSAPI");
        props.put("sasl.kerberos.service.name", "kafka");
        props.put(SaslConfigs.SASL_JAAS_CONFIG,
                "com.sun.security.auth.module.Krb5LoginModule required " +
                        "useKeyTab=true " +
                        "keyTab='" + keytabPath + "' " +
                        "principal='" + getDatasource().getUsername() + "' " +
                        "useTicketCache=true;");
    }

    /**
     * Creates a String/String producer for this datasource, configuring Kerberos
     * when the datasource provides keytab + krb5.conf URLs.
     *
     * @return a new producer, or {@code null} if client creation fails
     *         (historical best-effort contract — callers may null-check)
     */
    public KafkaProducer<String, String> getProducer() {
        Properties props = new Properties();
        // Bootstrap servers: not every broker needs to be listed.
        props.put("bootstrap.servers", getDatasource().getHost());
        // NOTE(review): the two auto-commit entries below are consumer settings and
        // are ignored by the producer; kept byte-for-byte to preserve behavior.
        props.put("enable.auto.commit", "true");
        props.put("auto.commit.interval.ms", "1000");
        props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
        props.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer");
        try {
            if (isKerberosEnabled()) {
                // Lock spans both property setup and client creation: JAAS/krb5
                // config is read during the client's Kerberos login.
                synchronized (CommonConstant.KRB5CONF_LOCK) {
                    applyKerberosProps(props);
                    return new KafkaProducer<>(props);
                }
            }
            return new KafkaProducer<>(props);
        } catch (Exception e) {
            e.printStackTrace();
            return null;
        }
    }

    /**
     * Creates an {@link AdminClient} for this datasource, configuring Kerberos
     * when the datasource provides keytab + krb5.conf URLs.
     *
     * @return a new admin client, or {@code null} if creation fails
     *         (historical best-effort contract — callers may null-check)
     */
    public AdminClient getAdminClient() {
        Properties props = new Properties();
        // Bootstrap servers: not every broker needs to be listed.
        props.put("bootstrap.servers", getDatasource().getHost());
        try {
            if (isKerberosEnabled()) {
                // Lock spans both property setup and client creation: JAAS/krb5
                // config is read during the client's Kerberos login.
                synchronized (CommonConstant.KRB5CONF_LOCK) {
                    applyKerberosProps(props);
                    return AdminClient.create(props);
                }
            }
            return AdminClient.create(props);
        } catch (Exception e) {
            e.printStackTrace();
            return null;
        }
    }

    /**
     * Returns the local path of {@code fileName} under the keytab store directory,
     * downloading it from {@code url} on first access. The stream is closed via
     * try-with-resources even when saving fails (the original leaked it on error).
     */
    private String fetchToStoreDir(String url, String fileName) throws Exception {
        File file = new File(PropertyUtils.getString(STORE_KEYTAB_DIR, STORE_KEYTAB_DIR_DEFAULT)
                + File.separator + fileName);
        if (!file.exists()) {
            try (InputStream is = FastHttpClient.get().url(url).build().execute().byteStream()) {
                FileUtil.saveContent(is, file);
            }
        }
        return file.getPath();
    }

    /** Local path of the datasource's keytab file, downloaded on first use. */
    private String getKeyTabPath(DatasourceInfo datasourceInfo) throws Exception {
        return fetchToStoreDir(datasourceInfo.getKeytabUrl(), datasourceInfo.findKeytabName());
    }

    /** Local path of the datasource's krb5.conf file, downloaded on first use. */
    private String getKrb5ConfPath(DatasourceInfo datasourceInfo) throws Exception {
        return fetchToStoreDir(datasourceInfo.getKrb5ConfUrl(), datasourceInfo.findKrb5ConfName());
    }

    @Override
    public List<DatabaseInfo> getDatabases(Connection conn) throws SQLException {
        throw unsupported();
    }

    @Override
    public List<TableInfo> getTables(DatabaseInfo database, Connection conn) throws SQLException {
        throw unsupported();
    }

    @Override
    public List<ColumnInfo> getColumns(DatabaseInfo database, TableInfo table, Connection conn) throws SQLException {
        throw unsupported();
    }

    @Override
    public List<IndexInfo> getIndexs(DatabaseInfo database, TableInfo table, Connection conn) throws SQLException {
        throw unsupported();
    }

    /**
     * No-op: Kafka has no database concept here.
     * NOTE(review): presumably topics named {@code dbName.table} are created
     * lazily elsewhere — confirm against callers.
     */
    @Override
    public void createNewDatabase(String dbName) throws SQLException {
    }

    /**
     * Drops a "database" by deleting every topic whose name starts with
     * {@code dbName + "."}.
     *
     * <p>Fixes vs. original: the admin client is now closed in {@code finally}
     * (it leaked if {@code listTopics()} failed), the interrupt flag is restored
     * on {@link InterruptedException}, and failures are rethrown as
     * {@link RuntimeException} for consistency with {@link #dropTable}.
     *
     * @param dbName topic-name prefix identifying the "database"
     */
    @Override
    public void dropDatabase(String dbName) throws SQLException {
        AdminClient adminClient = getAdminClient();
        try {
            Set<String> topics = adminClient.listTopics().names().get();
            List<String> delTopics = topics.stream()
                    .filter(topic -> topic.startsWith(dbName + "."))
                    .collect(Collectors.toList());
            adminClient.deleteTopics(delTopics).all().get();
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt();
            throw new RuntimeException(e);
        } catch (ExecutionException e) {
            throw new RuntimeException(e);
        } finally {
            adminClient.close();
        }
    }

    /**
     * Drops a "table" by deleting the topic named {@code dbName + "." + tableName}.
     * The interrupt flag is restored on {@link InterruptedException} before
     * rethrowing; the admin client is always closed.
     */
    @Override
    public void dropTable(String dbName, String tableName) throws SQLException {
        AdminClient adminClient = getAdminClient();
        try {
            String topicName = dbName + "." + tableName;
            adminClient.deleteTopics(Collections.singletonList(topicName)).all().get();
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt();
            throw new RuntimeException(e);
        } catch (ExecutionException e) {
            throw new RuntimeException(e);
        } finally {
            adminClient.close();
        }
    }

    @Override
    public List<SQLStatement> checkCreateTableDdl(String ddl) throws SQLException {
        throw unsupported();
    }

    @Override
    public void createTableByDdl(DatabaseInfo database, List<SQLStatement> sqlStatements) throws SQLException {
        throw unsupported();
    }

    @Override
    public String generateDdlSql(TableInfo table) {
        throw unsupported();
    }

    @Override
    public String getTableNameByDdl(String ddl) {
        throw unsupported();
    }

    @Override
    public String getTableNameByModifyDdl(SQLStatement sqlStatement) {
        throw unsupported();
    }

    @Override
    public String generateModifySql(TableInfo table, TableInfo oldTable) throws Exception {
        throw unsupported();
    }

    @Override
    public void modifyTableByDdl(DatabaseInfo database, List<SQLStatement> sqlStatements) throws SQLException {
        throw unsupported();
    }

    @Override
    public List<SQLStatement> checkModifyTableDdl(String ddl, String tableName) {
        throw unsupported();
    }

    /**
     * Supported column data-type names (Flink-SQL-style).
     * Fix vs. original: "DEICIMAL" typo corrected to "DECIMAL".
     */
    @Override
    public List<String> getDataTypes() {
        String[] dataTypes = {"VARCHAR", "CHAR", "STRING", "BINARY", "VARBINARY", "BYTES",
                "DECIMAL", "INT", "TINYINT", "SMALLINT", "BIGINT", "FLOAT", "DOUBLE",
                "BOOLEAN", "TIME", "TIMESTAMP", "DATE", "ARRAY", "MAP"};
        return Arrays.asList(dataTypes);
    }

    @Override
    public String format(String sql) {
        throw unsupported();
    }

    @Override
    public String getDatasourceDriver() {
        throw unsupported();
    }
}
