package com.changan.services;


import com.changan.common.model.Tuple2;
import com.changan.common.parser.FlinkSqlParse;
import com.changan.config.ColumnEntity;
import com.changan.config.EnvConfig;
import com.changan.config.FlinkConfig;
import com.changan.config.TableEntity;
import com.changan.enums.ConnectorType;
import com.changan.enums.FieldType;
import com.changan.model.AppArgs;
import com.changan.utils.MysqlUtil;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.ArrayList;
import java.util.Comparator;
import java.util.List;
import java.util.Properties;
import java.util.stream.Collectors;

public class FlinkService {

    /** Value of {@code scan.startup.mode} that requires an accompanying start timestamp. */
    private static final String KAFKA_SCAN_START_UP_MODE_TIMESTAMP_KEY = "timestamp";
    private static final Logger LOG = LoggerFactory.getLogger(FlinkService.class);

    /**
     * Builds the Flink CREATE TABLE statement for the CDC (Debezium-over-Kafka) source table.
     *
     * @param appArgs          runtime arguments (Kafka address, topic suffix, consumer options)
     * @param table            source table metadata; its qualifier forms the Kafka topic name
     * @param columnEntityList column definitions used to render the field list
     * @param cdcTableName     name of the Flink table to create
     * @return the generated Flink DDL string
     */
    public static String getDebeziumSql(AppArgs appArgs, TableEntity table, List<ColumnEntity> columnEntityList, String cdcTableName) {
        List<Tuple2<String, String>> cdcFields = getColumnList(columnEntityList);
        Properties optionsProperties = FlinkPropertyService.getFlinkProperties(EnvConfig.CONFIG, ConnectorType.DEBEZIUM);
        Properties kafkaProperties = FlinkPropertyService.getFlinkProperties(EnvConfig.CONFIG, ConnectorType.KAFKA);
        // Overlay the Kafka connector options on top of the Debezium ones.
        kafkaProperties.forEach((k, v) -> optionsProperties.setProperty(k.toString(), v.toString()));
        optionsProperties.setProperty(FlinkConfig.Kafka.PROPERTIES_BOOTSTRAP_SERVER, appArgs.getKafkaAddress());
        // Topic name: table qualifier plus either the explicit suffix or a "_topic" default.
        String topicSuffix = appArgs.getKafkaTopicSuffix() != null ? appArgs.getKafkaTopicSuffix() : "_topic";
        optionsProperties.setProperty(FlinkConfig.Kafka.TOPIC, table.getQualifier() + topicSuffix);

        // Consumption strategy (scan startup mode).
        if (appArgs.getScanStartUpMode() != null) {
            optionsProperties.setProperty(FlinkConfig.Kafka.SCAN_STARTUP_MODE, appArgs.getScanStartUpMode());
        }
        // When the strategy is "timestamp", a start timestamp must also be supplied.
        if (KAFKA_SCAN_START_UP_MODE_TIMESTAMP_KEY.equals(appArgs.getScanStartUpMode())) {
            optionsProperties.setProperty(FlinkConfig.Kafka.SCAN_STARTUP_TIMESTAMP_MILLIS, appArgs.getScanStartUpTimestamp());
        }
        // Consumer group.
        if (appArgs.getKafkaGroupId() != null) {
            optionsProperties.setProperty(FlinkConfig.Kafka.PROPERTIES_GROUP_ID, appArgs.getKafkaGroupId());
        }

        return FlinkSqlParse.sqlParseWithNoPart(cdcTableName, cdcFields, optionsProperties);
    }

    /**
     * Builds the Flink CREATE TABLE statement for the StarRocks sink table.
     *
     * <p>{@code appArgs.getTargetAddress()} is expected in the form
     * {@code <key>=<jdbc-url>|<key>=<load-url>}; everything after the first {@code '='} of each
     * {@code '|'}-separated part is used, so URLs containing {@code '='} (e.g. query
     * parameters) are preserved intact.
     *
     * @param appArgs            runtime arguments (target address/credentials/database)
     * @param table              target table metadata
     * @param columnEntityList   column definitions used to render the field list
     * @param starrocksTableName name of the Flink table to create
     * @return the generated Flink DDL string
     */
    public static String getStarrocksSql(AppArgs appArgs, TableEntity table, List<ColumnEntity> columnEntityList, String starrocksTableName) {
        List<Tuple2<String, String>> starrocksFields = getColumnList(columnEntityList);
        String fields = starrocksFields.stream().map(x -> x._1).collect(Collectors.joining(","));

        Properties optionProperties = FlinkPropertyService.getFlinkProperties(EnvConfig.CONFIG, ConnectorType.STARROCKS);
        String primaryKeyStr = getPrimaryKeyStr(table, columnEntityList);
        optionProperties.setProperty(FlinkConfig.Starrocks.TABLE_NAME, table.getTargetTableName());
        String[] starrocksProp = appArgs.getTargetAddress().split("\\|");
        // Take the substring after the first '=' rather than split("=")[1], which would
        // truncate URLs that themselves contain '=' (e.g. "jdbc:mysql://host?useSSL=false").
        optionProperties.setProperty(FlinkConfig.Starrocks.JDBC_URL, valueAfterFirstEquals(starrocksProp[0]));
        optionProperties.setProperty(FlinkConfig.Starrocks.LOAD_URL, valueAfterFirstEquals(starrocksProp[1]));
        optionProperties.setProperty(FlinkConfig.Starrocks.USERNAME, appArgs.getTargetUserName());
        optionProperties.setProperty(FlinkConfig.Starrocks.PASSWORD, appArgs.getTargetPassword());
        optionProperties.setProperty(FlinkConfig.Starrocks.DATABASE_NAME, appArgs.getTargetDatabase());

        // Fixed sink behaviour: JSON stream load with partial updates and exactly-once
        // semantics; the trailing "__op" column carries the change operation.
        optionProperties.setProperty("sink.properties.format", "json");
        optionProperties.setProperty("sink.properties.partial_update", "true");
        optionProperties.setProperty("sink.semantic", "exactly-once");
        optionProperties.setProperty("sink.properties.columns", fields + ",__op");
        optionProperties.setProperty("sink.ignore.update-before", "false");
        // NOTE(review): sink parallelism is hard-coded to 1 — confirm this is intentional.
        optionProperties.setProperty("sink.parallelism", "1");
        optionProperties.setProperty("sink.properties.strip_outer_array", "true");

        return FlinkSqlParse.sqlParseWithNoPart(starrocksTableName, starrocksFields, optionProperties, primaryKeyStr);
    }

    /** Returns the substring after the first '=' in {@code keyValue}, e.g. "k=v=w" -> "v=w". */
    private static String valueAfterFirstEquals(String keyValue) {
        return keyValue.substring(keyValue.indexOf('=') + 1);
    }

    /**
     * Resolves the source-side (table, columns) definitions by describing each comma-separated
     * source table over JDBC.
     *
     * @param appArgs runtime arguments carrying the source JDBC address, credentials and table list
     * @return one (TableEntity, columns) tuple per source table
     */
    public static List<Tuple2<TableEntity, List<ColumnEntity>>> getFlinkTaskSourceDefinitions(AppArgs appArgs) {
        String[] tables = appArgs.getSourceTableName().split(",");
        return parseColumnsFromJdbc(appArgs.getSourceAddress(), appArgs.getUserName(), appArgs.getPassword(), appArgs.getDatabaseName(), tables);
    }

    /**
     * Derives the target-side definitions from the source ones: same columns and qualifier,
     * but with the i-th target table name from the comma-separated target list.
     *
     * @param appArgs      runtime arguments carrying the comma-separated target table names
     * @param sourceTables source definitions, positionally matched with the target names
     * @return target (TableEntity, columns) tuples, one per source table
     * @throws IllegalArgumentException when fewer target names than source tables are supplied
     */
    public static List<Tuple2<TableEntity, List<ColumnEntity>>> getFlinkTaskTargetDefinitions(AppArgs appArgs, List<Tuple2<TableEntity, List<ColumnEntity>>> sourceTables) {
        String[] targetTables = appArgs.getTargetTableName().split(",");
        // Fail fast with a clear message instead of an ArrayIndexOutOfBoundsException below.
        if (targetTables.length < sourceTables.size()) {
            throw new IllegalArgumentException(
                    "target table count " + targetTables.length + " < source table count " + sourceTables.size());
        }

        List<Tuple2<TableEntity, List<ColumnEntity>>> tuple2s = new ArrayList<>(sourceTables.size());
        for (int i = 0; i < sourceTables.size(); i++) {
            Tuple2<TableEntity, List<ColumnEntity>> sourceTable = sourceTables.get(i);
            TableEntity tableEntity = new TableEntity();
            tableEntity.setQualifier(sourceTable._1.getQualifier());
            tableEntity.setDatabaseName(sourceTable._1.getDatabaseName());
            tableEntity.setTableName(sourceTable._1.getTableName());
            tableEntity.setTargetTableName(targetTables[i]);

            Tuple2<TableEntity, List<ColumnEntity>> tuple2 = new Tuple2<>();
            tuple2._1 = tableEntity;
            tuple2._2 = sourceTable._2;
            tuple2s.add(tuple2);
        }
        return tuple2s;
    }

    /**
     * Reads column metadata for each table over JDBC ({@code DESCRIBE <table>}) and builds the
     * (table, columns) definitions used to generate the source/sink DDL.
     *
     * <p>Best effort: an {@link SQLException} is logged and whatever was parsed up to that
     * point is returned. All JDBC resources are closed via try-with-resources / finally.
     *
     * @param url          JDBC URL of the source MySQL instance
     * @param userName     JDBC user
     * @param password     JDBC password
     * @param databaseName logical database name stamped on every entity
     * @param tableName    one or more table names to describe
     * @return one (TableEntity, columns) tuple per successfully described table
     */
    public static List<Tuple2<TableEntity, List<ColumnEntity>>> parseColumnsFromJdbc(String url, String userName, String password, String databaseName, String... tableName) {
        try {
            // Explicit driver load kept for environments without JDBC 4 auto-discovery.
            Class.forName("com.mysql.cj.jdbc.Driver");
        } catch (ClassNotFoundException e) {
            throw new RuntimeException(e);
        }

        List<Tuple2<TableEntity, List<ColumnEntity>>> syncTasks = new ArrayList<>();
        Connection connection = MysqlUtil.getConnection(url, userName, password);
        try (Statement statement = connection.createStatement()) {
            for (String table : tableName) {
                logDdl(statement, table);
                syncTasks.add(describeTable(statement, databaseName, table));
            }
        } catch (SQLException e) {
            // Keep partial results, but never swallow the failure silently.
            LOG.error("failed to read column metadata via jdbc", e);
        } finally {
            MysqlUtil.closeConnection(connection);
        }
        return syncTasks;
    }

    /** Logs the SHOW CREATE TABLE output — useful when debugging type-mapping issues. */
    private static void logDdl(Statement statement, String table) throws SQLException {
        try (ResultSet ddlSet = statement.executeQuery("SHOW CREATE TABLE " + table)) {
            while (ddlSet.next()) {
                LOG.info("{}'s ddl, this may be helpful for debug: \n{}", table, ddlSet.getString(2));
            }
        }
    }

    /** Runs DESCRIBE on a single table and converts each row into a ColumnEntity. */
    private static Tuple2<TableEntity, List<ColumnEntity>> describeTable(Statement statement, String databaseName, String table) throws SQLException {
        TableEntity tableEntity = new TableEntity();
        tableEntity.setDatabaseName(databaseName);
        tableEntity.setTableName(table);
        tableEntity.setTargetTableName(table);
        tableEntity.setQualifier(databaseName + "_" + table);

        List<ColumnEntity> columnEntities = new ArrayList<>();
        try (ResultSet resultSet = statement.executeQuery("describe " + table)) {
            int position = 1;
            while (resultSet.next()) {
                columnEntities.add(toColumnEntity(resultSet, databaseName, table, position++));
            }
        }

        Tuple2<TableEntity, List<ColumnEntity>> tuple2 = new Tuple2<>();
        tuple2._1 = tableEntity;
        tuple2._2 = columnEntities;
        return tuple2;
    }

    /** Maps one DESCRIBE row (Field, Type, Null, Key, ...) to a ColumnEntity. */
    private static ColumnEntity toColumnEntity(ResultSet resultSet, String databaseName, String table, int position) throws SQLException {
        String fieldName = resultSet.getString(1);
        // e.g. "decimal(10,2)" or "int unsigned" -> keep only the first whitespace-separated token.
        String typeWithPrecision = resultSet.getString(2).split("\\s+")[0];
        String isKey = resultSet.getString(4);

        // Base type name without the precision/scale suffix, used for the Flink type lookup.
        String baseTypeName = typeWithPrecision.contains("(")
                ? typeWithPrecision.substring(0, typeWithPrecision.indexOf('('))
                : typeWithPrecision;
        // decimal/numeric keep their precision in the target type. The original if/if-else
        // chain let the generic branch overwrite the decimal case, silently dropping the scale.
        boolean keepPrecision = baseTypeName.contains("decimal") || baseTypeName.contains("numeric");
        String flinkType = FieldType.fromSimpleJdbcTypeName(baseTypeName).getFlinkType();

        ColumnEntity columnEntity = new ColumnEntity();
        columnEntity.setDatabaseName(databaseName);
        columnEntity.setTableName(table);
        columnEntity.setColName(fieldName);
        columnEntity.setFlinkColName(fieldName);
        columnEntity.setFlinkColType(flinkType);
        columnEntity.setTargetColName(fieldName);
        columnEntity.setTargetColType(keepPrecision ? typeWithPrecision : flinkType);
        columnEntity.setColType(typeWithPrecision);
        columnEntity.setColPosition(position);
        columnEntity.setIsPrimaryKey("PRI".equals(isKey));
        return columnEntity;
    }

    /**
     * Returns the backtick-quoted, comma-joined primary-key column list, e.g. {@code `id`,`code`}.
     * Empty string when the table has no primary key.
     */
    public static String getPrimaryKeyStr(TableEntity table, List<ColumnEntity> columnEntities) {
        // 'table' kept for interface compatibility; the key list comes from the columns alone.
        return getPrimaryColumns(columnEntities);
    }

    /**
     * Joins the primary-key target column names as {@code `a`,`b`}.
     *
     * <p>The original {@code reduce(...).orElseGet(null)} threw a NullPointerException for a
     * table without primary keys; {@link Collectors#joining} yields {@code ""} instead.
     */
    public static String getPrimaryColumns(List<ColumnEntity> columnList) {
        return columnList.stream()
                .filter(ColumnEntity::getIsPrimaryKey)
                .map(x -> "`" + x.getTargetColName() + "`")
                .collect(Collectors.joining(","));
    }

    /** Returns the primary-key columns ordered by their column position. */
    public static List<ColumnEntity> getPrimaryKeyList(List<ColumnEntity> columnEntityList) {
        return columnEntityList.stream()
                .filter(ColumnEntity::getIsPrimaryKey)
                .sorted(Comparator.comparing(ColumnEntity::getColPosition))
                .collect(Collectors.toList());
    }

    /** Projects each column to a (flink column name, target column type) pair for DDL rendering. */
    public static List<Tuple2<String, String>> getColumnList(List<ColumnEntity> columnEntityList) {
        List<Tuple2<String, String>> resultColumnList = new ArrayList<>(columnEntityList.size());
        for (ColumnEntity column : columnEntityList) {
            Tuple2<String, String> field = new Tuple2<>();
            field._1 = column.getFlinkColName();
            field._2 = column.getTargetColType();
            resultColumnList.add(field);
        }
        return resultColumnList;
    }
}
