package git.soulbgm;

import org.apache.flink.table.api.StatementSet;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.util.Arrays;
import java.util.Locale;

public class SQLUtil {

    /** Utility class — not meant to be instantiated. */
    private SQLUtil() {
    }

    /**
     * Reads a SQL script file, splits it into individual statements and submits them
     * as a Flink SQL job.
     *
     * <p>Non-INSERT statements (DDL, etc.) are executed immediately via
     * {@code tableEnv.executeSql}; INSERT statements are batched into a
     * {@link StatementSet} so they run together as a single job.
     *
     * @param port        port passed through to {@code BaseSQLJob.start}
     * @param parallelism job parallelism
     * @param sqlFilePath path of the UTF-8 encoded SQL script; statements are
     *                    separated by {@code ;} (semicolons inside string literals
     *                    are NOT handled — keep statements semicolon-free internally)
     * @throws IOException if the SQL file cannot be read
     */
    public static void start(int port, int parallelism, String sqlFilePath) throws IOException {
        byte[] content = Files.readAllBytes(Paths.get(sqlFilePath));
        String formatSql = formatSql(new String(content, StandardCharsets.UTF_8));
        String[] split = formatSql.split(";");
        BaseSQLJob job = new BaseSQLJob() {
            @Override
            public void handle(StreamTableEnvironment tableEnv) {
                StatementSet statementSet = tableEnv.createStatementSet();
                int insertCount = 0;
                for (String sql : split) {
                    String sqlTrim = sql.trim();
                    if (sqlTrim.isEmpty()) {
                        continue;
                    }
                    // Batch every INSERT variant (INSERT INTO / INSERT OVERWRITE)
                    // into the statement set; run everything else right away.
                    if (sqlTrim.toUpperCase(Locale.ROOT).startsWith("INSERT")) {
                        statementSet.addInsertSql(sqlTrim);
                        insertCount++;
                    } else {
                        tableEnv.executeSql(sqlTrim);
                    }
                }
                // BUGFIX: StatementSet.execute() throws a TableException when no
                // statement was added, so only execute a non-empty set.
                if (insertCount > 0) {
                    statementSet.execute();
                }
            }
        };
        job.start(port, parallelism);
    }

    /**
     * Normalizes a SQL script: drops blank lines and {@code --} line comments
     * (including indented ones), re-joining the remaining lines with {@code \n}.
     *
     * <p>NOTE(review): trailing {@code -- comment} text on the same line as SQL is
     * NOT stripped — only lines that start with {@code --} after trimming.
     *
     * @param sql raw SQL script content
     * @return {@link String} the cleaned script
     */
    private static String formatSql(String sql) {
        StringBuilder builder = new StringBuilder();
        // \R matches any line terminator (\n, \r\n, lone \r).
        Arrays.stream(sql.split("\\R"))
                .filter(line -> {
                    String trimmed = line.trim();
                    // BUGFIX: trim BEFORE the comment check so indented "--"
                    // comments are dropped too (they could otherwise contain a
                    // stray ';' and corrupt statement splitting).
                    return !trimmed.isEmpty() && !trimmed.startsWith("--");
                })
                .forEach(line -> builder.append(line).append("\n"));
        return builder.toString();
    }

    /**
     * Builds the WITH-clause connection properties for a Kafka connector table
     * (JSON format, starting from the latest offset).
     *
     * @param bootstrapServers Kafka bootstrap servers, e.g. {@code host1:9092,host2:9092}
     * @param topic            topic to read from / write to
     * @param groupId          consumer group id
     * @return {@link String} the {@code WITH (...)} clause
     */
    public static String getKafkaDDL(String bootstrapServers, String topic, String groupId) {
        return " WITH (\n" +
                "  'connector' = 'kafka',\n" +
                "  'topic' = '" + topic + "',\n" +
                "  'properties.bootstrap.servers' = '" + bootstrapServers + "',\n" +
                "  'properties.group.id' = '" + groupId + "',\n" +
                "  'scan.startup.mode' = 'latest-offset',\n" +
                "  'format' = 'json'\n" +
                ")";
    }

    /**
     * Builds the WITH-clause connection properties for a JDBC connector table.
     *
     * <p>NOTE(review): values are concatenated verbatim — callers must not pass
     * untrusted input, and the plaintext password will appear in the generated DDL.
     *
     * @param url       JDBC URL
     * @param driver    JDBC driver class name
     * @param username  database user name
     * @param password  database password
     * @param tableName target table name
     * @return {@link String} the {@code WITH (...)} clause
     */
    public static String getJdbcDDL(String url, String driver, String username, String password, String tableName) {
        return " WITH (\n" +
                "  'connector' = 'jdbc',\n" +
                "  'url' = '" + url + "',\n" +
                "  'driver' = '" + driver + "',\n" +
                "  'username' = '" + username + "',\n" +
                "  'password' = '" + password + "',\n" +
                "  'table-name' = '" + tableName + "'\n" +
                ")";
    }

}
