package com.gmall.realtime.common.util;

import com.gmall.realtime.common.constant.Constant;

/**
 * Builds Flink SQL DDL fragments: full CREATE TABLE statements and connector
 * {@code WITH (...)} clauses for Kafka sources/sinks, upsert-kafka sinks,
 * and Doris sinks. All broker/host/credential values come from {@link Constant}.
 */
public class SQLUtil {

    /** Utility class — all members are static; not meant to be instantiated. */
    private SQLUtil() {
    }

    /**
     * Builds the CREATE TABLE DDL for {@code topic_db}, a table reading
     * CDC-style change records (Maxwell/Canal shape) from the TOPIC_DB Kafka topic.
     *
     * @param groupId Kafka consumer group id used by the source
     * @return the complete DDL, including the Kafka connector WITH clause
     */
    public static String getKafkaTopicDb(String groupId) {
        return "CREATE TABLE topic_db (\n" +
                "  `database` STRING,\n" +         // source database name
                "  `table` STRING,\n" +            // source table name
                "  `type` STRING,\n" +             // change type (e.g. insert/update/delete)
                "  `ts` BIGINT,\n" +               // event timestamp in seconds (scaled *1000 below)
                "  `data` Map<String,String>,\n" + // row data after the change
                "   proc_time as proctime(),\n" +  // processing-time attribute
                "  row_time as TO_TIMESTAMP_LTZ(ts * 1000,3) ,\n" + // event-time column: seconds -> epoch millis
                "  `old` Map<String,String>,\n" +  // previous values of the changed columns
                "  WATERMARK FOR row_time AS row_time - INTERVAL '5' SECOND\n" + // tolerate 5s of out-of-order events
                ")" + getKafkaSourceSQL(Constant.TOPIC_DB, groupId); // append the Kafka connector clause
    }

    /**
     * Builds a Kafka source connector WITH clause reading JSON from the
     * earliest offset.
     *
     * @param topicName Kafka topic to consume
     * @param groupId   Kafka consumer group id
     * @return the {@code WITH (...)} clause for a Kafka source table
     */
    public static String getKafkaSourceSQL(String topicName, String groupId) {
        return "WITH (\n" +
                "  'connector' = 'kafka',\n" +                                               // Kafka connector
                "  'topic' = '" + topicName + "',\n" +                                       // topic to read
                "  'properties.bootstrap.servers' = '" + Constant.KAFKA_BROKERS + "',\n" +   // broker list
                "  'properties.group.id' = '" + groupId + "',\n" +                           // consumer group
                "  'scan.startup.mode' = 'earliest-offset',\n" +                             // read from the beginning
                "  'format' = 'json'\n" +                                                    // JSON payloads
                ")";
    }

    /**
     * Builds an upsert-kafka sink connector WITH clause; {@code key.format}
     * and {@code value.format} are mandatory for this connector.
     *
     * @param topicName Kafka topic to write to
     * @return the {@code WITH (...)} clause for an upsert-kafka sink
     */
    public static String getUpsertKafkaSinkSQL(String topicName) {
        return "WITH (\n" +
                "  'connector' = 'upsert-kafka',\n" +                                        // upsert (keyed) Kafka sink
                "  'topic' = '" + topicName + "',\n" +                                       // target topic
                "  'properties.bootstrap.servers' = '" + Constant.KAFKA_BROKERS + "',\n" +   // broker list
                "  'key.format' = 'json',\n" +                                               // required by upsert-kafka
                "  'value.format' = 'json'\n" +                                              // required by upsert-kafka
                ")";
    }

    /**
     * Builds a plain (append-only) Kafka sink connector WITH clause.
     *
     * @param topicName Kafka topic to write to
     * @return the {@code WITH (...)} clause for a Kafka sink
     */
    public static String getKafkaSinkSQL(String topicName) {
        return "WITH (\n" +
                "  'connector' = 'kafka',\n" +                                               // Kafka connector
                "  'topic' = '" + topicName + "',\n" +                                       // target topic
                "  'properties.bootstrap.servers' = '" + Constant.KAFKA_BROKERS + "',\n" +   // broker list
                "  'format' = 'json'\n" +                                                    // JSON payloads
                ")";
    }

    /**
     * Builds a Doris sink connector WITH clause targeting
     * {@code DORIS_DATABASE.table}.
     *
     * @param table target Doris table name (database comes from {@link Constant#DORIS_DATABASE})
     * @return the {@code WITH (...)} clause for a Doris sink
     */
    public static String getDorisSinkSQL(String table) {
        return "WITH (\n" +
                "      'connector' = 'doris',\n" +                                              // Doris connector
                "      'fenodes' = '" + Constant.FENODES + "',\n" +                             // Doris FE node address
                "      'table.identifier' = '" + Constant.DORIS_DATABASE + "." + table + "',\n" + // db.table identifier
                "      'username' = '" + Constant.DORIS_USERNAME + "',\n" +
                "      'password' = '" + Constant.DORIS_PASSWORD + "',\n" +
                // NOTE(review): prefix and timestamp are fused with no separator
                // ("doris_label1700..."); still unique per call, but confirm a
                // "doris_label_" separator wasn't intended.
                "      'sink.label-prefix' = 'doris_label" + System.currentTimeMillis() + "'\n" + // unique stream-load label prefix
                ")";
    }

    /**
     * Builds a compact (single-line) upsert-kafka DDL WITH clause that also
     * ignores JSON parse errors on both key and value.
     *
     * @param topic Kafka topic to write to
     * @return the {@code with(...)} clause for an upsert-kafka table
     */
    public static String getUpsertKafkaDDL(String topic) {
        return "with(" +
                "  'connector' = 'upsert-kafka'," +
                "  'topic' = '" + topic + "'," +
                "  'properties.bootstrap.servers' = '" + Constant.KAFKA_BROKERS + "'," +
                "  'key.json.ignore-parse-errors' = 'true'," +   // skip unparseable keys
                "  'value.json.ignore-parse-errors' = 'true'," + // skip unparseable values
                "  'key.format' = 'json', " +
                "  'value.format' = 'json' " +
                ")";
    }

    /**
     * Builds a compact (single-line) Kafka source DDL WITH clause reading JSON
     * from the earliest offset and skipping records that fail to parse.
     *
     * @param groupId Kafka consumer group id
     * @param topic   Kafka topic to consume
     * @return the {@code with(...)} clause for a Kafka source table
     */
    public static String getKafkaDDLSource(String groupId, String topic) {
        return "with(" +
                "  'connector' = 'kafka'," +
                "  'properties.group.id' = '" + groupId + "'," +
                "  'topic' = '" + topic + "'," +
                "  'properties.bootstrap.servers' = '" + Constant.KAFKA_BROKERS + "'," +
                "  'scan.startup.mode' = 'earliest-offset'," +
                "  'json.ignore-parse-errors' = 'true'," + // drop records whose JSON fails to parse
                "  'format' = 'json' " +
                ")";
    }

    /**
     * Builds a compact (single-line) Kafka sink DDL WITH clause.
     *
     * @param topic Kafka topic to write to
     * @return the {@code with(...)} clause for a Kafka sink table
     */
    public static String getKafkaDDLSink(String topic) {
        return "with(" +
                "  'connector' = 'kafka'," +
                "  'topic' = '" + topic + "'," +
                "  'properties.bootstrap.servers' = '" + Constant.KAFKA_BROKERS + "'," +
                "  'format' = 'json' " +
                ")";
    }
}
