package com.bujunjie.study.realtime.common.util;

import com.bujunjie.study.realtime.common.constant.FlinkConstant;

/**
 * <p>Helpers for building Flink SQL connector {@code WITH (...)} clauses.</p>
 *
 * <p>All methods return only the {@code WITH (...)} fragment; the caller is
 * expected to prepend the {@code CREATE TABLE ... (schema)} part.</p>
 *
 * @author bu.junjie
 * @version 1.0.0
 * @createTime 2025/9/16 16:37
 */
public final class SQLUtil {

    /**
     * Default HBase ZooKeeper quorum.
     * NOTE(review): this mixes host-only and host:port entries
     * ("hadoop102,hadoop103,hadoop104:2181"); HBase applies the default client
     * port to port-less hosts, but a uniform "host:2181" list per node is the
     * conventional form — confirm against the cluster configuration.
     */
    private static final String DEFAULT_HBASE_ZOOKEEPER_QUORUM =
            "hadoop102,hadoop103,hadoop104:2181";

    /** Utility class — not instantiable. */
    private SQLUtil() {
        throw new UnsupportedOperationException("Utility class");
    }

    /**
     * <p>Builds the Kafka source connector {@code WITH (...)} clause.</p>
     *
     * @param topic        Kafka topic to consume
     * @param groupId      Kafka consumer group id
     * @param kafkaBrokers bootstrap servers ("host:port,host:port"); when
     *                     {@code null} or blank, falls back to
     *                     {@link FlinkConstant#KAFKA_BROKERS}
     * @return the {@code WITH (...)} fragment of the Kafka source DDL
     * @author bu.junjie
     * @date 2025/9/16 16:38
     */
    public static String getKafkaDDL(String topic, String groupId, String kafkaBrokers) {
        // Bug fix: the kafkaBrokers parameter was previously ignored and the
        // constant was always used; now the parameter wins when it is provided.
        String brokers = (kafkaBrokers == null || kafkaBrokers.trim().isEmpty())
                ? FlinkConstant.KAFKA_BROKERS
                : kafkaBrokers;
        return " WITH (\n" +
                "  'connector' = 'kafka',\n" +
                "  'topic' = '" + topic + "',\n" +
                "  'properties.bootstrap.servers' = '" + brokers + "',\n" +
                "  'properties.group.id' = '" + groupId + "',\n" +
                "  'scan.startup.mode' = 'latest-offset',\n" +
                "  'format' = 'json'\n" +
                ")";
    }

    /**
     * <p>Builds the HBase lookup connector {@code WITH (...)} clause using the
     * default ZooKeeper quorum.</p>
     *
     * @param tableName HBase table name (qualified with
     *                  {@link FlinkConstant#HBASE_NAMESPACE})
     * @return the {@code WITH (...)} fragment of the HBase DDL
     * @author bu.junjie
     * @date 2025/9/16 17:10
     */
    public static String getHBaseDDL(String tableName) {
        return getHBaseDDL(tableName, DEFAULT_HBASE_ZOOKEEPER_QUORUM);
    }

    /**
     * <p>Builds the HBase lookup connector {@code WITH (...)} clause for an
     * explicit ZooKeeper quorum. Async lookups with a partial cache
     * (500 rows, 1 hour TTL) are enabled.</p>
     *
     * @param tableName       HBase table name (qualified with
     *                        {@link FlinkConstant#HBASE_NAMESPACE})
     * @param zookeeperQuorum HBase ZooKeeper quorum, e.g. "host1:2181,host2:2181"
     * @return the {@code WITH (...)} fragment of the HBase DDL
     */
    public static String getHBaseDDL(String tableName, String zookeeperQuorum) {
        return " WITH (\n" +
                " 'connector' = 'hbase-2.2',\n" +
                " 'table-name' = '" + FlinkConstant.HBASE_NAMESPACE + ":" + tableName + "',\n" +
                " 'zookeeper.quorum' = '" + zookeeperQuorum + "',\n" +
                " 'lookup.async' = 'true',\n" +
                " 'lookup.cache' = 'PARTIAL',\n" +
                " 'lookup.partial-cache.max-rows' = '500',\n" +
                " 'lookup.partial-cache.expire-after-write' = '1 hour',\n" +
                " 'lookup.partial-cache.expire-after-access' = '1 hour'\n" +
                ")";
    }

    /**
     * <p>Builds the upsert-kafka sink connector {@code WITH (...)} clause
     * (JSON key and value formats, brokers from
     * {@link FlinkConstant#KAFKA_BROKERS}).</p>
     *
     * @param topic Kafka topic to write to
     * @return the {@code WITH (...)} fragment of the upsert-kafka DDL
     * @author bu.junjie
     * @date 2025/9/16 17:20
     */
    public static String getUpsertKafkaDDL(String topic) {
        return " WITH (\n" +
                "  'connector' = 'upsert-kafka',\n" +
                "  'topic' = '" + topic + "',\n" +
                "  'properties.bootstrap.servers' = '" + FlinkConstant.KAFKA_BROKERS + "',\n" +
                "  'key.format' = 'json',\n" +
                "  'value.format' = 'json'\n" +
                ")";
    }
}
