package com.atguigu.gmall.realtime.util;

import com.atguigu.gmall.realtime.common.Constant;

/**
 * @Author lzc
 * @Date 2022/12/6 09:16
 */
/**
 * Static helpers that build the {@code WITH (...)} clause of Flink SQL DDL
 * statements for Kafka-backed tables. All methods return only the connector
 * options clause; the caller supplies the {@code CREATE TABLE} schema.
 */
public class SQLUtil {

    /** Utility class — not instantiable. */
    private SQLUtil() {
        throw new UnsupportedOperationException("SQLUtil is a utility class");
    }

    /**
     * Resolves the serialization format from an optional varargs override.
     *
     * @param dataFormat optional format; the first element is used if present
     * @return the first supplied format, or {@code "json"} by default
     */
    private static String resolveFormat(String... dataFormat) {
        return dataFormat.length > 0 ? dataFormat[0] : "json";
    }

    /**
     * Builds the connector clause for a Kafka source table reading from the
     * latest offsets.
     *
     * @param topic      Kafka topic to consume
     * @param groupId    Kafka consumer group id
     * @param dataFormat optional serialization format (defaults to {@code "json"})
     * @return the {@code with(...)} clause for a Kafka source table
     */
    public static String getKafkaSourceDDL(String topic, String groupId, String... dataFormat) {
        String format = resolveFormat(dataFormat);

        return "with(" +
            " 'connector' = 'kafka', " +
            " 'properties.bootstrap.servers' = '" + Constant.KAFKA_BROKERS + "', " +
            " 'properties.group.id' = '" + groupId + "', " +
            " 'topic' = '" + topic + "', " +
            " 'scan.startup.mode' = 'latest-offset', " +
            " 'format' = '" + format + "' " +
            ")";
    }

    /**
     * Builds the connector clause for an append-only Kafka sink table.
     *
     * @param topic      Kafka topic to write to
     * @param dataFormat optional serialization format (defaults to {@code "json"})
     * @return the {@code with(...)} clause for a Kafka sink table
     */
    public static String getKafkaSinkDDL(String topic, String... dataFormat) {
        String format = resolveFormat(dataFormat);

        return "with(" +
            " 'connector' = 'kafka', " +
            " 'properties.bootstrap.servers' = '" + Constant.KAFKA_BROKERS + "', " +
            " 'topic' = '" + topic + "', " +
            " 'format' = '" + format + "' " +
            ")";
    }

    /**
     * Builds the connector clause for an upsert-Kafka sink table, which
     * requires both key and value formats (here set to the same format).
     *
     * @param topic      Kafka topic to write to
     * @param dataFormat optional serialization format (defaults to {@code "json"})
     * @return the {@code with(...)} clause for an upsert-kafka table
     */
    public static String getUpsertDDL(String topic, String... dataFormat) {
        String format = resolveFormat(dataFormat);

        return "with(" +
            " 'connector' = 'upsert-kafka', " +
            " 'properties.bootstrap.servers' = '" + Constant.KAFKA_BROKERS + "', " +
            " 'topic' = '" + topic + "', " +
            " 'key.format' = '" + format + "', " +
            " 'value.format' = '" + format + "' " +
            ")";
    }
}
