package com.atguigu.edu.realtime.utils;

import com.atguigu.edu.realtime.common.Constant;

/**
 * Helper for building the {@code WITH(...)} options clause of Flink SQL
 * Kafka / upsert-kafka table DDL statements.
 *
 * <p>All methods accept an optional trailing format argument; when omitted,
 * the format defaults to {@code "json"}.
 */
public class SQLUtil {

    // Utility class — static methods only; prevent instantiation.
    private SQLUtil() {
    }

    /**
     * Resolves the data format from the optional varargs.
     *
     * @param dataFormat optional format name; only the first element is used
     * @return the supplied format, or {@code "json"} when none was given
     */
    private static String resolveFormat(String... dataFormat) {
        return dataFormat.length > 0 ? dataFormat[0] : "json";
    }

    /**
     * Builds the {@code WITH(...)} clause for a Kafka <em>source</em> table
     * reading from the latest offsets.
     *
     * @param topic      Kafka topic to consume
     * @param groupId    Kafka consumer group id
     * @param dataFormat optional format name; defaults to {@code "json"}
     * @return the DDL options clause
     */
    public static String getKafkaSourceDDL(String topic, String groupId, String... dataFormat) {
        String format = resolveFormat(dataFormat);
        // 'json.ignore-parse-errors' is a JSON-format-only option: Flink rejects
        // format options whose prefix does not match the configured format, so it
        // must be emitted only when the format actually is json.
        String jsonOptions = "json".equals(format)
                ? " 'json.ignore-parse-errors'='true',"
                : "";
        return "with(" +
                " 'connector'='kafka'," +
                " 'properties.bootstrap.servers'='" + Constant.KAFKA_BROKERS + "'," +
                " 'properties.group.id'='" + groupId + "'," +
                " 'topic'='" + topic + "'," +
                " 'scan.startup.mode'='latest-offset'," +
                jsonOptions +
                " 'format'='" + format + "'" +
                ")";
    }

    /**
     * Builds the {@code WITH(...)} clause for a Kafka <em>sink</em> table.
     *
     * <p>Note: {@code 'json.ignore-parse-errors'} is deliberately NOT included
     * here — it is a deserialization (source-side) option, and Flink's JSON
     * format rejects it on sinks with a ValidationException.
     *
     * @param topic      Kafka topic to write to
     * @param dataFormat optional format name; defaults to {@code "json"}
     * @return the DDL options clause
     */
    public static String getKafkaSinkDDL(String topic, String... dataFormat) {
        String format = resolveFormat(dataFormat);
        return "with(" +
                " 'connector'='kafka'," +
                " 'properties.bootstrap.servers'='" + Constant.KAFKA_BROKERS + "'," +
                " 'topic'='" + topic + "'," +
                " 'format'='" + format + "'" +
                " )";
    }

    /**
     * Builds the {@code WITH(...)} clause for an <em>upsert-kafka</em> sink
     * table; the same format is used for both key and value serialization.
     *
     * @param topic      Kafka topic to write to
     * @param dataFormat optional format name; defaults to {@code "json"}
     * @return the DDL options clause
     */
    public static String getUpsertKafkaSinkDDL(String topic, String... dataFormat) {
        String format = resolveFormat(dataFormat);
        return "with(" +
                " 'connector'='upsert-kafka'," +
                " 'properties.bootstrap.servers'='" + Constant.KAFKA_BROKERS + "'," +
                " 'topic'='" + topic + "'," +
                " 'key.format'='" + format + "'," +
                " 'value.format'='" + format + "'" +
                ")";
    }
}
