package com.atguigu.gmall.realtime.common.util;

import com.atguigu.gmall.realtime.common.constant.Constant;

/**
 * Helpers that build the {@code WITH (...)} connector clauses used by Flink SQL
 * {@code CREATE TABLE} statements for the Doris, HBase and Kafka connectors.
 * Each method returns only the WITH-clause fragment; the caller supplies the
 * column list and prepends it to form the full DDL.
 */
public class FlinkSqlUtil {

    /** Utility class — not meant to be instantiated. */
    private FlinkSqlUtil() {
    }

    /**
     * Builds the WITH clause for a Doris sink table.
     *
     * @param database target Doris database name
     * @param table    target Doris table name
     * @return the Doris connector WITH-clause fragment
     */
    public static String getDorisSinkDDL(String database, String table) {
        return " WITH ( " +
                "      'connector' = 'doris', " +
                "      'fenodes' = '" + Constant.DORIS_FE_NODES + "'," +
                "      'table.identifier' = '" + database + "." + table + "'," +
                "      'username' = '" + Constant.DORIS_USER_NAME + "'," +
                "      'password' = '" + Constant.DORIS_PASSWORD + "'," +
                // Two-phase commit disabled, so no per-run sink.label-prefix is required.
                "      'sink.enable-2pc' = 'false'  " +
                ")";
    }

    /**
     * Builds the WITH clause for an HBase lookup/dimension table, with an
     * async partial lookup cache (max 100 rows, 1-hour expiry after both
     * write and access).
     *
     * @param namespaceName HBase namespace
     * @param tableName     HBase table name inside the namespace
     * @return the HBase connector WITH-clause fragment
     */
    public static String getHbaseDDL(String namespaceName, String tableName) {
        return "WITH (" +
                " 'connector' = 'hbase-2.2'," +
                "  'table-name' = '" + namespaceName + ":" + tableName + "'," +
                " 'zookeeper.quorum' = '" + Constant.ZOOKEEPER_QUORUM + "', " +
                "  'lookup.async' = 'true'," +
                " 'lookup.cache' = 'PARTIAL'," +
                " 'lookup.partial-cache.max-rows' = '100'," +
                " 'lookup.partial-cache.expire-after-write' = '1 hour', " +
                " 'lookup.partial-cache.expire-after-access' = '1 hour' " +
                ")";
    }

    /**
     * Builds the WITH clause for an upsert-kafka sink table
     * (JSON key and value formats).
     *
     * @param topic destination Kafka topic
     * @return the upsert-kafka connector WITH-clause fragment
     */
    public static String getUpsertKafkaSinkDDl(String topic) {
        return "WITH (" +
                "  'connector' = 'upsert-kafka'," +
                "  'topic' = '" + topic + "'," +
                // Use the shared broker constant (was hard-coded, drifting from getKafkaSourceDDL).
                "  'properties.bootstrap.servers' = '" + Constant.KAFKA_BROKERS + "', " +
                "  'key.format' = 'json', " +
                " 'value.format' = 'json' " +
                ")";
    }

    /**
     * Builds the WITH clause for a plain kafka sink table with
     * at-least-once delivery and JSON serialization.
     *
     * @param topic destination Kafka topic
     * @return the kafka connector WITH-clause fragment
     */
    public static String getKafkaSinkDDL(String topic) {
        return "WITH (" +
                "  'connector' = 'kafka'," +
                "  'topic' = '" + topic + "'," +
                // Use the shared broker constant (was hard-coded, drifting from getKafkaSourceDDL).
                "  'properties.bootstrap.servers' = '" + Constant.KAFKA_BROKERS + "', " +
                // at-least-once avoids transactions; for exactly-once a unique
                // sink.transaction-id-prefix would also be required.
                " 'sink.delivery-guarantee' = 'at-least-once', " +
                // Must satisfy: checkpoint timeout <= producer timeout <= broker timeout.
                " 'properties.transaction.timeout.ms' = '900000',  " +
                "  'format' = 'json'" +
                ")";
    }

    /**
     * Builds the WITH clause for a kafka source table that reads JSON
     * records starting from the latest offset.
     *
     * @param topic   source Kafka topic
     * @param groupId Kafka consumer group id
     * @return the kafka connector WITH-clause fragment
     */
    public static String getKafkaSourceDDL(String topic, String groupId) {
        return " WITH (" +
                " 'connector' = 'kafka'," +
                "  'topic' = '" + topic + "'," +
                "  'properties.bootstrap.servers' = '" + Constant.KAFKA_BROKERS + "'," +
                "  'properties.group.id' = '" + groupId + "'," +
                "  'scan.startup.mode' = 'latest-offset'," +
                "   'format' = 'json'" +
                "  )";
    }
}
