package com.practice.gmall.realtime.util;

import com.practice.gmall.realtime.common.Constant;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

/**
 * Helper for generating Flink SQL Kafka connector DDL fragments ({@code WITH (...)}
 * clauses) and for registering the {@code ods_db} source table.
 *
 * <p>All broker addresses come from {@link Constant#KAFKA_BROKERS} so the cluster
 * location is configured in exactly one place.
 */
public class MyKafkaUtil {

    /** Utility class — not instantiable. */
    private MyKafkaUtil() {
    }

    /**
     * Builds the Kafka connector {@code WITH (...)} clause for reading the
     * {@code ods_db} topic from the earliest offset as JSON.
     *
     * <p>{@code 'json.ignore-parse-errors' = 'true'} makes the source skip rows whose
     * payload is not valid JSON instead of failing the job.
     *
     * @param groupId Kafka consumer group id
     * @return the {@code WITH (...)} DDL fragment
     * @Author: lzy
     * @Date:2023-02-13 - 22:39
     */
    public static String getOdsDbDDL(String groupId) {
        // NOTE(review): previously hardcoded a single broker ('hadoop162:9092');
        // switched to Constant.KAFKA_BROKERS for failover and consistency with the
        // other DDL helpers — confirm the constant holds the full broker list.
        return "WITH ( " +
                "  'connector' = 'kafka', " +
                "  'topic' = 'ods_db', " +
                "  'properties.bootstrap.servers' = '" + Constant.KAFKA_BROKERS + "', " +
                "  'properties.group.id' = '" + groupId + "', " +
                "  'scan.startup.mode' = 'earliest-offset', " +
                "  'json.ignore-parse-errors' = 'true'," +
                "  'format' = 'json' " +
                ")";
    }

    /**
     * Builds a generic Kafka source {@code WITH (...)} clause reading JSON from the
     * committed group offsets.
     *
     * @param topic   Kafka topic to read from
     * @param groupId Kafka consumer group id
     * @return the {@code WITH (...)} DDL fragment
     */
    public static String getKafkaSourceDDL(String topic, String groupId) {
        // Brokers centralized in Constant.KAFKA_BROKERS (was an inline
        // 'hadoop162:9092,hadoop163:9092,hadoop164:9092' literal — assumed identical).
        return "with (  " +
                "      'connector' = 'kafka',  " +
                "      'format' = 'json',  " +
                "      'properties.bootstrap.servers' = '" + Constant.KAFKA_BROKERS + "',  " +
                "      'topic' = '" + topic + "',  " +
                "      'properties.group.id' = '" + groupId + "',  " +
                "      'scan.startup.mode' = 'group-offsets' " +
                "      )";
    }

    /**
     * Registers the table {@code ods_db} in the given table environment, mapped onto
     * the Kafka {@code ods_db} topic (Maxwell-style change records: database, table,
     * type, data/old maps, ts). Adds a processing-time column {@code pt}, an event-time
     * column {@code et} derived from the epoch-second {@code ts}, and a 3-second
     * bounded-out-of-orderness watermark on {@code et}.
     *
     * @param tableEnv Flink stream table environment to register the table in
     * @param groupId  Kafka consumer group id used by the underlying source
     * @Author: lzy
     * @Date:2023-02-13 - 22:31
     */
    public static void readTopicOdsDb(StreamTableEnvironment tableEnv, String groupId) {
        tableEnv.executeSql("CREATE TABLE ods_db ( " +
                "  `database` STRING, " +
                "  `table` STRING, " +
                "  `type` STRING, " +
                "  `data` MAP<STRING,STRING>, " +
                "  `old` MAP<STRING,STRING>, " +
                "  `ts`  BIGINT," +
                "  `pt` AS proctime(), " +
                // ts is interpreted as epoch seconds (precision 0).
                "   et as to_timestamp_ltz(ts,0), " +
                "   watermark for et as et - interval '3' second " +
                ")" + MyKafkaUtil.getOdsDbDDL(groupId));
    }

    /**
     * Builds a Kafka sink {@code WITH (...)} clause writing JSON with exactly-once
     * semantics. The transaction timeout (15 min) must stay below the broker's
     * {@code transaction.max.timeout.ms}.
     *
     * <p>NOTE(review): {@code 'sink.semantic'} is deprecated in newer Flink releases in
     * favor of {@code 'sink.delivery-guarantee'}; kept as-is for behavior parity —
     * revisit when upgrading Flink.
     *
     * @param topic Kafka topic to write to
     * @return the {@code WITH (...)} DDL fragment
     */
    public static String getKafkaSinkDDL(String topic) {
        // Brokers centralized in Constant.KAFKA_BROKERS (was an inline
        // 'hadoop162:9092,hadoop163:9092,hadoop164:9092' literal — assumed identical).
        return "with( " +
                "      'connector' = 'kafka', " +
                "      'topic' = '" + topic + "', " +
                "      'properties.bootstrap.servers' = '" + Constant.KAFKA_BROKERS + "', " +
                "      'sink.semantic' = 'exactly-once' ," +
                "      'properties.transaction.timeout.ms' = '900000', " +
                "      'format' = 'json' " +
                ")";
    }

    /**
     * Builds the {@code WITH (...)} clause for an upsert-kafka sink (keyed changelog
     * writes), with JSON key and value formats.
     *
     * @param topic Kafka topic the upserts are written to
     * @return the {@code WITH (...)} DDL fragment
     * @Author: lzy
     * @Date:2023-02-14 - 23:37
     */
    public static String getKafkaUpsertSinkDDL(String topic) {
        return "WITH( " +
                "'connector' = 'upsert-kafka',   " +
                "'properties.bootstrap.servers' = '" + Constant.KAFKA_BROKERS + "', " +
                "'topic' = '" + topic + "', " +
                "'key.format' = 'json',   " +
                "'value.format' = 'json')";
    }
}
