package com.bw.gmall.realtime.common.util;

import com.bw.gmall.realtime.common.constant.Constant;

/**
 * Utility class that builds the {@code WITH (...)} connector clauses used by
 * Flink SQL DDL statements (Kafka source/sink, upsert-Kafka, HBase lookup).
 *
 * <p>Background note on retract streams written to Kafka: a retract stream such as
 * <pre>
 *   left  null   tagged +I
 *   left  null   tagged -D
 *   left  right  tagged +I
 * </pre>
 * produces three Kafka messages, one of which is a null tombstone. When the topic is
 * read back with Flink SQL, null messages are filtered out automatically; when it is
 * read with the DataStream API, the default {@code SimpleStringSchema} cannot handle
 * null messages and a custom deserializer is required. Besides null messages,
 * deduplication is still needed when aggregating in the DWS layer.
 */
public final class SQLUtil {

    /** Utility class — static methods only, never instantiated. */
    private SQLUtil() {
    }

    /**
     * Builds the connector clause for a plain Kafka <em>source</em> table.
     *
     * <p>Use the plain {@code kafka} connector for append-only reads; when the table
     * has a primary key and carries upsert/retract messages, use
     * {@link #getUpsertKafkaDDL(String)} instead.
     *
     * @param groupId Kafka consumer group id
     * @param topic   Kafka topic to read from
     * @return the {@code WITH (...)} clause for a Kafka source table
     */
    public static String getKafkaDDLSource(String groupId, String topic) {
        return  " WITH (\n" +
                "  'connector' = 'kafka',\n" +
                "  'topic' = '" + topic + "',\n" +
                "  'properties.bootstrap.servers' = '" + Constant.KAFKA_BROKERS + "',\n" +
                "  'properties.group.id' = '" + groupId + "',\n" +
                // latest-offset skips history; switch to earliest-offset to replay old data
                "  'scan.startup.mode' = 'latest-offset',\n" +
                // drop records whose JSON cannot be parsed instead of failing the job
                "  'json.ignore-parse-errors' = 'true'," +
                "  'format' = 'json'\n" +
                ")";
    }

    /**
     * Builds the connector clause for a plain Kafka <em>sink</em> table
     * (append-only, JSON-encoded).
     *
     * @param topic Kafka topic to write to
     * @return the {@code with(...)} clause for a Kafka sink table
     */
    public static String getKafkaDDLSink(String topic) {
        return " with(" +
                " 'connector' = 'kafka'," +
                " 'topic' = '" + topic + "'," +
                " 'properties.bootstrap.servers' = '" + Constant.KAFKA_BROKERS + "'," +
                " 'format' = 'json' " +
                ")";
    }

    /**
     * Builds the connector clause for an HBase table used as a lookup (dimension) source.
     *
     * @param tableName HBase table name (qualified with {@code Constant.HBASE_NAMESPACE})
     * @return the {@code WITH (...)} clause for an HBase 2.2 lookup table
     */
    public static String getHBaseDDL(String tableName) {
        return " WITH (\n" +
                " 'connector' = 'hbase-2.2',\n" +
                " 'table-name' = '" + Constant.HBASE_NAMESPACE + ":" + tableName + "',\n" +
                " 'zookeeper.quorum' = 'hadoop102:2181,hadoop103:2181,hadoop104:2181',\n" +
                // partial (LRU) lookup cache with async lookups to cut HBase round trips
                " 'lookup.cache' = 'PARTIAL', " +
                " 'lookup.async' = 'true', " +
                " 'lookup.partial-cache.max-rows' = '20', " +
                // NOTE(review): Flink duration strings are usually written with a space,
                // e.g. '2 hour' — confirm '2hour' parses on the deployed Flink version
                " 'lookup.partial-cache.expire-after-access' = '2hour' " +
                ")";
    }

    /**
     * Builds the connector clause for an upsert-Kafka table.
     *
     * <p>Use this connector when the table declares a primary key and the stream
     * carries upsert/retract changes; otherwise the plain {@code kafka} connector
     * ({@link #getKafkaDDLSink(String)}) is sufficient.
     *
     * @param topic Kafka topic to read from / write to
     * @return the {@code with(...)} clause for an upsert-Kafka table
     */
    public static String getUpsertKafkaDDL(String topic) {
        // Leading space added so concatenation after the column list never yields ")with("
        return " with(" +
                " 'connector' = 'upsert-kafka'," +
                " 'topic' = '" + topic + "'," +
                " 'properties.bootstrap.servers' = '" + Constant.KAFKA_BROKERS + "'," +
                " 'key.json.ignore-parse-errors' = 'true'," +
                " 'value.json.ignore-parse-errors' = 'true'," +
                " 'key.format' = 'json', " +
                " 'value.format' = 'json' " +
                ")";
    }

}
