package com.intct.dws;

import com.intct.common.FlinkSqlWithUtil;
import com.intct.func.RowKeyFunction;
import org.apache.flink.streaming.api.CheckpointingMode;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

import java.util.UUID;

/**
 * @author gufg
 * @since 2025-10-22 11:06
 *
 * source: DWD(kafka)
 * sink: redis(展示层)、hbase(历史)
 *
 * 样例数据：11,17592719043682,1475977126,104.04276,30.69286
 * 处理：
 *      Flink水位线处理
 *      Redis: 有序集合(车辆订单轨迹)  如：zadd 主键(订单ID) 分数(time字段) 值(经度+":"+纬度)
 *             实时订单集合：  如：sadd 主键(订单ID)
 *             历史订单集合：  如：sadd 主键(订单ID)
 *      HBase：
 *          500 * 3000 =
 *          创建HBase表，做预分区
 *                第1分区            100|
 *                第2分区  100|       200|
 *                第3分区  200|       300|
 *                第4分区  300|       400|
 *                第5分区  400|
 *                ...
 *                  分区：1000
 *                      Rowkey = ABS(hashCode(订单号)) mod 1000 + substring(md5(订单号+time), 12)
 *                               100175927190436821475977126
 *              rowkey设计的原则：唯一性，散列性，尽量简短
 *              rowkey设计的一些技巧：数据的反转，加盐，hash等等
 *              rowkey字典序，最大字符=|
 *          保存
 */
public class DwsLocusSQL {

    /**
     * Default checkpoint storage URI, used when no CLI argument is supplied.
     * Kept identical to the previous hard-coded value for backward compatibility.
     */
    private static final String DEFAULT_CHECKPOINT_URI = "file:/d:/test/ckpt";

    /**
     * Entry point: wires Kafka DWD locus topic -> Redis (sorted-set trajectory)
     * and -> HBase (historical store).
     *
     * @param args optional; args[0] overrides the checkpoint storage URI
     *             (e.g. "file:///tmp/ckpt" or "hdfs:///flink/ckpt").
     */
    public static void main(String[] args) {
        // Disable Kafka client metrics via system property before any client is created.
        System.setProperty("enable.metrics", "false");

        String checkpointUri = args.length > 0 ? args[0] : DEFAULT_CHECKPOINT_URI;

        // 1. Streaming environment + Table API environment.
        StreamExecutionEnvironment env = createEnvironment(checkpointUri);
        StreamTableEnvironment tenv = StreamTableEnvironment.create(env);

        // 2. Source mapping -- DWD (Kafka).
        registerKafkaSource(tenv);

        // 3-4. Redis sink mapping + insert (real-time trajectory display layer).
        writeLocusToRedis(tenv);

        // 5-7. HBase sink mapping, rowkey UDF registration, insert (history layer).
        writeLocusToHBase(tenv);
    }

    /**
     * Builds the stream environment: parallelism 1, exactly-once checkpointing
     * every 6s, operator chaining disabled (reduces connection contention).
     *
     * @param checkpointUri where checkpoint state is stored
     * @return configured execution environment
     */
    private static StreamExecutionEnvironment createEnvironment(String checkpointUri) {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        // Global parallelism of 1 keeps ordering simple for this demo-scale job.
        env.setParallelism(1);
        // Checkpoint every 6s with exactly-once semantics.
        env.enableCheckpointing(6000L, CheckpointingMode.EXACTLY_ONCE);
        env.getCheckpointConfig().setCheckpointStorage(checkpointUri);
        env.disableOperatorChaining();  // disable chaining to reduce connection conflicts
        return env;
    }

    /**
     * Registers the Kafka DWD source table.
     *
     * Sample record:
     * <pre>
     * {
     *   "driver_id": "15",
     *   "order_id": "17592719302995",
     *   "order_id_desens": "175********995",
     *   "lng": "104.04599",
     *   "lat": "30.67489",
     *   "time": "1475974881",
     *   "driver_name": null
     * }
     * </pre>
     *
     * @param tenv table environment to register the table in
     */
    private static void registerKafkaSource(StreamTableEnvironment tenv) {
        // Unique client id per run avoids Kafka consumer client-id clashes on restart.
        String uniqueClientId = "flink-job-" + UUID.randomUUID().toString().substring(0, 8);

        tenv.executeSql("CREATE TABLE source_dwd_locus (      " +
                "    driver_id STRING                                  " +
                "    ,order_id STRING                                  " +
                "    ,order_id_desens STRING                           " +
                "    ,lng STRING                                       " +
                "    ,lat STRING                                       " +
                "    ,`time` STRING                                    " +
                "    ,driver_name STRING                               " +
                "    ,pt AS PROCTIME()                                 " +
                ") " + FlinkSqlWithUtil.getKafkaSourceWith("dwd_locus", "dwd_locus_id", uniqueClientId));
    }

    /**
     * Creates the Redis sink table (zadd: key=order_id, score=time,
     * value=lng:lat, TTL 604800s = 7 days) and streams locus rows into it.
     *
     * @param tenv table environment holding {@code source_dwd_locus}
     */
    private static void writeLocusToRedis(StreamTableEnvironment tenv) {
        // NOTE(review): util method is named getRedisSourceWith but is used here for a
        // sink table — confirm the helper actually emits sink connector options.
        tenv.executeSql("CREATE TABLE sink_redis_locus (      " +
                "    key STRING                                        " +
                "    ,score BIGINT                                     " +
                "    ,`value` STRING                                   " +
                ") " + FlinkSqlWithUtil.getRedisSourceWith("604800", "zadd"));

        tenv.executeSql("INSERT INTO sink_redis_locus           " +
                " SELECT                                                 " +
                "     order_id                                           " +
                "     , CAST(`time` AS BIGINT)                           " +
                "     , CONCAT(lng, ':', lat)                            " +
                " FROM source_dwd_locus");
    }

    /**
     * Creates the HBase sink table, registers the rowkey UDF and streams
     * locus rows into HBase. Rowkey = rowkeyFunc(order_id + time, 5)
     * (salted/hashed per the class-level design notes).
     *
     * @param tenv table environment holding {@code source_dwd_locus}
     */
    private static void writeLocusToHBase(StreamTableEnvironment tenv) {
        // NOTE(review): "ads_locucs" looks like a typo of "ads_locus" — confirm the
        // actual HBase table name before changing it; string kept as-is here.
        tenv.executeSql("CREATE TABLE sink_hbase_locus (        " +
                "    rowkey STRING                                       " +
                "    ,info ROW<                                           " +
                "       driver_id STRING                                 " +
                "       ,order_id STRING                                 " +
                "       ,order_id_desens STRING                          " +
                "       ,lng STRING                                      " +
                "       ,lat STRING                                      " +
                "       ,`time` STRING                                   " +
                "       ,driver_name STRING                              " +
                "    >                                                   " +
                ") " + FlinkSqlWithUtil.getHBaseSinkWith("ads", "ads_locucs"));

        // Register the custom rowkey-generation UDF used in the INSERT below.
        tenv.createTemporarySystemFunction("rowkeyFunc", RowKeyFunction.class);

        tenv.executeSql("INSERT INTO sink_hbase_locus            " +
                " SELECT                                                  " +
                "    rowkeyFunc(CONCAT(order_id, `time`), 5)              " +
                "    ,ROW(                                                " +
                "       driver_id                                         " +
                "       ,order_id                                         " +
                "       ,order_id_desens                                  " +
                "       ,lng                                              " +
                "       ,lat                                              " +
                "       ,`time`                                           " +
                "       ,driver_name                                      " +
                "       ) AS info                                         " +
                " FROM source_dwd_locus");
    }
}
