package com.intct.dwd;

import com.intct.func.DesensitizationFunction;
import org.apache.flink.streaming.api.CheckpointingMode;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

/**
 * @author gufg
 * @since 2025-10-21 15:27
 *
 * FlinkSQL pipeline for vehicle trajectory (locus) data: conversion,
 * completion, desensitization, dimension lookup (sync or async lookup join),
 * writing to the DWD layer.
 *
 * Manual test checklist:
 *   1. ZooKeeper and Kafka services are running
 *   2. Flume-collected vehicle-trajectory ODS (Kafka) topic is populated
 *   3. HDFS and HBase services are running
 *   4. Start this DWD program
 *   5. Verify records arrive in the DWD (Kafka) topic
 */
public class DwdLocusSQL {

    public static void main(String[] args) {
        // 1. Environment: stream env with global parallelism 1 and
        // exactly-once checkpointing every 6 seconds, wrapped by a Table env.
        StreamExecutionEnvironment streamEnv = StreamExecutionEnvironment.getExecutionEnvironment();
        streamEnv.setParallelism(1);
        streamEnv.enableCheckpointing(6000L, CheckpointingMode.EXACTLY_ONCE);
        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(streamEnv);

        // 2. Source mapping -- ODS Kafka topic.
        // Sample record: {"op":"r","name":"locus","after":{"driver_id":"11","lng":"104.05268","time":"1475976820",
        // "order_id":"17592719043682","lat":"30.70273"},"ts_ms":1758252687777}
        // `pt` is a processing-time attribute required by the lookup join below.
        final String sourceDdl =
                "CREATE TABLE flink_locus (           " +
                "    op STRING                                         " +
                "    ,name STRING                                      " +
                "    ,after MAP<STRING, STRING>                        " +
                "    ,ts_ms BIGINT                                     " +
                "    ,pt AS PROCTIME()                                 " +
                ") WITH (                                              " +
                "    'connector'='kafka'                               " +
                "    ,'topic'='ods_locus'                              " +
                "    ,'properties.bootstrap.servers'='cdh-node:9092'   " +
                "    ,'properties.group.id'='locus_group'              " +
                "    ,'format'='json'                                  " +
                "    ,'scan.startup.mode'='earliest-offset'            " +
                ")                                                     ";
        tableEnv.executeSql(sourceDdl);

        // 3. Processing steps
        // TODO 3.1 deduplication
        // TODO 3.2 normalization
        // 3.3 conversion (done in the INSERT below)
        // TODO 3.4 filtering

        // 3.5 Desensitization: register the masking UDF used in the INSERT.
        tableEnv.createTemporarySystemFunction("desens", DesensitizationFunction.class);

        // 3.6 Dimension lookup
        // 3.6.1 Map the HBase driver dimension table (synchronous lookup).
        final String dimDdl =
                " CREATE TABLE hbase_driver_info(           " +
                "     id	STRING                                           " +
                "     ,info  ROW<                                            " +
                "               mobile	               STRING                " +
                "               ,driver_name	       STRING                " +
                "               ,create_time	       STRING                " +
                "               ,register_city	       STRING                " +
                "               ,driver_type	       STRING                " +
                "               ,cancel_count	       STRING                " +
                "               ,driver_management_id  STRING                " +
                "     >                                                      " +
                " )WITH(                                                     " +
                "     'connector' = 'hbase-2.2'                              " +
                "     ,'zookeeper.quorum' = 'cdh-node:2181'                  " +
                "     ,'table-name' = 'dim:dim_driver_info'                  " +
                " )                                                          ";
        tableEnv.executeSql(dimDdl);

        // 4. Sink mapping -- DWD Kafka topic (upsert-kafka keyed on `time`).
        final String sinkDdl =
                "CREATE TABLE kafka_dwd_locus (          " +
                "  driver_id STRING                                       " +
                "  ,order_id STRING                                       " +
                "  ,order_id_desens STRING                                " +
                "  ,lng STRING                                            " +
                "  ,lat STRING                                            " +
                "  ,`time` STRING                                         " +
                "  ,driver_name STRING                                    " +
                "  ,PRIMARY KEY (`time`) NOT ENFORCED                     " +
                ") WITH (                                                 " +
                "  'connector' = 'upsert-kafka'                           " +
                "  ,'topic' = 'dwd_locus'                                 " +
                "  ,'properties.bootstrap.servers' = 'cdh-node:9092'      " +
                "  ,'key.format' = 'json'                                 " +
                "  ,'value.format' = 'json'                               " +
                ")                                                        ";
        tableEnv.executeSql(sinkDdl);

        // NOTE(review): for exactly-once delivery the transactional-id prefix
        // and transaction timeout would also need configuring; checkpointing
        // is already enabled above -- confirm against the cluster setup.

        // 5. Conversion + completion + dimension lookup + sink (INSERT INTO).
        // COALESCE fills missing coordinates/time with sentinel values;
        // the lookup join enriches each record with the driver's name.
        final String insertDml =
                "INSERT INTO kafka_dwd_locus SELECT            " +
                "  f.after['driver_id'] AS driver_id                            " +
                ", f.after['order_id'] AS order_id                              " +
                ", desens(f.after['order_id'], 3, 3) AS order_id_desens         " +
                ", COALESCE(f.after['lng'], 'end') AS lng                       " +
                ", COALESCE(f.after['lat'], 'end') AS lat                       " +
                ", COALESCE(f.after['time'], '9999999999') AS `time`            " +
                ", h.info.driver_name AS driver_name                            " +
                " FROM flink_locus AS f                                         " +
                " LEFT JOIN hbase_driver_info FOR SYSTEM_TIME AS OF f.pt AS h   " +
                " ON f.after['driver_id'] = h.id                                ";
        tableEnv.executeSql(insertDml);
    }
}
