package com.intct.dwd;

import cn.hutool.core.util.DesensitizedUtil;
import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.intct.common.Constant;
import com.intct.func.LookupAsyncDimFun;
import com.intct.utils.KafkaUtil;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.java.utils.ParameterTool;
import org.apache.flink.connector.kafka.sink.KafkaSink;
import org.apache.flink.connector.kafka.source.KafkaSource;
import org.apache.flink.streaming.api.CheckpointingMode;
import org.apache.flink.streaming.api.datastream.AsyncDataStream;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.util.Collector;

import java.util.concurrent.TimeUnit;

/**
 * Vehicle trajectory (locus) DWD job.
 *
 * <p>Pipeline: ODS Kafka (raw CDC records from the simulator → flume → kafka) →
 * standardize / data-quality / desensitize → dimension widening via async
 * HBase lookup with a Redis side cache ({@link LookupAsyncDimFun}) → DWD Kafka.
 *
 * <p>Dimension-join strategy notes (why async + cache):
 * <ul>
 *   <li>Direct synchronous HBase lookups: simple, but per-record queries are
 *       too slow at this volume.</li>
 *   <li>Async HBase lookups: better throughput, still query-heavy.</li>
 *   <li>Async HBase + Redis side cache (chosen): best throughput, at the cost
 *       of HBase/Redis consistency management.</li>
 * </ul>
 *
 * @author gufg
 * @since 2025-09-19 09:47
 */
public class DwdLocusAPI {

    public static void main(String[] args) throws Exception {
        // Set up the streaming environment.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);

        // Exactly-once checkpointing every 5 seconds.
        env.enableCheckpointing(5000L, CheckpointingMode.EXACTLY_ONCE);

        // Read job configuration from the classpath resource.
        ParameterTool parameterTool = ParameterTool.fromPropertiesFile(
                DwdLocusAPI.class.getResource("/intct.properties").getFile());

        // ODS source: raw CDC locus records from Kafka.
        KafkaSource<String> kafkaSource =
                KafkaUtil.getKafkaSource(parameterTool, Constant.KAFKA_LOCUS_ODS_TOPIC_NAME);
        DataStreamSource<String> sourceDS =
                env.fromSource(kafkaSource, WatermarkStrategy.noWatermarks(), "dwd_locus_source");

        // sourceDS.print();

        // Standardize / desensitize each record. flatMap (instead of map) lets
        // us DROP malformed envelopes that carry no "after" payload rather
        // than failing the whole job with a NullPointerException.
        SingleOutputStreamOperator<String> mapDS =
                sourceDS.flatMap(new FlatMapFunction<String, String>() {
                    @Override
                    public void flatMap(String jsonStr, Collector<String> out) {
                        String standardized = standardize(jsonStr);
                        if (standardized != null) {
                            out.collect(standardized);
                        }
                        // TODO(review): route dropped records to a side output
                        // for data-quality auditing instead of discarding them.
                    }
                });

        // mapDS.print();

        // Dimension widening: async lookup of the driver dimension
        // (HBase table dim_driver_info, row key = driver_id) with side cache.
        SingleOutputStreamOperator<String> unorderedWait =
                AsyncDataStream.unorderedWait(
                        mapDS,
                        new LookupAsyncDimFun("intct83", "dim_driver_info", "driver_id"),
                        30,                 // lookup timeout
                        TimeUnit.SECONDS,
                        1000);              // max in-flight async requests

        // unorderedWait.print();

        // Sink the widened records to the DWD Kafka topic.
        KafkaSink<String> dwdLocus = KafkaUtil.getKafaSink(parameterTool, "dwd_locus", "dwd-locus");
        unorderedWait.sinkTo(dwdLocus);

        // Launch the job. Name fixed from "dws_locus_job": this is the DWD layer.
        env.execute("dwd_locus_job");
    }

    /**
     * Strips the CDC envelope, standardizes end-of-trip markers and
     * desensitizes the order id for a single raw record.
     *
     * <p>Expected payload shapes (observed from the ODS topic):
     * <pre>
     * {"driver_id":"11","lng":"104.04276","time":"1475977126","order_id":"17592719043682","lat":"30.69286"}
     * {"driver_id":"end","order_id":"17592719043682"}   // end-of-trip marker
     * </pre>
     *
     * <p>Downstream the records feed a Redis sorted set keyed by order_id with
     * score = time and value = lng + ":" + lat, so the end marker is given the
     * maximum score and sentinel coordinates to always sort last.
     *
     * @param jsonStr raw CDC JSON string
     * @return the standardized "after" payload as a JSON string, or
     *         {@code null} when the record has no "after" object
     *         (malformed or unsupported envelope — caller should drop it)
     */
    private static String standardize(String jsonStr) {
        JSONObject jsonObj = JSON.parseObject(jsonStr);

        // Drop CDC envelope fields not needed downstream.
        jsonObj.remove(Constant.FLINK_CDC_MYSQL_JSON_OP_KEY);
        jsonObj.remove("name");
        jsonObj.remove(Constant.FLINK_CDC_MYSQL_JSON_TS_MS_KEY);

        JSONObject afterObj = jsonObj.getJSONObject(Constant.FLINK_CDC_MYSQL_JSON_AFTER_KEY);
        if (afterObj == null) {
            // No "after" payload: previously this caused an NPE that failed
            // the job. Signal the caller to skip the record instead.
            return null;
        }

        // Standardize the end-of-trip marker (driver_id == "end"): maximum
        // score plus sentinel coordinates so it sorts after every real point.
        String driverId = afterObj.getString("driver_id");
        if ("end".equals(driverId)) {
            afterObj.put("time", "9999999999");
            afterObj.put("lng", "end");
            afterObj.put("lat", "end");
        }

        // TODO: further data-quality checks (mandatory fields, value ranges, ...).

        // Desensitize the order id (mask all but the first/last 3 characters).
        // NOTE(review): key "oder_id_desen" is misspelled ("order_..."), but it
        // is kept as-is because downstream consumers may already read this key.
        String orderId = afterObj.getString("order_id");
        String oderIdDesen = DesensitizedUtil.idCardNum(orderId, 3, 3);
        afterObj.put("oder_id_desen", oderIdDesen);

        return afterObj.toJSONString();
    }
}

























