package com.intct.dwd;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.intct.func.DimAsyncFunction;
import com.intct.func.LookupAsyncFunc;
import com.intct.func.LookupAsyncFunc1;
import com.intct.func.LookupSyncFunc;
import com.intct.ods.OdsManApp;
import com.intct.util.KafkaUtil;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.java.utils.ParameterTool;
import org.apache.flink.connector.kafka.source.KafkaSource;
import org.apache.flink.streaming.api.CheckpointingMode;
import org.apache.flink.streaming.api.datastream.AsyncDataStream;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;

import java.util.concurrent.TimeUnit;

/**
 * @author gufg
 * @since 2025-08-20 15:52
 * Vehicle trajectory (locus) data processing.
 *  Upstream: Kafka data produced by a simulator --> source to target (json) --> flume --> ODS Kafka topic.
 *  Normalization:
 *      standardization
 *      dirty-data removal
 *      masking (desensitization)
 *      data conversion
 *  Dimension join (widening)
 *  Write results to DWD (Kafka)
 *
 * Raw data samples:
 *  {"op":"r","name":"locus","after":{"driver_id":"9","lng":"104.05345","time":"1475539314","order_id":"17592721831669","lat":"30.67478"},"ts_ms":1753688080876}
 *  {"op":"r","name":"locus","after":{"driver_id":"end","order_id":"17592721831669"},"ts_ms":1753688080876}
 */
public class DwdLocusApp {
    public static void main(String[] args) throws Exception {
        // Read command-line arguments.
        ParameterTool paraTools = ParameterTool.fromArgs(args);

        // Resolve the properties file: prefer --filePath, otherwise fall back to the
        // bundled /test.properties resource. Fail fast with a clear message instead of
        // an opaque NullPointerException when neither is available.
        String filePath = paraTools.get("filePath");
        if (filePath == null) {
            java.net.URL defaultProps = OdsManApp.class.getResource("/test.properties");
            if (defaultProps == null) {
                throw new IllegalStateException(
                        "No --filePath argument given and /test.properties was not found on the classpath");
            }
            filePath = defaultProps.getFile();
        }
        String topic = paraTools.get("kafka-topic-name", "ods_locus");

        // Load connection settings (Kafka brokers etc.) from the properties file.
        ParameterTool propertiesFile = ParameterTool.fromPropertiesFile(filePath);

        // Create the Flink streaming environment.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
//        env.setParallelism(1);

        // Enable exactly-once checkpointing every 6 seconds.
        env.enableCheckpointing(6000L, CheckpointingMode.EXACTLY_ONCE);

        // Read the ODS-layer records from Kafka.
        // NOTE(review): the source operator name reuses the consumer-group id string;
        // kept unchanged so existing checkpoint/savepoint operator state stays valid.
        KafkaSource<String> kafkaSource = KafkaUtil.getKafkaSource(propertiesFile, topic, "dwd_locus_group_id");
        DataStreamSource<String> sourceDS = env.fromSource(kafkaSource, WatermarkStrategy.noWatermarks(), "dwd_locus_group_id");

        // Keep only well-formed "locus" records. Malformed JSON is dropped instead of
        // failing the job — dirty-data removal is an explicit goal of this DWD step,
        // and an unguarded parse would kill the pipeline on a single bad record.
        SingleOutputStreamOperator<String> filterDS = sourceDS.filter(source -> {
            try {
                JSONObject obj = JSON.parseObject(source);
                return obj != null && "locus".equals(obj.getString("name"));
            } catch (Exception ignored) {
                return false; // not valid JSON -> dirty record, discard
            }
        });

        /*
         * Raw data envelope:
         *  {"op":"r","name":"locus","after":{"driver_id":"9","lng":"104.05345","time":"1475539314","order_id":"17592721831669","lat":"30.67478"},"ts_ms":1753688080876}
         *  {"op":"r","name":"locus","after":{"driver_id":"end","order_id":"17592721831669"},"ts_ms":1753688080876}
         *
         * Unwrap the envelope and keep only the "after" payload. Envelopes without an
         * "after" field would yield null, so they are filtered out to keep downstream
         * operators (the async dimension lookup) null-safe.
         * The anonymous MapFunction class is kept (instead of a lambda) because Flink
         * needs the reified generic types for serializer/type extraction.
         */
        SingleOutputStreamOperator<JSONObject> mapDS = filterDS.map(new MapFunction<String, JSONObject>() {
            @Override
            public JSONObject map(String jsonStr) throws Exception {
                return JSONObject.parseObject(jsonStr).getJSONObject("after");
            }
        }).filter(after -> after != null);

        // TODO standardization

        // TODO masking (desensitization)
        // {"driver_id":"9","lng":"104.05345","time":"1475539314","order_id":"17592721831669","lat":"30.67478"}

        /*
         * Dimension join (widening): asynchronous lookup against HBase with a Redis
         * side cache.
         *
         * Cache-consistency strategy for dimension updates:
         *   lookup path: redis -> (miss) -> hbase -> repopulate redis -> return
         *   on dimension change: update HBase and DELETE the Redis entry (rather than
         *   double-writing), so the next lookup repopulates the cache from HBase.
         */
        SingleOutputStreamOperator<String> unorderedWait = AsyncDataStream.unorderedWait(mapDS,
                new LookupAsyncFunc1("intct83", "dim_driver_info", "driver_id"), 60, TimeUnit.SECONDS, 1000);

        unorderedWait.print("unorderedWait >>> ");
        // Write the widened records to the DWD Kafka topic.
        // TODO(review): sink is currently disabled (debug-print only); enable for production.
//        unorderedWait.sinkTo(KafkaUtil.getKafkaSink(propertiesFile, "dwd_locus"));

        // Launch the job with an explicit name for easier identification in the Flink UI.
        env.execute("DwdLocusApp");
    }
}
