package com.intct.dws;

import com.alibaba.fastjson.JSONObject;
import com.intct.hbase.bean.LocusRedisBean;
import com.intct.common.Constant;
import com.intct.func.LocusSinkHBase;
import com.intct.util.KafkaUtil;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.connector.kafka.source.KafkaSource;
import org.apache.flink.streaming.api.CheckpointingMode;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;

import java.util.Arrays;

/**
 * @author gufg
 * @since 2025-07-28 15:46
 * 1、样例数据：
 * {
 *   "driverId": "11",
 *   "lat": "30.70324",
 *   "lng": "104.05355",
 *   "orderId": "17592719043682",
 *   "time": 1475976808
 * }
 *
 * {
 *   "driverId": "end",
 *   "orderId": "17592719043682",
 *   "time": 0
 * }
 *
 * 2、数据的乱序：
 * 2.1、正常数据顺序
 * orderId              time                    lng             lat
 * 17592719043682       17592719043682          104.05355       30.70324
 * 17592719043682       17592719043683          104.05356       30.70325
 * 17592719043682       17592719043684          104.05357       30.70327
 * 17592719043682       17592719043685          104.05358       30.70328
 *
 * 2.2、数据乱序
 * orderId              time                    lng             lat            topic(partition)
 * 17592719043682       17592719043685          104.05358       30.70328        0
 * 17592719043682       17592719043682          104.05355       30.70324        1
 * 17592719043682       17592719043684          104.05357       30.70327        2
 * 17592719043682       17592719043683          104.05356       30.70325        1
 *
 * 2.3 解决数据乱序问题的方案：
 *      1、Flink：水位线
 *      2、Redis：有序集合
 *          正常订单数据：zadd 键(订单ID) 分数(时间戳)  值(lng+":"+lat)
 *          订单结束标识：zadd 键(订单ID) 分数(9999999999)  值("end:end")
 * 2.4 将数据写HBase
 *      0、宽表(退维)
 *      1、rowkey : orderID
 *      2、表名：intct82:ads_locus (NOTE(review): the code below writes to "dws_locus" — confirm which table name is intended)
 *      3、列族：order_info
 *      4、列名: JSON
 *      5、值：JSON
 *
 */
public class DwsLocusApp {
    public static void main(String[] args) throws Exception {
        // Set up the streaming environment: single parallelism, exactly-once
        // checkpointing every 5 seconds.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);
        env.enableCheckpointing(5000L, CheckpointingMode.EXACTLY_ONCE);

        // Read trajectory records (DWD layer) from Kafka as raw JSON strings.
        KafkaSource<String> kafkaSource = KafkaUtil.getKafkaSource(Constant.KAFKA_DWD_LOCUS_TOPIC, "dws_locus_group_id");
        DataStreamSource<String> sourceDS = env.fromSource(kafkaSource, WatermarkStrategy.noWatermarks(), "dws_locus_source");

        // Map each JSON record to a Redis sorted-set entry:
        //   normal point -> zadd orderId score=time value="lng:lat"
        //   end marker   -> zadd orderId score=9999999999 value="end:end"
        // See the sample payloads in the class javadoc.
        SingleOutputStreamOperator<LocusRedisBean> redisMapDS = sourceDS.map(new MapFunction<String, LocusRedisBean>() {
            @Override
            public LocusRedisBean map(String jsonStr) throws Exception {
                JSONObject jsonObj = JSONObject.parseObject(jsonStr);

                String driverId = jsonObj.getString("driverId");
                String orderId = jsonObj.getString("orderId");

                // Defaults describe the end-of-order marker; the sentinel score
                // 9999999999 sorts after any real epoch-second timestamp.
                String value = "end:end";
                long score = 9999999999L;

                // Regular in-progress point for an order (driverId != "end"):
                // score = event time, value = "lng:lat".
                if (!"end".equals(driverId)) {
                    value = jsonObj.getString("lng") + ":" + jsonObj.getString("lat");
                    // getLongValue returns a primitive long (0 when the field is
                    // missing/null), avoiding the NPE that unboxing getLong()'s
                    // boxed Long would throw on a malformed record.
                    score = jsonObj.getLongValue("time");
                }

                return LocusRedisBean.builder()
                        .orderId(orderId)
                        .score(score)
                        .value(value)
                        .build();
            }
        });

        // Redis sink (custom) is currently disabled. NOTE(review): with no sink
        // attached, redisMapDS is a dead branch — Flink will not execute the map
        // operator above until a sink is re-enabled.
        // redisMapDS.addSink(new RedisSinkFunction());

        // Write the raw records to HBase (wide table, rowkey = orderId; see javadoc).
        // NOTE(review): the class javadoc mentions table "intct82:ads_locus" but this
        // writes to "dws_locus" — confirm which name is intended.
        sourceDS.addSink(new LocusSinkHBase("intct82", "dws_locus", Arrays.asList("orderId", "time")));

        // Submit the job with an explicit name so it is identifiable in the Flink UI.
        env.execute("DwsLocusApp");
    }
}













