package com.intct.dwd;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.intct.hbase.bean.LocusBean;
import com.intct.func.DwdLocusProcessFunc;
import com.intct.util.HBaseUtil;
import com.intct.util.KafkaUtil;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.configuration.RestOptions;
import org.apache.flink.connector.kafka.sink.KafkaSink;
import org.apache.flink.connector.kafka.source.KafkaSource;
import org.apache.flink.streaming.api.CheckpointingMode;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.util.Bytes;

/**
 * @author gufg
 * @since 2025-07-04 09:22
 * 处理车辆轨迹数据
 */
public class DwdLocusApp {

    /** HBase dimension table holding driver attributes. */
    private static final String DIM_DRIVER_TABLE = "dim_driver_info";

    /** Qualifier of the driver-name column in the dimension table. */
    private static final String DRIVER_NAME_QUALIFIER = "driver_name";

    public static void main(String[] args) throws Exception {
        // Bind the Flink REST/web UI to a fixed port for local runs.
        Configuration conf = new Configuration();
        conf.set(RestOptions.BIND_PORT, "8081");

        // Create the streaming environment.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(conf);
        env.setParallelism(1);

        // Enable exactly-once checkpointing every 5 seconds.
        env.enableCheckpointing(5000L, CheckpointingMode.EXACTLY_ONCE);

        // Read ODS data from Kafka topic "ods_locus".
        KafkaSource<String> kafkaSource = KafkaUtil.getKafkaSource("ods_locus", "ods_locus_group_id");
        DataStreamSource<String> odsLocusDS = env.fromSource(kafkaSource, WatermarkStrategy.noWatermarks(), "ods_locus_job");

        // Keep only "locus" events; malformed JSON records are dropped
        // instead of failing the whole job (see isLocusRecord).
        SingleOutputStreamOperator<String> filterDS = odsLocusDS.filter(DwdLocusApp::isLocusRecord);

        // Convert raw JSON into LocusBean records.
        SingleOutputStreamOperator<LocusBean> processDS = filterDS.process(new DwdLocusProcessFunc());

        // Dimension enrichment (wide table): attach the driver name from HBase.
        // NOTE(review): this is a synchronous per-record HBase lookup; consider
        // AsyncDataStream or a local cache if throughput becomes an issue.
        SingleOutputStreamOperator<String> mapDS = processDS.map(new MapFunction<LocusBean, String>() {
            @Override
            public String map(LocusBean locusBean) throws Exception {
                enrichDriverName(locusBean);
                return JSON.toJSONString(locusBean);
            }
        });

        // Write the enriched records to the DWD Kafka topic.
        KafkaSink<String> kafkaSink = KafkaUtil.getKafkaSink("dwd_locus", "dwd_locus_job");
        mapDS.sinkTo(kafkaSink);

        // Launch the job.
        env.execute("dwd_locus_job");
    }

    /**
     * Returns {@code true} when the raw record is a "locus" event.
     * A malformed or empty JSON payload returns {@code false} so that one bad
     * Kafka record cannot crash the streaming job.
     *
     * @param raw the raw JSON string from the ODS topic
     * @return whether the record's {@code name} field equals {@code "locus"}
     */
    private static boolean isLocusRecord(String raw) {
        try {
            JSONObject obj = JSONObject.parseObject(raw);
            // parseObject returns null for null/empty input — guard before getString.
            return obj != null && "locus".equals(obj.getString("name"));
        } catch (RuntimeException ignored) {
            // fastjson throws JSONException (a RuntimeException) on bad input;
            // drop the record rather than fail the job.
            return false;
        }
    }

    /**
     * Looks up the driver name in the HBase dimension table and sets it on the
     * bean. Falls back to an empty string when the driver id is null, the row
     * is absent, or the column is missing — same default as before, but without
     * the NPE the unguarded {@code result.rawCells()} call could raise.
     *
     * @param locusBean the record to enrich (mutated in place)
     * @throws Exception if the HBase lookup fails
     */
    private static void enrichDriverName(LocusBean locusBean) throws Exception {
        String driverName = "";
        String rowKey = locusBean.getDriverId();
        if (rowKey != null) {
            Result result = HBaseUtil.getRowKey(DIM_DRIVER_TABLE, rowKey);
            if (result != null && !result.isEmpty()) {
                for (Cell cell : result.rawCells()) {
                    String column = Bytes.toString(CellUtil.cloneQualifier(cell));
                    if (DRIVER_NAME_QUALIFIER.equals(column)) {
                        driverName = Bytes.toString(CellUtil.cloneValue(cell));
                    }
                }
            }
        }
        locusBean.setDriverName(driverName);
    }
}
