package com.intct.dws;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.intct.hbase.bean.AreaBean;
import com.intct.hbase.bean.VehicleBean;
import com.intct.common.Constant;
import com.intct.util.HBaseUtil;
import com.intct.util.KafkaUtil;
import org.apache.commons.lang3.time.DateFormatUtils;
import org.apache.flink.api.common.eventtime.SerializableTimestampAssigner;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.typeinfo.Types;
import org.apache.flink.connector.kafka.source.KafkaSource;
import org.apache.flink.streaming.api.CheckpointingMode;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.KeyedStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.windowing.WindowFunction;
import org.apache.flink.streaming.api.windowing.assigners.TumblingEventTimeWindows;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.streaming.api.windowing.windows.TimeWindow;
import org.apache.flink.util.Collector;
import org.apache.flink.util.OutputTag;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.util.Bytes;

import java.util.HashSet;
import java.util.Set;

/**
 * @author gufg
 * @since 2025-07-30 09:25
 *
 *
 */
/**
 * DWS-layer vehicle aggregation job.
 *
 * <p>Pipeline: reads DWD vehicle events from Kafka as JSON, assigns event-time
 * watermarks from the {@code actionTime} field, keys by {@code areaId},
 * counts distinct cars per area in 1-minute tumbling event-time windows,
 * enriches each window result with area dimension data looked up from HBase,
 * and (TODO) writes the enriched records out.
 */
public class DwsVehicleApp {
    public static void main(String[] args) throws Exception {
        // Set up the streaming environment.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        // Single parallelism (development/demo setting).
        env.setParallelism(1);
        // Checkpoint every 5 seconds with exactly-once semantics.
        env.enableCheckpointing(5000L, CheckpointingMode.EXACTLY_ONCE);

        // Kafka source: DWD vehicle topic, dedicated consumer group for this job.
        KafkaSource<String> kafkaSource = KafkaUtil.getKafkaSource(Constant.KAFKA_DWD_VEHICLE_TOPIC, "dws-vehicle-group-id");
        DataStreamSource<String> sourceDS = env.fromSource(kafkaSource, WatermarkStrategy.noWatermarks(), "dws-vehicle-source-name");

        // Deserialize each JSON string into a VehicleBean POJO.
        SingleOutputStreamOperator<VehicleBean> mapDS = sourceDS.map(value -> JSONObject.parseObject(value, VehicleBean.class));
        /*
            Sample input record:
            {
              "actionTime": 1753778152,
              "areaId": "110119",
              "driverId": "727",
              "carShelfNumber": "Y260038928D",
              "car": "399",
              "type": "0",
              "speed": 58
            }
         */

        mapDS.print();

        // Assign event-time watermarks.
        SingleOutputStreamOperator<VehicleBean> vehicleDS = mapDS.assignTimestampsAndWatermarks(
                WatermarkStrategy
                        // Monotonically increasing timestamps: no out-of-orderness allowance needed.
                        .<VehicleBean>forMonotonousTimestamps()
                        // For out-of-order streams, use forBoundedOutOfOrderness(Duration) instead.
                        // Extract the event time from the record itself.
                        .withTimestampAssigner(
                                new SerializableTimestampAssigner<VehicleBean>() {
                                    @Override
                                    public long extractTimestamp(VehicleBean vehicleBean, long recordTimestamp) {
                                        // actionTime is epoch seconds (see sample record above);
                                        // multiply by 1000L to widen to long BEFORE the multiply,
                                        // avoiding int overflow if getActionTime() returns int.
                                        return vehicleBean.getActionTime() * 1000L;
                                    }
                                }
                        )
        );

        // Key the stream by area so each area is windowed independently.
        KeyedStream<VehicleBean, String> vehicleKeyedStream = vehicleDS.keyBy(VehicleBean::getAreaId);

        // Side-output tag for records arriving after the window has fully closed.
        OutputTag<VehicleBean> vehicleBeanOutputTag = new OutputTag<VehicleBean>("vehicle_output", Types.POJO(VehicleBean.class));

        /*
            window vs windowAll:
            window:    parallel, applies to a KeyedStream — efficient.
            windowAll: single parallelism, applies to a plain DataStream — inefficient.
         */
        SingleOutputStreamOperator<AreaBean> areaStream = vehicleKeyedStream
                .window(
                        // 1-minute tumbling event-time windows.
                        TumblingEventTimeWindows.of(Time.minutes(1))
                )
                // Keep the window open for 2 extra seconds of lateness.
                .allowedLateness(Time.seconds(2))
                // Records later than that go to the side output.
                // NOTE(review): the side output is never consumed below
                // (areaStream.getSideOutput(vehicleBeanOutputTag)) — late data is
                // currently dropped silently; confirm whether that is intended.
                .sideOutputLateData(vehicleBeanOutputTag)
                .apply(new WindowFunction<VehicleBean, AreaBean, String, TimeWindow>() {
                    /**
                     * Counts distinct cars seen in this area during the window.
                     */
                    @Override
                    public void apply(String areaId, TimeWindow window, Iterable<VehicleBean> input, Collector<AreaBean> out) throws Exception {
                        // Format window bounds for the output record.
                        String start = DateFormatUtils.format(window.getStart(), "yyyy-MM-dd HH:mm:ss");
                        String end = DateFormatUtils.format(window.getEnd(), "yyyy-MM-dd HH:mm:ss");

                        // A Set deduplicates car ids, giving a distinct-car count.
                        Set<String> carSet = new HashSet<>();
                        for (VehicleBean vehicleBean : input) {
                            carSet.add(vehicleBean.getCar());
                        }

                        // Package the per-area, per-window result.
                        AreaBean areaBean = AreaBean.builder()
                                .areaId(areaId)
                                .carCount(carSet.size())
                                .windowStart(start)
                                .windowEnd(end)
                                .build();

                        out.collect(areaBean);
                    }
                });

        // Dimension enrichment (HBase point lookups; could also use broadcast state or Redis).
        SingleOutputStreamOperator<String> mapDimDS = areaStream.map(new MapFunction<AreaBean, String>() {
            /**
             * Enriches an AreaBean with dimension columns read from HBase and
             * returns the merged record as a JSON string.
             */
            @Override
            public String map(AreaBean areaBean) throws Exception {
                // Convert the bean to a mutable JSONObject so dimension
                // columns can be merged in.
                JSONObject jsonObj = (JSONObject) JSON.toJSON(areaBean);

                // HBase point get, equivalent to: get 'intct82:dim_area_info', '<areaId>'
                String namespace = "intct82";
                String tableName = "dim_area_info";
                String rowkey = areaBean.getAreaId();

                Result result = HBaseUtil.getRowkey(namespace, tableName, rowkey);
                Cell[] cells = result.rawCells();

                // Merge every dimension column (qualifier -> value) into the record.
                for (Cell cell : cells) {
                    String colName = Bytes.toString(CellUtil.cloneQualifier(cell));
                    String value = Bytes.toString(CellUtil.cloneValue(cell));
                    jsonObj.put(colName, value);
                }

                /*
                    Sample enriched output:
                    {
                      "areaId": "110119",
                      "carCount": 3,
                      "windowStart": "...",
                      "windowEnd": "...",
                      "name": "北京市延庆区"
                    }
                 */
                return jsonObj.toJSONString();
            }
        });

        // TODO: write the enriched stream out (HBase sink).

        // Launch the job with a descriptive name for the Flink UI.
        env.execute("DwsVehicleApp");
    }
}
