package net.bwie.realtime.jtp.dws.log.job;

import com.alibaba.fastjson.JSON;
import net.bwie.realtime.jtp.common.utils.JdbcUtil;
import net.bwie.realtime.jtp.common.utils.KafkaUtil;
import net.bwie.realtime.jtp.dws.log.bean.PageViewBean;
import net.bwie.realtime.jtp.dws.log.function.PageViewBeanMapFunction;
import net.bwie.realtime.jtp.dws.log.function.PageViewWindomFunction;
import org.apache.flink.api.common.eventtime.SerializableTimestampAssigner;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.KeyedStream;
import org.apache.flink.streaming.api.datastream.WindowedStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.windowing.assigners.TumblingEventTimeWindows;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.streaming.api.windowing.windows.TimeWindow;

import java.time.Duration;

/**
 * DWS job: reads DWD page-view logs from Kafka, aggregates them into
 * 1-minute tumbling event-time windows keyed by brand/channel/province/is_new,
 * and upserts the resulting report rows into ClickHouse.
 *
 * @Author: FuHe
 * @Date: 2025/5/19
 */
public class JtpLogPageViewMinuteWindowDwsJob {

    public static void main(String[] args) throws Exception {
        // Streaming execution environment; parallelism 1 for this job.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);

        // Source: DWD page-view log topic.
        DataStream<String> pageLogStream = KafkaUtil.consumerKafka(env, "dwd-traffic-page-log");
//        pageLogStream.print();

        // Transform: windowed aggregation producing one report row per key/window.
        DataStream<String> reportStream = handle(pageLogStream);
//        reportStream.print();

        // Sink: upsert aggregated rows into the ClickHouse report table
        // (11 columns matching the 11 bind parameters).
        JdbcUtil.sinkToClickhouseUpsert(
                reportStream, "INSERT INTO jtp_log_report.dws_log_page_view_window_report(\n" +
                        "    window_start_time, window_end_time,\n" +
                        "    brand, channel, province, is_new,\n" +
                        "    pv_count, pv_during_time, uv_count, session_count,\n" +
                        "    ts)\n" +
                        "VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)"
        );

        // Trigger job execution.
        env.execute("JtpLogPageViewMinuteWindowDwsJob");
    }

    /**
     * Aggregates raw page-view log JSON strings into per-minute window reports.
     *
     * @param pageStream raw DWD page-view log records (JSON strings)
     * @return serialized report rows, one per dimension key per window
     */
    private static DataStream<String> handle(DataStream<String> pageStream) {
        // 1. Key by device id (mid) BEFORE mapping, so the map function can use
        //    keyed state to record whether a device has already visited today (UV).
        KeyedStream<String, String> byDeviceStream = pageStream.keyBy(
                log -> JSON.parseObject(log).getJSONObject("common").getString("mid"));

        // 2. Wrap each raw log record into a PageViewBean.
        DataStream<PageViewBean> beanStream = byDeviceStream.map(new PageViewBeanMapFunction());

        // 3. Assign event-time timestamps from the bean's ts field; zero allowed
        //    out-of-orderness (effectively monotonous watermarks).
        DataStream<PageViewBean> withTimestampsStream = beanStream.assignTimestampsAndWatermarks(
                WatermarkStrategy.<PageViewBean>forBoundedOutOfOrderness(Duration.ZERO)
                        .withTimestampAssigner(
                                (SerializableTimestampAssigner<PageViewBean>)
                                        (bean, previousTimestamp) -> bean.getTs()));

        // 4. Group by the report dimensions: brand, channel, province, is_new.
        KeyedStream<PageViewBean, String> dimensionKeyedStream = withTimestampsStream.keyBy(
                bean -> bean.getBrand() + "," + bean.getChannel() + "," + bean.getProvince() + "," + bean.getIsNew());

        // 5. 1-minute tumbling event-time windows.
        WindowedStream<PageViewBean, String, TimeWindow> windowedStream =
                dimensionKeyedStream.window(TumblingEventTimeWindows.of(Time.minutes(1)));

        // 6. Aggregate each window's beans into a single serialized report row.
        return windowedStream.apply(new PageViewWindomFunction());
    }


}
