package net.bwie.realtime.jtp.dws.log.job;

import com.alibaba.fastjson.JSON;
import net.bwie.realtime.jtp.dws.log.bean.PageViewBean;
import net.bwie.realtime.jtp.dws.log.function.PageViewBeanMapFunction;
import net.bwie.realtime.jtp.dws.log.function.PageViewWindowFunction;
import net.bwie.realtime.jtp.utils.DorisUtil;
import net.bwie.realtime.jtp.utils.KafkaUtil;
import org.apache.flink.api.common.eventtime.SerializableTimestampAssigner;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.KeyedStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.datastream.WindowedStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.windowing.assigners.TumblingEventTimeWindows;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.streaming.api.windowing.windows.TimeWindow;

import java.time.Duration;

/**
 * Real-time per-minute aggregation of the day's APP traffic (page) logs.
 *     Dimensions: ar (region), ba (brand), ch (channel), is_new (new vs. returning visitor)
 *     Metrics: pv (page views), total browse duration, uv (unique visitors), sv (session count)
 */
public class JtpTrafficPageViewMinuteWindowDwsJob {

    public static void main(String[] args) throws Exception {
        // 1. Execution environment: parallelism 1, checkpoint every 3 seconds.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);
        env.enableCheckpointing(3000L);

        // 2. Source: DWD page-log JSON records from Kafka.
        DataStream<String> pageLogStream = KafkaUtil.consumerKafka(env, "dwd-traffic-page-log");

        // 3. Transform: keyed, windowed aggregation — see handle().
        DataStream<String> reportStream = handle(pageLogStream);
        reportStream.print("result============>>>");
        // Example output row:
        // 2025-08-19 08:40:00,2025-08-19 08:41:00,2025-08-19,vivo,xiaomi,北京,1,8,8,56,571998

        // 4. Sink: persist the aggregated report rows into Doris.
        DorisUtil.saveToDoris(
            reportStream, "jtp_realtime_report", "dws_traffic_page_view_window_report"
        );

        // 5. Launch the job.
        env.execute("JtpTrafficPageViewMinuteWindowDwsJob");
    }

    /**
     * DWS aggregation layer: converts raw page-view logs into per-minute,
     * per-dimension report rows (keyBy + tumbling event-time window).
     *
     * @param stream raw DWD page-log JSON strings
     * @return comma-separated report rows, one per (window, dimension key)
     */
    private static DataStream<String> handle(DataStream<String> stream) {
        // Key by device id (mid) so the downstream map function can keep
        // per-device state (used to flag the first visit of the day for uv).
        KeyedStream<String, String> byMid = stream.keyBy(
                log -> JSON.parseObject(log).getJSONObject("common").getString("mid")
        );

        // Wrap each JSON log line into a PageViewBean.
        DataStream<PageViewBean> beanStream = byMid.map(new PageViewBeanMapFunction());

        // Assign event time from the bean's ts field; zero tolerated
        // out-of-orderness (watermark tracks the max timestamp directly).
        // The cast disambiguates the overloaded withTimestampAssigner(...).
        SingleOutputStreamOperator<PageViewBean> timedStream = beanStream.assignTimestampsAndWatermarks(
                WatermarkStrategy
                        .<PageViewBean>forBoundedOutOfOrderness(Duration.ofSeconds(0))
                        .withTimestampAssigner(
                                (SerializableTimestampAssigner<PageViewBean>)
                                        (bean, recordTimestamp) -> bean.getTs()
                        )
        );

        // Group by the report dimensions: brand, channel, province, is_new.
        KeyedStream<PageViewBean, String> dimensionKeyed = timedStream.keyBy(
                bean -> bean.getBrand() + "," + bean.getChannel() + "," + bean.getProvince() + "," + bean.getIsNew()
        );

        // 1-minute tumbling event-time window, then apply the window function
        // that emits one report row per key and window.
        return dimensionKeyed
                .window(TumblingEventTimeWindows.of(Time.minutes(1)))
                .apply(new PageViewWindowFunction());
    }
}
