package org.example.realtime.jtp.dws.log.job;

import com.alibaba.fastjson.JSON;
import org.apache.flink.api.common.eventtime.SerializableTimestampAssigner;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.KeyedStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.datastream.WindowedStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.windowing.assigners.TumblingEventTimeWindows;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.streaming.api.windowing.windows.TimeWindow;
import org.example.realtime.jtp.common.utils.JdbcUtil;
import org.example.realtime.jtp.common.utils.KafkaUtil;
import org.example.realtime.jtp.dws.log.entity.PageViewBean;
import org.example.realtime.jtp.dws.log.function.PageViewBeanMapFunction;
import org.example.realtime.jtp.dws.log.function.PageViewWindowFunction;

import java.time.Duration;

/**
 * DWS job: aggregates DWD page-view logs into 1-minute tumbling event-time
 * windows per (brand, channel, province, is_new) dimension group and upserts
 * the resulting report rows into ClickHouse.
 *
 * <p>Pipeline: Kafka (dwd-traffic-page-log) → bean mapping with keyed state
 * → event-time windowing → window aggregation → ClickHouse sink.
 */
public class JtpTrafficPageViewMinuteWindowDwsJob {
    public static void main(String[] args) throws Exception {
        // Execution environment. Parallelism 1 keeps a single global watermark
        // progression; raise only after verifying keyed-state semantics.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);

        // Source: raw page-view log JSON from the DWD Kafka topic.
        DataStream<String> pageStream = KafkaUtil.consumerKafka(env, "dwd-traffic-page-log");

        // Transformation: windowed aggregation into report rows.
        DataStream<String> resultStream = handle(pageStream);

        // Sink: 11 columns matched by 11 '?' placeholders.
        JdbcUtil.sinkToClickhouseUpsert(
                resultStream,
                "INSERT INTO jtp_log_report.dws_log_page_view_window_report(\n" +
                        "    window_start_time, window_end_time,\n" +
                        "    brand, channel, province, is_new,\n" +
                        "    pv_count, pv_during_time, uv_count, session_count,\n" +
                        "    ts\n" +
                        ")\n" +
                        "VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?);"
        );

        env.execute("JtpTrafficPageViewMinuteWindowDwsJob");
    }

    /**
     * Aggregates the page-view log stream into per-minute report rows.
     *
     * @param pageStream raw page-view log records as JSON strings; each is
     *                   expected to contain a {@code common.mid} device id
     * @return serialized report rows, one per (dimension group, 1-minute window)
     */
    private static DataStream<String> handle(DataStream<String> pageStream) {
        // Key by device id (mid) so PageViewBeanMapFunction can use keyed state
        // to mark whether a device has already visited today (UV de-dup).
        // NOTE(review): the state logic lives in PageViewBeanMapFunction — confirm
        // it is a rich function relying on this keying.
        KeyedStream<String, String> midStream = pageStream.keyBy(
                json -> JSON.parseObject(json).getJSONObject("common").getString("mid")
        );

        // Wrap each log record into a PageViewBean.
        DataStream<PageViewBean> beanStream = midStream.map(new PageViewBeanMapFunction());

        // Assign event-time timestamps from the bean's ts field; zero tolerance
        // for out-of-order events (watermark == max seen timestamp).
        DataStream<PageViewBean> timeStream = beanStream.assignTimestampsAndWatermarks(
                WatermarkStrategy.<PageViewBean>forBoundedOutOfOrderness(Duration.ZERO)
                        .withTimestampAssigner(
                                (SerializableTimestampAssigner<PageViewBean>)
                                        (element, recordTimestamp) -> element.getTs()
                        )
        );

        // Group by the reporting dimensions: brand, channel, province, is_new.
        KeyedStream<PageViewBean, String> keyedStream = timeStream.keyBy(
                bean -> bean.getBrand() + "," + bean.getChannel() + "," + bean.getProvince() + "," + bean.getIsNew()
        );

        // 1-minute tumbling event-time windows, aggregated per dimension group.
        WindowedStream<PageViewBean, String, TimeWindow> window =
                keyedStream.window(TumblingEventTimeWindows.of(Time.minutes(1)));

        return window.apply(new PageViewWindowFunction());
    }
}