package net.bwie.realtime.jtp.dws.log2.job2;

import com.alibaba.fastjson.JSON;
import net.bwie.realtime.jtp.dws.log2.bean2.PageViewBean2;
import net.bwie.realtime.jtp.dws.log2.function2.PageViewBeanMapFunction2;
import net.bwie.realtime.jtp.dws.log2.function2.PageViewReportReduceFunction2;
import net.bwie.realtime.jtp.dws.log2.function2.PageViewWindowFunction2;
import net.bwie.realtime.jtp.utils.DorisUtil;
import net.bwie.realtime.jtp.utils.KafkaUtil;
import org.apache.flink.api.common.eventtime.SerializableTimestampAssigner;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.KeyedStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.datastream.WindowedStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.windowing.assigners.TumblingEventTimeWindows;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.streaming.api.windowing.windows.TimeWindow;

import java.time.Duration;

/**
 * DWS-layer traffic job: aggregates DWD page-view logs into per-minute
 * tumbling-window report rows and writes them to Doris.
 *
 * <p>Pipeline: Kafka (topic {@code dwd-traffic-page-log}) -> bean mapping ->
 * event-time watermarks -> keyBy(brand, channel, province, isNew) ->
 * 1-minute tumbling event-time window -> incremental reduce + window function
 * -> Doris table {@code jtp_realtime_report.dws_traffic_page_view_window_report}.
 */
public class JtpTrafficPageViewMinuteWindowDwsJob2 {
    public static void main(String[] args) throws Exception {
        // 1. Execution environment. Parallelism 1 keeps ordering simple for
        // this report job; checkpoint every 3s for exactly-once sink semantics.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);
        env.enableCheckpointing(3000L);

        // 2. Source: DWD page log topic from Kafka.
        DataStream<String> pageStream = KafkaUtil.consumerKafka(env, "dwd-traffic-page-log");
        pageStream.print("page");

        // 3. Transformation: windowed aggregation (see handle()).
        DataStream<String> resultStream = handle(pageStream);
        resultStream.print("resultStream");

        // 4. Sink: write report rows to the Doris report table.
        DorisUtil.saveToDoris(
                resultStream, "jtp_realtime_report", "dws_traffic_page_view_window_report"
        );

        // 5. Trigger execution.
        env.execute("JtpTrafficPageViewMinuteWindowDwsJob2");
    }

    /**
     * Builds the windowed page-view aggregation over the raw JSON log stream.
     *
     * @param stream raw DWD page-view logs as JSON strings; each record is
     *               expected to carry a {@code common.mid} device id and a
     *               {@code ts} event timestamp (extracted below)
     * @return JSON report rows produced by {@link PageViewWindowFunction2},
     *         one per (brand, channel, province, isNew) group per minute
     */
    private static DataStream<String> handle(DataStream<String> stream) {
        // 1. Key by device id (mid) so the mapper can use keyed state —
        // presumably to flag a device's first visit of the day for UV
        // counting; confirm against PageViewBeanMapFunction2.
        KeyedStream<String, String> midStream = stream.keyBy(
                json -> JSON.parseObject(json).getJSONObject("common").getString("mid")
        );

        // 2. Convert each JSON log line into a PageViewBean2.
        DataStream<PageViewBean2> beanStream = midStream.map(new PageViewBeanMapFunction2());

        // 3. Assign event timestamps and watermarks. Zero out-of-orderness
        // means watermarks track the max seen timestamp exactly (strictly
        // in-order input assumed).
        DataStream<PageViewBean2> timeStream = beanStream.assignTimestampsAndWatermarks(
                WatermarkStrategy.<PageViewBean2>forBoundedOutOfOrderness(Duration.ofSeconds(0))
                        .withTimestampAssigner(
                                new SerializableTimestampAssigner<PageViewBean2>() {
                                    @Override
                                    public long extractTimestamp(PageViewBean2 element, long recordTimestamp) {
                                        return element.getTs();
                                    }
                                }
                        )
        );

        // 4. Key by the report dimensions: brand, channel, province,
        // new/returning visitor flag (comma-joined composite key).
        KeyedStream<PageViewBean2, String> keyedStream = timeStream.keyBy(
                bean -> bean.getBrand() + "," + bean.getChannel() + "," + bean.getProvince() + "," + bean.getIsNew()
        );

        // 5. One-minute tumbling event-time windows.
        WindowedStream<PageViewBean2, String, TimeWindow> windowStream = keyedStream.window(
                TumblingEventTimeWindows.of(Time.minutes(1))
        );

        // 6. Aggregate: incremental reduce per window, then the window
        // function enriches the result with window metadata and serializes it.
        SingleOutputStreamOperator<String> resultStream = windowStream.reduce(
                new PageViewReportReduceFunction2(), new PageViewWindowFunction2()
        );

        // 7. Return the serialized report stream.
        return resultStream;
    }
}
