package net.bwie.realtime.jtp.dws.log.job;
import com.alibaba.fastjson.JSON;
import net.bwie.realtime.jtp.dws.log.fuction.PageViewBean;
import net.bwie.realtime.jtp.dws.log.fuction.PageViewBeanMapFunction;
import net.bwie.realtime.jtp.dws.log.fuction.PageViewWindowFunction;
import net.bwie.realtime.jtp.utils.DorisUtil;
import net.bwie.realtime.jtp.utils.KafkaUtil;
import org.apache.flink.api.common.eventtime.SerializableTimestampAssigner;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.KeyedStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.datastream.WindowedStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.windowing.assigners.TumblingEventTimeWindows;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.streaming.api.windowing.windows.TimeWindow;
import java.time.Duration;

/**
 * DWS job: aggregates DWD traffic page logs into one-minute, event-time
 * tumbling windows and writes the report rows to Doris.
 *
 * <p>Pipeline: Kafka (dwd-traffic-page-logs) -> keyBy device id -> bean
 * mapping -> watermarks -> keyBy report dimensions -> 1-minute window ->
 * window aggregation -> Doris sink.
 */
public class JtpTrafficPageViewMinuteWindowDwsJob {

    /**
     * Entry point: wires the streaming pipeline and triggers execution.
     *
     * @param args unused command-line arguments
     * @throws Exception if the Flink job fails to start or run
     */
    public static void main(String[] args) throws Exception {
        // 1. Execution environment: single parallelism, checkpoint every 5s.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);
        env.enableCheckpointing(5000L);

        // 2. Source: consume the DWD page-log topic from Kafka.
        DataStream<String> pageLogStream = KafkaUtil.consumerKafka(env, "dwd-traffic-page-logs");

        // 3. Transform: windowed aggregation producing serialized report rows.
        DataStream<String> reportStream = handle(pageLogStream);
        reportStream.print();

        // 4. Sink: persist aggregated rows into the Doris report table.
        DorisUtil.saveToDoris(
                reportStream, "jtp_realtime_report", "dws_traffic_page_view_window_report"
        );

        // 5. Submit the job graph for execution.
        env.execute("JtpTrafficPageViewMinuteWindowDwsJob");
    }

    /**
     * Builds the aggregation sub-pipeline: raw JSON page logs in, one report
     * row per (dimension key, one-minute window) out.
     *
     * @param stream raw JSON page-log records from the DWD layer
     * @return serialized window-aggregation results
     */
    private static DataStream<String> handle(DataStream<String> stream) {
        // Key by device id (mid) so downstream per-key state can mark a
        // device's first visit of the day when computing UV.
        KeyedStream<String, String> byDevice = stream.keyBy(
                json -> JSON.parseObject(json).getJSONObject("common").getString("mid")
        );

        // Convert each raw JSON record into a PageViewBean.
        SingleOutputStreamOperator<PageViewBean> beans = byDevice.map(new PageViewBeanMapFunction());

        // Event time comes from the bean's ts field; zero out-of-orderness
        // tolerance means late records are not waited for.
        SingleOutputStreamOperator<PageViewBean> withEventTime = beans.assignTimestampsAndWatermarks(
                WatermarkStrategy
                        .<PageViewBean>forBoundedOutOfOrderness(Duration.ZERO)
                        .withTimestampAssigner(
                                (SerializableTimestampAssigner<PageViewBean>) (bean, recordTs) -> bean.getTs()
                        )
        );

        // Re-key by the report dimensions: brand, channel, province, and
        // the new/returning-visitor flag, joined with commas.
        KeyedStream<PageViewBean, String> byDimensions = withEventTime.keyBy(
                bean -> bean.getBrand() + "," + bean.getChannel() + "," + bean.getProvince() + "," + bean.getIsNew()
        );

        // One-minute tumbling event-time window per dimension key.
        WindowedStream<PageViewBean, String, TimeWindow> windowed = byDimensions.window(
                TumblingEventTimeWindows.of(Time.minutes(1))
        );

        // Aggregate every window's beans into a single report row string.
        return windowed.apply(new PageViewWindowFunction());
    }
}
