package com.bw.wjw.job;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;

import com.bw.wjw.JdbcUtil;
import com.bw.wjw.KafkaUtil;
import com.bw.wjw.bean.PageViewBean;
import com.bw.wjw.function.PageViewBeanMapFunction;
import com.bw.wjw.function.PageViewReportReduceFunction;
import com.bw.wjw.function.PageViewWindowFunction;
import org.apache.flink.api.common.eventtime.SerializableTimestampAssigner;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.KeyedStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.datastream.WindowedStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.windowing.assigners.TumblingEventTimeWindows;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.streaming.api.windowing.windows.TimeWindow;
import java.time.Duration;


/**
 * DWS-layer Flink job: consumes DWD page-view log records (JSON strings) from
 * Kafka, aggregates them in 1-minute event-time tumbling windows keyed by
 * (province, brand, channel, is_new), and upserts the per-window report rows
 * into ClickHouse.
 */
public class JtpLogPageViewMinuteWindowDwsJob {
    public static void main(String[] args) throws Exception {

        // 1. Execution environment
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        // Parallelism 1 keeps a single watermark/window timeline and a single sink writer.
        env.setParallelism(1);

        // 2. Source: DWD page-view log topic
        DataStream<String> kafkaStream = KafkaUtil.consumerKafka(env, "dwd-log-page-view-log");

        // 3. Transformation: parse, key, window, aggregate
        DataStream<String> reportStream = handle(kafkaStream);

        // 4. Sink: upsert window report rows into ClickHouse (11 columns, 11 placeholders)
        JdbcUtil.sinkToClickhouseUpsert(
                reportStream,
                "INSERT INTO jtp_log_report.dws_log_page_view_window_report(\n" +
                        "    window_start_time, window_end_time,\n" +
                        "    province, brand, channel,is_new,\n" +
                        "    pv_count, pv_during_time, uv_count, session_count,\n" +
                        "    ts\n" +
                        ")\n" +
                        "VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)"
        );

        // 5. Trigger execution
        env.execute("JtpLogPageViewMinuteWindowDwsJob");
    }


    /**
     * Aggregates page-view log records into per-minute window report rows.
     *
     * @param kafkaStream raw JSON log lines consumed from Kafka
     * @return one serialized report row per (window, province, brand, channel, is_new) group
     */
    private static DataStream<String> handle(DataStream<String> kafkaStream) {
        // s1 - parse each JSON line into a JSONObject for structured field access
        SingleOutputStreamOperator<JSONObject> jsonObjectStream = kafkaStream.map(JSON::parseObject);

        // s2 - key by device id (common.mid) so per-device state (UV dedup) can be kept
        KeyedStream<JSONObject, String> midStream = jsonObjectStream.keyBy(
                jsonObject -> jsonObject.getJSONObject("common").getString("mid")
        );

        // s3 - convert each keyed record into a PageViewBean
        SingleOutputStreamOperator<PageViewBean> beanStream = midStream.map(new PageViewBeanMapFunction());

        // s4 - assign event time from the bean's ts field; Duration.ZERO means no
        // out-of-orderness is tolerated (records are assumed in timestamp order)
        SingleOutputStreamOperator<PageViewBean> timeStream = beanStream.assignTimestampsAndWatermarks(
                WatermarkStrategy
                        .<PageViewBean>forBoundedOutOfOrderness(Duration.ZERO)
                        .withTimestampAssigner((element, recordTimestamp) -> element.getTs())
        );

        // s5 - key by the report dimensions.
        // NOTE(review): dimensions are joined with "," — a comma inside any
        // dimension value would collide distinct keys; confirm values are comma-free.
        KeyedStream<PageViewBean, String> keyedStream = timeStream.keyBy(
                bean -> bean.getProvince() + "," + bean.getBrand() + "," + bean.getChannel() + "," + bean.getIsNew()
        );

        // s6 - 1-minute event-time tumbling windows
        WindowedStream<PageViewBean, String, TimeWindow> windowStream = keyedStream.window(
                TumblingEventTimeWindows.of(Time.minutes(1))
        );

        // s7 - incremental reduce (constant state per key/window, unlike a plain
        // WindowFunction.apply which buffers all elements) combined with a window
        // function that attaches window start/end metadata to the reduced result
        SingleOutputStreamOperator<String> resultStream =
                windowStream.reduce(new PageViewReportReduceFunction(), new PageViewWindowFunction());

        return resultStream;
    }
}


