package net.bwie.realtime.jtp.dws.log.job;

import com.alibaba.fastjson.JSON;
import net.bwie.realtime.jtp.common.utils.JdbcUtil;
import net.bwie.realtime.jtp.common.utils.KafkaUtil;
import net.bwie.realtime.jtp.dws.log.been.PageViewBean;
import net.bwie.realtime.jtp.dws.log.function.PageViewBeanMapFunction;
import net.bwie.realtime.jtp.dws.log.function.PageViewReduceFunction;
import net.bwie.realtime.jtp.dws.log.function.PageViewWindowFunction;
import org.apache.flink.api.common.eventtime.SerializableTimestampAssigner;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.KeyedStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.datastream.WindowedStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.windowing.assigners.TumblingEventTimeWindows;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.streaming.api.windowing.windows.TimeWindow;

import java.time.Duration;

/**
 * DWS-layer Flink job: aggregates DWD page-log traffic into 1-minute tumbling
 * event-time windows, keyed by (brand, channel, province, is_new), and writes
 * the windowed report rows into ClickHouse.
 *
 * <p>Pipeline: Kafka (dwd-traffic-page-log) -&gt; key by mid -&gt; PageViewBean
 * -&gt; watermarks from bean ts -&gt; key by dimensions -&gt; 1-minute window
 * reduce -&gt; ClickHouse upsert.
 */
public class JtpTrafficPageViewMinuteWindowDwsJob {

    /**
     * Insert statement for the windowed report table; the 11 placeholders match
     * the 11 listed columns, bound by {@code JdbcUtil.sinkToClickhouseUpsert}.
     */
    private static final String INSERT_SQL =
            "INSERT INTO jtp_log_report.dws_traffic_page_view_window_report(\n" +
                    "    window_start_time, window_end_time,\n" +
                    "    brand, channel,province,is_new,\n" +
                    "    session_count,pv_count, pv_during_time, uv_count,\n" +
                    "    ts\n" +
                    ")\n" +
                    "VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)";

    public static void main(String[] args) throws Exception {
        // Stream execution environment for this job.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        // Single parallelism; NOTE(review): presumably chosen for ordering /
        // small-scale deployment — confirm before scaling out.
        env.setParallelism(1);
        // Consume DWD page-log records (JSON strings) from Kafka.
        DataStream<String> pageStream = KafkaUtil.consumerKafka(env, "dwd-traffic-page-log");
        // Transform raw logs into per-dimension 1-minute window aggregates.
        DataStream<String> resultStream = handle(pageStream);
        // Persist the aggregated rows into ClickHouse.
        JdbcUtil.sinkToClickhouseUpsert(resultStream, INSERT_SQL);
        // Launch the streaming job.
        env.execute("JtpTrafficPageViewMinuteWindowDwsJob");
    }

    /**
     * Builds the aggregation pipeline: raw page-log JSON in, serialized
     * 1-minute-window report rows out.
     *
     * @param pageStream raw DWD page-log records as JSON strings
     * @return stream of aggregated report rows ready for the ClickHouse sink
     */
    private static DataStream<String> handle(DataStream<String> pageStream) {
        // Key by device id (mid) before mapping; NOTE(review): assumes
        // PageViewBeanMapFunction relies on keyed state (e.g. per-device
        // UV/session tracking) — confirm, otherwise the keyBy is redundant.
        KeyedStream<String, String> midKeyedStream = pageStream.keyBy(
                json -> JSON.parseObject(json).getJSONObject("common").getString("mid")
        );
        // Convert each raw record into a PageViewBean.
        SingleOutputStreamOperator<PageViewBean> beanStream =
                midKeyedStream.map(new PageViewBeanMapFunction());
        // Event-time watermarks taken from the bean's ts field.
        // Duration.ZERO = no out-of-orderness tolerated (same as ofSeconds(0)).
        SingleOutputStreamOperator<PageViewBean> withWatermarks = beanStream.assignTimestampsAndWatermarks(
                WatermarkStrategy.<PageViewBean>forBoundedOutOfOrderness(Duration.ZERO)
                        .withTimestampAssigner((bean, recordTimestamp) -> bean.getTs())
        );
        // Group by the reporting dimensions (comma-joined composite key).
        KeyedStream<PageViewBean, String> dimKeyedStream = withWatermarks.keyBy(
                bean -> bean.getBrand() + "," + bean.getChannel() + "," + bean.getProvince() + "," + bean.getIsNew()
        );
        // 1-minute tumbling event-time windows per dimension key.
        WindowedStream<PageViewBean, String, TimeWindow> windowedStream = dimKeyedStream.window(
                TumblingEventTimeWindows.of(Time.minutes(1))
        );
        // Incremental reduce for the metrics, window function to attach
        // window start/end metadata and serialize the row.
        return windowedStream.reduce(new PageViewReduceFunction(), new PageViewWindowFunction());
    }

}