package net.bwie.realtime.jtp.dws.log.job;


import com.alibaba.fastjson.JSON;

import net.bwie.realtime.jtp.common.utils.JdbcUtil;
import net.bwie.realtime.jtp.common.utils.KafkaUtil;
import net.bwie.realtime.jtp.dws.log.bean.PageViewBean;
import net.bwie.realtime.jtp.dws.log.function.PageViewBeanMapFunction;
import net.bwie.realtime.jtp.dws.log.function.PageViewWindowFunction;
import org.apache.flink.api.common.eventtime.SerializableTimestampAssigner;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.KeyedStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.datastream.WindowedStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.windowing.assigners.TumblingEventTimeWindows;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.streaming.api.windowing.windows.TimeWindow;

import java.time.Duration;

/**
 * Real-time aggregation of the current day's APP traffic logs,
 * summarized over minute-level (tumbling) windows.
 */

public class JtpTrafficPageViewMinuteWindowDwsJob {

    /**
     * Job entry point: reads DWD page-view logs from Kafka, aggregates them per
     * one-minute window (see {@link #handle}) and upserts the report rows into
     * ClickHouse.
     *
     * @param args unused
     * @throws Exception if the Flink job fails to submit or execute
     */
    public static void main(String[] args) throws Exception {
        // Create the streaming execution environment.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        // Parallelism 1 keeps the minute-level report simple to reason about.
        env.setParallelism(1);

        // Source: DWD page-view log topic.
        DataStream<String> kafkaStream = KafkaUtil.consumerKafka(env, "dwd-traffic-page-log");
        DataStream<String> reducedStream = handle(kafkaStream);

        // Sink: upsert windowed report rows into ClickHouse (11 columns -> 11 bind parameters).
        JdbcUtil.sinkToClickhouseUpsert(reducedStream,
                "INSERT INTO jtp_log_report.dws_traffic_page_view_window_report(\n" +
                        " window_start_time,window_end_time,\n" +
                        " brand, channel, province, is_new, pv_count, pv_during_time,\n" +
                        " uv_count, session_count, ts) VALUES\n" +
                        " (?,?,?,?,?,?,?,?,?,?,?)");

        reducedStream.print("reducedStream");
        // Derive the job name from the class so the two can never drift apart
        // (the previous literal said "JtpLog..." while the class is "JtpTraffic...").
        env.execute(JtpTrafficPageViewMinuteWindowDwsJob.class.getSimpleName());
    }

    /**
     * Builds the aggregation pipeline:
     * key by device id (mid) -> map to {@link PageViewBean} -> assign event-time
     * watermarks -> key by report dimensions -> 1-minute tumbling window -> report rows.
     *
     * @param kafkaStream raw DWD page-view log JSON strings
     * @return JSON report rows, one per closed window and dimension combination
     */
    private static DataStream<String> handle(DataStream<String> kafkaStream) {
        // 1. Key by device id (mid) so PageViewBeanMapFunction can use keyed state
        //    (presumably for uv/session computation per device — TODO confirm in the function).
        KeyedStream<String, String> midStream = kafkaStream.keyBy(
                json -> JSON.parseObject(json).getJSONObject("common").getString("mid")
        );
        // 2. Convert each log line into a PageViewBean.
        DataStream<PageViewBean> beanStream = midStream.map(new PageViewBeanMapFunction());
        // 3. Event-time watermarks with zero allowed out-of-orderness.
        DataStream<PageViewBean> timeStream = beanStream.assignTimestampsAndWatermarks(
                WatermarkStrategy
                        .<PageViewBean>forBoundedOutOfOrderness(Duration.ZERO)
                        .withTimestampAssigner((SerializableTimestampAssigner<PageViewBean>)
                                (element, recordTimestamp) ->
                                        // Defensive fallback: a null element gets processing time
                                        // so watermarks keep advancing instead of throwing an NPE.
                                        element == null ? System.currentTimeMillis() : element.getTs())
        );
        // 4. Key by the report dimensions (brand, channel, province, is_new).
        KeyedStream<PageViewBean, String> keyedStream = timeStream.keyBy(
                bean -> bean.getBrand() + "," + bean.getChannel() + ","
                        + bean.getProvince() + "," + bean.getIsNew());
        // 5. One-minute tumbling event-time windows, aggregated into report rows.
        WindowedStream<PageViewBean, String, TimeWindow> windowStream = keyedStream.window(
                TumblingEventTimeWindows.of(Time.minutes(1))
        );
        return windowStream.apply(new PageViewWindowFunction());
    }
}
