package net.bwie.realtime.jtp.log.job;


import com.alibaba.fastjson.JSON;
import net.bwie.realtime.jtp.common.utils.JdbcUtil;
import net.bwie.realtime.jtp.common.utils.KafkaUtil;
import net.bwie.realtime.jtp.log.bean.PageViewBean;
import net.bwie.realtime.jtp.log.function.PageViewBeanMapFunction;
import net.bwie.realtime.jtp.log.function.PageViewReportReduceFunction;
import net.bwie.realtime.jtp.log.function.PageViewWindowFunction;
import org.apache.flink.api.common.eventtime.SerializableTimestampAssigner;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.KeyedStream;
import org.apache.flink.streaming.api.datastream.WindowedStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.windowing.assigners.TumblingEventTimeWindows;
import org.apache.flink.streaming.api.windowing.assigners.TumblingProcessingTimeWindows;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.streaming.api.windowing.windows.TimeWindow;

import java.time.Duration;

/**
 * DWS job: consumes DWD page-view logs from Kafka, aggregates them into
 * 1-minute tumbling windows keyed by brand/channel/province/is_new, and
 * upserts the per-window traffic report rows (pv/uv/session/duration)
 * into ClickHouse.
 *
 * @author: LiLi
 * @date: 2025/05/19 08:57:03
 * @version: 1.0
 */
public class JtpTrafficPageViewMinuteWindowDwsJob {
    public static void main(String[] args) throws Exception {

        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        // Single parallelism keeps local runs deterministic; raise for production.
        env.setParallelism(1);

        // 1. Source: DWD page-view log topic, records are JSON strings.
        DataStream<String> pageStream = KafkaUtil.consumerKafka(env, "dwd_log_page_view_log");

        // 2. Aggregate into per-minute report rows.
        DataStream<String> resultStream = handle(pageStream);

        // 3. Sink: upsert report rows into ClickHouse.
        // NOTE: the column list and the '?' placeholders must stay in sync
        // (11 columns / 11 placeholders).
        JdbcUtil.sinkToClickhouseUpsert(resultStream,
                "INSERT INTO jtp_log_report.dws_traffic_page_view_window_report(\n" +
                        "  window_start_time, window_end_time,\n" +
                        "  brand, channel, province,is_new,\n" +
                        "  pv_count, pv_during_time, uv_count, session_count,\n" +
                        "  ts\n" +
                        ")\n" +
                        "VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?);"
        );

        env.execute("JtpTrafficPageViewMinuteWindowDwsJob");
    }

    /**
     * Builds the aggregation pipeline: key by device id so the bean mapper can
     * keep per-device keyed state, assign event-time timestamps from the bean's
     * {@code ts}, then reduce inside 1-minute event-time tumbling windows keyed
     * by the report dimensions.
     *
     * @param pageStream raw DWD page-view log records as JSON strings
     * @return per-window report rows serialized as strings, ready for the sink
     */
    private static DataStream<String> handle(DataStream<String> pageStream) {

        // Key by device id (common.mid) so PageViewBeanMapFunction can use
        // keyed state when deriving per-device metrics.
        KeyedStream<String, String> midStream = pageStream
                .keyBy(json -> JSON.parseObject(json).getJSONObject("common").getString("mid"));

        DataStream<PageViewBean> beanStream = midStream.map(new PageViewBeanMapFunction());

        // Event-time watermarks with zero tolerated out-of-orderness,
        // driven by the bean's ts field.
        DataStream<PageViewBean> timeStream = beanStream.
                assignTimestampsAndWatermarks(WatermarkStrategy.
                        <PageViewBean>forBoundedOutOfOrderness(Duration.ofSeconds(0))
                        .withTimestampAssigner(new SerializableTimestampAssigner<PageViewBean>() {
                            @Override
                            public long extractTimestamp(PageViewBean pageViewBean, long recordTimestamp) {
                                return pageViewBean.getTs();
                            }
                        }));

        // Key by the report dimensions (comma-joined composite key).
        KeyedStream<PageViewBean, String> keyedStream = timeStream
                .keyBy(bean -> bean.getBrand() + "," + bean.getChannel() + "," + bean.getProvince() + "," + bean.getIsNew());

        // FIX: the pipeline assigns event-time timestamps/watermarks above, but
        // the original windowed with TumblingProcessingTimeWindows, which ignores
        // them and buckets by wall-clock arrival time (making the watermark setup
        // dead code and replays non-reproducible). Event-time windows make the
        // timestamp assignment actually take effect.
        WindowedStream<PageViewBean, String, TimeWindow> windowStream = keyedStream
                .window(TumblingEventTimeWindows.of(Time.minutes(1)));

        // Incremental reduce per key, then the window function stamps the
        // window start/end onto the emitted report row.
        DataStream<String> reportStream = windowStream
                .reduce(new PageViewReportReduceFunction(), new PageViewWindowFunction());

        return reportStream;
    }
}
