package net.lmc.realtime.jtp.dws.log.job;

import com.alibaba.fastjson.JSON;
import net.lmc.bwie.realtime.jtp.common.utils.JdbcUtil;
import net.lmc.bwie.realtime.jtp.common.utils.KafkaUtil;
import net.lmc.realtime.jtp.dws.log.bean.PageViewBean;
import net.lmc.realtime.jtp.dws.log.function.PageViewBeanMapFunction;
import net.lmc.realtime.jtp.dws.log.function.PageViewWindowFunction;
import org.apache.flink.api.common.eventtime.SerializableTimestampAssigner;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.KeyedStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.datastream.WindowedStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.windowing.assigners.TumblingEventTimeWindows;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.streaming.api.windowing.windows.TimeWindow;

import java.time.Duration;

/**
 * Flink DWS job: aggregates DWD page-view logs into per-minute traffic report rows
 * (keyed by brand / channel / province / new-user flag) and writes them to ClickHouse.
 *
 * <p>Pipeline: Kafka ("dwd-traffic-page-log") → key by device id → bean conversion →
 * event-time watermarks → 1-minute tumbling windows → window aggregation → ClickHouse sink.
 */
public class JtpTrafficPageViewMinuteWindowDwsJob {

    public static void main(String[] args) throws Exception {
        // Execution environment; parallelism 1 keeps a single ordered stream for this job.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);

        // Source: DWD-layer page-view log topic (JSON strings).
        DataStream<String> kafkaStream = KafkaUtil.consumerKafka(env, "dwd-traffic-page-log");

        // Transform raw JSON events into serialized per-window report rows.
        DataStream<String> reportStream = handle(kafkaStream);

        // Sink: upsert report rows into ClickHouse (11 columns, 11 '?' placeholders).
        JdbcUtil.sinkToClickhouseUpsert(
                reportStream,
                "INSERT INTO jtp_log_report.dws_traffic_page_view_window_report(\n" +
                        "        window_start_time, window_end_time,\n" +
                        "        brand, channel,province,is_new,\n" +
                        "        pv_count, pv_during_time, uv_count ,session_count,\n" +
                        "        ts\n" +
                        ")\n" +
                        "VALUES (?,?,?,?,?,?,?,?,?,?,?)"
        );

        // Trigger job execution.
        env.execute("JtpTrafficPageViewMinuteWindowDwsJob");
    }

    /**
     * Builds the aggregation pipeline on top of the raw Kafka stream.
     *
     * @param kafkaStream raw page-view log events as JSON strings
     * @return serialized one-minute window report rows ready for the JDBC sink
     */
    private static DataStream<String> handle(DataStream<String> kafkaStream) {

        // 1. Key by device id ("common.mid") — presumably so PageViewBeanMapFunction can use
        //    keyed state to deduplicate devices for the UV count; TODO confirm in that function.
        KeyedStream<String, String> midStream = kafkaStream.keyBy(
                json -> JSON.parseObject(json).getJSONObject("common").getString("mid")
        );

        // 2. Convert each JSON record into a PageViewBean.
        SingleOutputStreamOperator<PageViewBean> beanStream = midStream.map(new PageViewBeanMapFunction());

        // 3. Assign event time from the bean's ts field. forMonotonousTimestamps() is the
        //    idiomatic equivalent of forBoundedOutOfOrderness(ZERO): no lateness tolerance.
        SingleOutputStreamOperator<PageViewBean> timeStream = beanStream.assignTimestampsAndWatermarks(
                WatermarkStrategy
                        .<PageViewBean>forMonotonousTimestamps()
                        .withTimestampAssigner(
                                (element, recordTimestamp) -> element.getTs()
                        )
        );

        // 4. Re-key by the report dimensions: brand, channel, province, new-user flag.
        //    NOTE(review): comma-joined key assumes no field contains a comma — verify upstream.
        KeyedStream<PageViewBean, String> keyedStream = timeStream.keyBy(
                bean -> bean.getBrand() + "," + bean.getChannel() + "," + bean.getProvince() + "," + bean.getIsNew()
        );

        // One-minute tumbling event-time windows per dimension combination.
        WindowedStream<PageViewBean, String, TimeWindow> windowStream = keyedStream.window(
                TumblingEventTimeWindows.of(Time.minutes(1))
        );

        // 5. Aggregate each window into a serialized report row.
        SingleOutputStreamOperator<String> reportStream = windowStream.apply(new PageViewWindowFunction());

        // 6. Hand the report stream back to main for sinking.
        return reportStream;
    }
}
