package com.atguigu.edu.realtime.app.dws;

import com.alibaba.fastjson.JSONObject;
import com.atguigu.edu.realtime.bean.DwsTrafficSessionCountBean;
import com.atguigu.edu.realtime.common.kafkaTopics;
import com.atguigu.edu.realtime.util.DateFormatUtil;
import com.atguigu.edu.realtime.util.KafkaUtil;
import org.apache.flink.api.common.eventtime.SerializableTimestampAssigner;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.functions.ReduceFunction;
import org.apache.flink.api.java.functions.KeySelector;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.KeyedStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.windowing.WindowFunction;
import org.apache.flink.streaming.api.windowing.assigners.TumblingEventTimeWindows;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.streaming.api.windowing.windows.TimeWindow;
import org.apache.flink.util.Collector;

import java.time.Duration;

/**
 * DWS traffic session aggregation job.
 *
 * <p>Reads page-view log records (JSON strings) from the DWD traffic page topic,
 * keys them by session id, and over 10-second tumbling event-time windows sums the
 * page count and the accumulated during_time per session. Results are currently
 * printed to stdout (sink not yet wired up).
 */
public class DwsTrafficSessionCount {
    public static void main(String[] args) throws Exception {
        // TODO 1. Obtain the stream execution environment
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);

        // TODO 2. Checkpointing and state backend (disabled for local development)
        /*env.enableCheckpointing(5 * 60 * 1000L, CheckpointingMode.EXACTLY_ONCE);
        // checkpoint timeout
        env.getCheckpointConfig().setCheckpointTimeout(3 * 60 * 1000L);
        // maximum number of concurrent checkpoints
        env.getCheckpointConfig().setMaxConcurrentCheckpoints(2);
        // checkpoint storage location
        env.getCheckpointConfig().setCheckpointStorage("hdfs://hadoop102:8020/gmall/ck");
        System.setProperty("HADOOP_USER_NAME", "atguigu");

        // enable the state backend
        env.setStateBackend(new HashMapStateBackend());*/

        // TODO 3. Read page-log records from Kafka
        String groupId = "dws_traffic_session_count";
        DataStreamSource<String> kafkaStream = env.addSource(KafkaUtil.getKafkaConsumer(kafkaTopics.DWD_TRAFFIC_PAGE_OPT, groupId));

        // TODO 4. Parse each JSON record into a DwsTrafficSessionCountBean.
        // Bug fix: the original swallowed parse failures (printStackTrace) and then
        // dereferenced the null JSONObject, surfacing bad records as a bare NPE.
        // Fail fast with a descriptive exception that keeps the offending payload
        // and the original cause, so the task failure is diagnosable.
        SingleOutputStreamOperator<DwsTrafficSessionCountBean> mapStream = kafkaStream.map(new MapFunction<String, DwsTrafficSessionCountBean>() {
            @Override
            public DwsTrafficSessionCountBean map(String value) throws Exception {
                JSONObject jsonObject;
                try {
                    jsonObject = JSONObject.parseObject(value);
                } catch (Exception e) {
                    throw new IllegalArgumentException("Malformed page-log JSON record: " + value, e);
                }
                // parseObject returns null (without throwing) for null/blank input
                if (jsonObject == null) {
                    throw new IllegalArgumentException("Empty page-log record: " + value);
                }

                return DwsTrafficSessionCountBean.builder()
                        .session_id(jsonObject.getString("session_id"))
                        .source(jsonObject.getString("source"))
                        .is_new(jsonObject.getString("is_new"))
                        .page_cnt(1L)  // each record is exactly one page view
                        .during_time_cnt(jsonObject.getLong("during_time"))
                        .ts(jsonObject.getLong("ts"))
                        .build();
            }

        });

        // TODO 5. Assign event-time timestamps and a watermark tolerating 5s of out-of-orderness
        SingleOutputStreamOperator<DwsTrafficSessionCountBean> watermarksStream = mapStream.assignTimestampsAndWatermarks(WatermarkStrategy.<DwsTrafficSessionCountBean>forBoundedOutOfOrderness(Duration.ofSeconds(5L)).withTimestampAssigner(new SerializableTimestampAssigner<DwsTrafficSessionCountBean>() {
            @Override
            public long extractTimestamp(DwsTrafficSessionCountBean element, long recordTimestamp) {
                return element.getTs();
            }
        }));

        // TODO 6. Key by session id so each session is aggregated independently
        KeyedStream<DwsTrafficSessionCountBean, String> keyedStream = watermarksStream.keyBy(new KeySelector<DwsTrafficSessionCountBean, String>() {
            @Override
            public String getKey(DwsTrafficSessionCountBean value) throws Exception {
                return value.getSession_id();
            }
        });

        // TODO 7. 10s tumbling event-time window: sum page count and during_time per session,
        // then stamp the window bounds and the processing time onto the aggregated bean.
        SingleOutputStreamOperator<DwsTrafficSessionCountBean> reduceStream = keyedStream.window(TumblingEventTimeWindows.of(Time.seconds(10L)))
                .reduce(new ReduceFunction<DwsTrafficSessionCountBean>() {
                    @Override
                    public DwsTrafficSessionCountBean reduce(DwsTrafficSessionCountBean value1, DwsTrafficSessionCountBean value2) throws Exception {
                        value1.setDuring_time_cnt(value1.getDuring_time_cnt() + value2.getDuring_time_cnt());
                        value1.setPage_cnt(value1.getPage_cnt() + value2.getPage_cnt());
                        return value1;
                    }
                }, new WindowFunction<DwsTrafficSessionCountBean, DwsTrafficSessionCountBean, String, TimeWindow>() {
                    @Override
                    public void apply(String s, TimeWindow window, Iterable<DwsTrafficSessionCountBean> input, Collector<DwsTrafficSessionCountBean> out) throws Exception {
                        // The reduce has already collapsed the window to a single element
                        DwsTrafficSessionCountBean dwsTrafficSessionCountBean = input.iterator().next();
                        dwsTrafficSessionCountBean.setStt(DateFormatUtil.toYmdHms(window.getStart()));
                        dwsTrafficSessionCountBean.setEdt(DateFormatUtil.toYmdHms(window.getEnd()));
                        dwsTrafficSessionCountBean.setSystime(System.currentTimeMillis());
                        out.collect(dwsTrafficSessionCountBean);
                    }
                });
        reduceStream.print();

        // TODO 8. Submit the job (blocks until the streaming job terminates)
        env.execute(groupId);
    }

}
