package com.atguigu.app.dws;

import com.alibaba.fastjson.JSONObject;
import com.atguigu.bean.SessionPageCountBean;
import com.atguigu.util.ClickHouseUtil;
import com.atguigu.util.DateFormatUtil;
import com.atguigu.util.KafkaUtil;
import org.apache.flink.api.common.eventtime.SerializableTimestampAssigner;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.api.common.functions.ReduceFunction;
import org.apache.flink.api.java.functions.KeySelector;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.windowing.WindowFunction;
import org.apache.flink.streaming.api.windowing.assigners.TumblingEventTimeWindows;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.streaming.api.windowing.windows.TimeWindow;
import org.apache.flink.util.Collector;

import java.time.Duration;

/**
 * DWS traffic job: consumes page-view logs from Kafka and produces two aggregates.
 *
 * <p>Pipeline:
 * <ol>
 *   <li>Parse each page log into a {@code SessionPageCountBean} (one page hit per record).</li>
 *   <li>Key by session id ({@code sid}) and count pages per session in 10s
 *       event-time tumbling windows; write to ClickHouse table {@code dws_session_pc}.</li>
 *   <li>Re-key the per-session results by the {@code is_new} flag, mark single-page
 *       sessions as "jumps" (bounces), and sum jump/session counts in a second 10s
 *       window; write to ClickHouse table {@code dws_session_page_count}.</li>
 * </ol>
 */
public class DwsSessionPageCount {
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);

        // TODO 2: state backend / checkpointing — intentionally disabled for local dev.
        /*
        env.enableCheckpointing(5 * 60 * 1000L, CheckpointingMode.EXACTLY_ONCE );
        env.getCheckpointConfig().setCheckpointTimeout( 3 * 60 * 1000L );
        env.getCheckpointConfig().setMaxConcurrentCheckpoints(2);
        env.setStateBackend(new HashMapStateBackend());
        env.getCheckpointConfig().setCheckpointStorage("hdfs://hadoop102:8020/gmall/ck");
        System.setProperty("HADOOP_USER_NAME", "atguigu");
         */

        String topicId = "dwd_traffic_page_log";
        String groupId = "dws_session_page_count";
        DataStreamSource<String> kafkaSource =
                env.addSource(KafkaUtil.getFlinkKafkaConsumer(topicId, groupId));

        // One bean per page log: pagesCount seeded to 1 so downstream reduce can sum.
        SingleOutputStreamOperator<SessionPageCountBean> jsonObjectStream =
                kafkaSource.flatMap(new FlatMapFunction<String, SessionPageCountBean>() {
                    @Override
                    public void flatMap(String value, Collector<SessionPageCountBean> out) throws Exception {
                        JSONObject jsonObject = JSONObject.parseObject(value);
                        out.collect(SessionPageCountBean.builder()
                                .pagesCount(1L)
                                .ts(jsonObject.getLong("ts"))
                                .isNew(jsonObject.getJSONObject("common").getString("is_new"))
                                .sid(jsonObject.getJSONObject("common").getString("sid"))
                                .build());
                    }
                });

        // Per-session page counts in 10s event-time tumbling windows (2s bounded lateness).
        SingleOutputStreamOperator<SessionPageCountBean> reduceStream = jsonObjectStream
                .assignTimestampsAndWatermarks(WatermarkStrategy
                        .<SessionPageCountBean>forBoundedOutOfOrderness(Duration.ofSeconds(2L))
                        .withTimestampAssigner(new SerializableTimestampAssigner<SessionPageCountBean>() {
                            @Override
                            public long extractTimestamp(SessionPageCountBean element, long recordTimestamp) {
                                // NOTE(review): assumes "ts" in the log is in seconds; if the
                                // source already emits milliseconds this *1000 is wrong — confirm
                                // against the dwd_traffic_page_log producer.
                                return element.getTs() * 1000L;
                            }
                        }))
                .keyBy(new KeySelector<SessionPageCountBean, String>() {
                    @Override
                    public String getKey(SessionPageCountBean value) throws Exception {
                        return value.getSid();
                    }
                })
                .window(TumblingEventTimeWindows.of(Time.seconds(10L)))
                .reduce(new ReduceFunction<SessionPageCountBean>() {
                    @Override
                    public SessionPageCountBean reduce(SessionPageCountBean value1, SessionPageCountBean value2) throws Exception {
                        value1.setPagesCount(value1.getPagesCount() + value2.getPagesCount());
                        return value1;
                    }
                }, new WindowFunction<SessionPageCountBean, SessionPageCountBean, String, TimeWindow>() {
                    @Override
                    public void apply(String s, TimeWindow window, Iterable<SessionPageCountBean> input, Collector<SessionPageCountBean> out) throws Exception {
                        // One element per key/window after the reduce; stamp window bounds,
                        // count this session once, and use processing time as the row ts.
                        SessionPageCountBean next = input.iterator().next();
                        next.setStt(DateFormatUtil.toYmdHms(window.getStart()));
                        next.setEtt(DateFormatUtil.toYmdHms(window.getEnd()));
                        next.setSidCount(1L);
                        next.setTs(System.currentTimeMillis());
                        out.collect(next);
                    }
                });
        reduceStream.print();
        reduceStream.addSink(ClickHouseUtil.getClickHouseSink("insert into dws_session_pc values (?,?,?,?,?)"));

        // A session that viewed exactly one page in the window is a jump (bounce);
        // aggregate jump and session counts per is_new flag.
        SingleOutputStreamOperator<SessionPageCountBean> resultStream = reduceStream
                .flatMap(new FlatMapFunction<SessionPageCountBean, SessionPageCountBean>() {
                    @Override
                    public void flatMap(SessionPageCountBean value, Collector<SessionPageCountBean> out) throws Exception {
                        if (value.getPagesCount() == 1) {
                            value.setJumpCount(1L);
                        } else {
                            value.setJumpCount(0L);
                        }
                        out.collect(value);
                    }
                })
                .keyBy(new KeySelector<SessionPageCountBean, String>() {
                    @Override
                    public String getKey(SessionPageCountBean value) throws Exception {
                        return value.getIsNew();
                    }
                })
                .window(TumblingEventTimeWindows.of(Time.seconds(10L)))
                .reduce(new ReduceFunction<SessionPageCountBean>() {
                    @Override
                    public SessionPageCountBean reduce(SessionPageCountBean value1, SessionPageCountBean value2) throws Exception {
                        value1.setJumpCount(value1.getJumpCount() + value2.getJumpCount());
                        value1.setSidCount(value1.getSidCount() + value2.getSidCount());
                        return value1;
                    }
                }, new WindowFunction<SessionPageCountBean, SessionPageCountBean, String, TimeWindow>() {
                    @Override
                    public void apply(String s, TimeWindow window, Iterable<SessionPageCountBean> input, Collector<SessionPageCountBean> out) throws Exception {
                        SessionPageCountBean next = input.iterator().next();
                        next.setStt(DateFormatUtil.toYmdHms(window.getStart()));
                        next.setEtt(DateFormatUtil.toYmdHms(window.getEnd()));
                        next.setTs(System.currentTimeMillis());
                        out.collect(next);
                    }
                });
        resultStream.print("1234567");
        // BUGFIX: this sink previously read from reduceStream (copy-paste error), so the
        // is_new-level aggregation was never persisted and dws_session_page_count received
        // session-level rows instead. Sink the second-stage resultStream here.
        resultStream.addSink(ClickHouseUtil.getClickHouseSink("insert into dws_session_page_count values (?,?,?,?,?)"));

        env.execute(groupId);
    }
}
