package com.atguigu.app.dws;

import com.alibaba.fastjson.JSONObject;
import com.atguigu.bean.PageAndUniqueCountBean;
import com.atguigu.bean.SessionPageCountBean;
import com.atguigu.util.ClickHouseUtil;
import com.atguigu.util.DateFormatUtil;
import com.atguigu.util.KafkaUtil;
import org.apache.flink.api.common.eventtime.SerializableTimestampAssigner;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.api.common.functions.ReduceFunction;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.windowing.AllWindowFunction;
import org.apache.flink.streaming.api.windowing.assigners.TumblingEventTimeWindows;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.streaming.api.windowing.windows.TimeWindow;
import org.apache.flink.util.Collector;

import java.time.Duration;

/**
 * DWS-layer Flink job: reads per-event page/unique-visitor counts from the
 * DWD Kafka topic, aggregates them over 10-second event-time tumbling
 * windows, and writes one summary row per window to ClickHouse.
 *
 * Pipeline: Kafka source -> JSON parse -> watermarks (2s out-of-orderness)
 * -> windowAll(10s tumbling) -> incremental reduce + window metadata
 * -> ClickHouse sink.
 */
public class DwsPageAndUniqueCount {
    public static void main(String[] args) throws Exception {
        // TODO 1: execution environment. Parallelism 1 keeps the single
        // non-keyed windowAll stream simple for this aggregation.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);

        // TODO 2: state backend / checkpointing — enable in production.
        /*
        env.enableCheckpointing(5 * 60 * 1000L, CheckpointingMode.EXACTLY_ONCE);
        env.getCheckpointConfig().setCheckpointTimeout(3 * 60 * 1000L);
        env.getCheckpointConfig().setMaxConcurrentCheckpoints(2);
        env.setStateBackend(new HashMapStateBackend());
        env.getCheckpointConfig().setCheckpointStorage("hdfs://hadoop102:8020/gmall/ck");
        System.setProperty("HADOOP_USER_NAME", "atguigu");
        */

        // TODO 3: consume the DWD topic.
        String topicId = "dwd_page_and_unique_count";
        String groupId = "dws_page_and_unique_count";
        DataStreamSource<String> kafkaSource = env.addSource(KafkaUtil.getFlinkKafkaConsumer(topicId, groupId));

        // TODO 4: parse each JSON record into a bean.
        // NOTE(review): upstream key casing is mixed ("page_count" vs
        // "isUnique") — assumed to match the DWD producer; verify there.
        SingleOutputStreamOperator<PageAndUniqueCountBean> flatMap = kafkaSource.flatMap(new FlatMapFunction<String, PageAndUniqueCountBean>() {
            @Override
            public void flatMap(String value, Collector<PageAndUniqueCountBean> out) throws Exception {
                JSONObject jsonObject = JSONObject.parseObject(value);
                out.collect(PageAndUniqueCountBean.builder()
                        .pagesCount(jsonObject.getLong("page_count"))
                        .isUnique(jsonObject.getLong("isUnique"))
                        .ts(jsonObject.getLong("ts"))
                        .build());
            }
        });

        // TODO 5: event-time watermarks (2s bounded out-of-orderness),
        // then a 10s tumbling window over the whole (non-keyed) stream.
        // The reduce runs incrementally per record; the window function
        // fires once per window to stamp start/end times and the emit ts.
        SingleOutputStreamOperator<PageAndUniqueCountBean> reduce = flatMap.assignTimestampsAndWatermarks(WatermarkStrategy.<PageAndUniqueCountBean>forBoundedOutOfOrderness(Duration.ofSeconds(2L))
                .withTimestampAssigner(new SerializableTimestampAssigner<PageAndUniqueCountBean>() {
                    @Override
                    public long extractTimestamp(PageAndUniqueCountBean element, long recordTimestamp) {
                        return element.getTs();
                    }
                })).windowAll(TumblingEventTimeWindows.of(Time.seconds(10L)))
                .reduce(new ReduceFunction<PageAndUniqueCountBean>() {
                    @Override
                    public PageAndUniqueCountBean reduce(PageAndUniqueCountBean value1, PageAndUniqueCountBean value2) throws Exception {
                        // Sum counts in place; value1 carries the running total.
                        value1.setIsUnique(value1.getIsUnique() + value2.getIsUnique());
                        value1.setPagesCount(value1.getPagesCount() + value2.getPagesCount());
                        return value1;
                    }
                }, new AllWindowFunction<PageAndUniqueCountBean, PageAndUniqueCountBean, TimeWindow>() {
                    @Override
                    public void apply(TimeWindow window, Iterable<PageAndUniqueCountBean> values, Collector<PageAndUniqueCountBean> out) throws Exception {
                        // Exactly one pre-aggregated element per window.
                        PageAndUniqueCountBean next = values.iterator().next();
                        next.setStt(DateFormatUtil.toYmdHms(window.getStart()));
                        next.setEtt(DateFormatUtil.toYmdHms(window.getEnd()));
                        // Overwrite ts with the wall-clock emit time for ClickHouse versioning.
                        next.setTs(System.currentTimeMillis());
                        out.collect(next);
                    }
                });
        reduce.print();

        // TODO 6: sink the per-window summary rows to ClickHouse.
        reduce.addSink(ClickHouseUtil
                .getClickHouseSink("insert into dws_page_and_unique_count values(?,?,?,?,?,?,?,?,?,?,?)"));

        env.execute();
    }
}
