package com.atguigu.edu.realtime.app.dws.traffic;

import com.alibaba.fastjson.JSONObject;
import com.atguigu.edu.realtime.bean.KeywordBean;
import com.atguigu.edu.realtime.common.KafkaTopicConfig;
import com.atguigu.edu.realtime.sink.MyDorisSink;
import com.atguigu.edu.realtime.util.DateFormatUtil;
import com.atguigu.edu.realtime.util.KafkaUtil;
import com.atguigu.edu.realtime.util.KeyWordUtil;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.functions.AggregateFunction;
import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.KeyedStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.windowing.ProcessWindowFunction;
import org.apache.flink.streaming.api.windowing.assigners.TumblingEventTimeWindows;
import org.apache.flink.streaming.api.windowing.assigners.TumblingProcessingTimeWindows;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.streaming.api.windowing.windows.TimeWindow;
import org.apache.flink.util.Collector;

import java.time.Duration;
import java.util.List;

/**
 * @ClassName: DwsTrafficKeywordPageViewWindows
 * @Description: TODO DWS层流量域关键词粒度页面浏览各窗口汇总表
 * @Author: zhaoxunfeng
 * @Date: 2022-08-31 19:20
 * @Version: 1.0.0
 */
public class DwsTrafficKeywordPageViewWindows {
    public static void main(String[] args) {
        //TODO 1、创建执行环境
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(3);

        //TODO 2、读取kafka dwd_traffic_page_view的数据,并创建相应的流
        String groupId = "DwsTrafficKeywordPageViewWindows";
        DataStreamSource<String> kafkaDS = env.addSource(KafkaUtil.getKafkaConsumer(KafkaTopicConfig.DWD_TRAFFIC_PAGE_LOG_TOPIC, groupId));

        //TODO 3、过滤数据得到带有关键字的页面信息
        SingleOutputStreamOperator<Tuple2<String, Long>> filterDS = kafkaDS.flatMap(new FlatMapFunction<String, Tuple2<String, Long>>() {
            @Override
            public void flatMap(String value, Collector<Tuple2<String, Long>> out) throws Exception {
                JSONObject json = JSONObject.parseObject(value);
                JSONObject page = json.getJSONObject("page");
                String item = page.getString("item");
                Long ts = json.getLong("ts");
                if (item != null && "keyword".equals(page.getString("item_type"))) {
                    List<String> words = KeyWordUtil.analyze(item);
                    for (String word : words) {
                        out.collect(Tuple2.of(word, ts));
                    }
                }
            }
        });

        //TODO 4、分配时间戳并按照切分的单词进行分区
        KeyedStream<Tuple2<String, Long>, String> keyedStream = filterDS
                .assignTimestampsAndWatermarks(
                        WatermarkStrategy
                                .<Tuple2<String, Long>>forBoundedOutOfOrderness(Duration.ofSeconds(3))
                                .withTimestampAssigner((t, ts) -> t.f1)
                )
                .keyBy(t -> t.f0);

        //TODO 5、开窗、聚合、计算
        SingleOutputStreamOperator<KeywordBean> resultDS = keyedStream.window(TumblingProcessingTimeWindows.of(Time.seconds(10L)))
                .aggregate(new AggregateFunction<Tuple2<String, Long>, Long, Long>() {
                    @Override
                    public Long createAccumulator() {
                        return 0L;
                    }

                    @Override
                    public Long add(Tuple2<String, Long> value, Long accumulator) {
                        return accumulator + 1L;
                    }

                    @Override
                    public Long getResult(Long accumulator) {
                        return accumulator;
                    }

                    @Override
                    public Long merge(Long a, Long b) {
                        return a + b;
                    }
                }, new ProcessWindowFunction<Long, KeywordBean, String, TimeWindow>() {
                    @Override
                    public void process(String word,
                                        Context context,
                                        Iterable<Long> elements,
                                        Collector<KeywordBean> out) throws Exception {
                        Long score = elements.iterator().next();
                        String stt = DateFormatUtil.toDateTimeString(context.window().getStart());
                        String edt = DateFormatUtil.toDateTimeString(context.window().getEnd());
                        String cur_date = DateFormatUtil.toPartitionDate(context.window().getStart());
                        out.collect(new KeywordBean(stt, edt, word, score, cur_date));
                    }
                });

        //TODO 6、将处理完的结果数据写入到数据库中
        resultDS.addSink(new MyDorisSink<KeywordBean>("dws_traffic_keyword_page_view_window","cur_date"));

        //TODO 7、开始执行程序
        try {
            env.execute();
        } catch (Exception e) {
            throw new RuntimeException(e);
        }
    }
}
