package com.mjf.app.dws;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.mjf.bean.KeywordStats;
import com.mjf.utils.ClickHouseUtil;
import com.mjf.utils.DateTimeUtil;
import com.mjf.utils.KeywordUtil;
import com.mjf.utils.MyKafkaUtil;
import org.apache.flink.api.common.eventtime.SerializableTimestampAssigner;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.functions.FilterFunction;
import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.api.common.functions.ReduceFunction;
import org.apache.flink.api.java.tuple.Tuple3;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.windowing.WindowFunction;
import org.apache.flink.streaming.api.windowing.assigners.TumblingEventTimeWindows;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.streaming.api.windowing.windows.TimeWindow;
import org.apache.flink.util.Collector;

import java.time.Duration;
import java.util.Date;
import java.util.List;

/**
 * Desc: Search-keyword statistics computation.
 * <p>
 * Data flow: web/app -> nginx -> springboot -> kafka(ods) -> flinkApp -> kafka(dwd) -> flinkApp -> clickhouse
 * Programs: gmall2020-mock-log-2020-12-18.jar -> flink-logger.sh (covers nginx/springboot/kafka(ods))
 * -> BaseLogApp (covers flinkApp/kafka(dwd)) -> KeywordStatsApp (covers flinkApp/clickhouse(dws))
 */
public class KeywordStatsApp {

    public static void main(String[] args) throws Exception {

        // 1. Set up the execution environment.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        // Parallelism 1 keeps local testing deterministic; raise for production.
        env.setParallelism(1);

/*
        // Disabled while testing locally.
        // State backend + exactly-once checkpointing for production runs.
        env.setStateBackend(new FsStateBackend("hdfs://hadoop102:9000/gmall-flink/checkpoint"));
        env.enableCheckpointing(5000L, CheckpointingMode.EXACTLY_ONCE);
        env.getCheckpointConfig().setCheckpointTimeout(10000L);
        env.getCheckpointConfig().setMaxConcurrentCheckpoints(2);
        env.getCheckpointConfig().setMinPauseBetweenCheckpoints(3000L);

        // Restart strategy.
        env.setRestartStrategy(RestartStrategies.fixedDelayRestart(3, 10));
*/

        // 2. Consume the DWD page-log topic, parse JSON, and generate watermarks
        //    from the event timestamp with 1 second of allowed out-of-orderness.
        String groupId = "KeywordStatsApp";
        String pageViewSourceTopic = "dwd_page_log";

        SingleOutputStreamOperator<JSONObject> jsonObjDS = env
                .addSource(MyKafkaUtil.getKafkaConsumer(pageViewSourceTopic, groupId))
                .map(JSON::parseObject)
                .assignTimestampsAndWatermarks(
                        WatermarkStrategy.<JSONObject>forBoundedOutOfOrderness(Duration.ofSeconds(1))
                                .withTimestampAssigner(new SerializableTimestampAssigner<JSONObject>() {
                                    @Override
                                    public long extractTimestamp(JSONObject element, long recordTimestamp) {
                                        // NOTE(review): getLong("ts") NPEs on unboxing if "ts" is absent;
                                        // upstream BaseLogApp is assumed to always set it — confirm.
                                        return element.getLong("ts");
                                    }
                                })
                );

        // 3. Keep only search events: previous page is "search" and a search term is present.
        //    A null "page" object (malformed record) is dropped instead of failing the job with an NPE.
        SingleOutputStreamOperator<JSONObject> filterDS = jsonObjDS.filter(jsonObj -> {
            JSONObject page = jsonObj.getJSONObject("page");
            if (page == null) {
                return false;
            }
            String lastPageId = page.getString("last_page_id");
            String item = page.getString("item");
            return "search".equals(lastPageId) && item != null;
        });

        // 4. Tokenize the search term, emitting (keyword, 1, ts) per token.
        //    Kept as an anonymous class: a lambda here would lose the Tuple3 type
        //    information to erasure and require an explicit .returns(...) hint.
        SingleOutputStreamOperator<Tuple3<String, Long, Long>> keywordDS = filterDS.flatMap(new FlatMapFunction<JSONObject, Tuple3<String, Long, Long>>() {
            @Override
            public void flatMap(JSONObject jsonObj, Collector<Tuple3<String, Long, Long>> out) throws Exception {
                // Search content (non-null: guaranteed by the filter above).
                String item = jsonObj.getJSONObject("page").getString("item");
                // Event timestamp, propagated so the window output carries it.
                Long ts = jsonObj.getLong("ts");

                // Split the search content into keywords.
                List<String> keywords = KeywordUtil.analyze(item);

                // Emit one counted tuple per keyword.
                for (String keyword : keywords) {
                    out.collect(new Tuple3<>(keyword, 1L, ts));
                }
            }
        });

        // 5. Key by keyword, apply a 10 s tumbling event-time window, and
        //    incrementally sum counts; the window function decorates the final
        //    aggregate with window start/end strings for ClickHouse.
        SingleOutputStreamOperator<KeywordStats> keywordStatsDS = keywordDS.keyBy(line -> line.f0)
                .window(TumblingEventTimeWindows.of(Time.seconds(10)))
                .reduce(
                        new ReduceFunction<Tuple3<String, Long, Long>>() {
                            @Override
                            public Tuple3<String, Long, Long> reduce(Tuple3<String, Long, Long> value1, Tuple3<String, Long, Long> value2) throws Exception {
                                // Sum the counts; keep the later element's timestamp
                                // so the result carries the most recent event ts.
                                return new Tuple3<>(value1.f0, value1.f1 + value2.f1, value2.f2);
                            }
                        },
                        new WindowFunction<Tuple3<String, Long, Long>, KeywordStats, String, TimeWindow>() {
                            @Override
                            public void apply(String key, TimeWindow window, Iterable<Tuple3<String, Long, Long>> input, Collector<KeywordStats> out) throws Exception {
                                // The incremental reduce leaves exactly one element per window.
                                Tuple3<String, Long, Long> next = input.iterator().next();

                                out.collect(
                                        new KeywordStats(
                                                next.f0,
                                                next.f1,
                                                "search",
                                                DateTimeUtil.toYMDhms(new Date(window.getStart())),
                                                DateTimeUtil.toYMDhms(new Date(window.getEnd())),
                                                next.f2
                                        )
                                );

                            }
                        }
                );

        // 6. Print for debugging and sink the aggregates to ClickHouse.
        keywordStatsDS.print();
        keywordStatsDS.addSink(ClickHouseUtil.getSink("insert into keyword_stats(keyword,ct,source,stt,edt,ts) values(?,?,?,?,?,?)"));

        // 7. Submit the job.
        env.execute(KeywordStatsApp.class.getName());

    }
}
