package com.atguigu.gmall.app.dws;

import com.alibaba.fastjson.JSONObject;
import com.atguigu.gmall.bean.KeywordBean;
import com.atguigu.gmall.util.DateFormatUtils;
import com.atguigu.gmall.util.KeywordUtils;
import com.atguigu.gmall.util.MyClickHouseUtils;
import com.atguigu.gmall.util.MyKafkaUtils;
import jdk.nashorn.api.scripting.JSObject;
import org.apache.flink.api.common.eventtime.SerializableTimestampAssigner;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.api.common.functions.ReduceFunction;
import org.apache.flink.api.java.functions.KeySelector;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.KeyedStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.datastream.WindowedStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.windowing.WindowFunction;
import org.apache.flink.streaming.api.windowing.assigners.TumblingEventTimeWindows;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.streaming.api.windowing.windows.TimeWindow;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
import org.apache.flink.util.Collector;

import java.time.Duration;
import java.util.List;

/**
 * 流量域来源关键词粒度页面浏览各窗口汇总表（DataStream）
 * 数据流：web/app -> Nginx -> 日志服务器(.log) -> Flume -> Kafka(ODS) -> FlinkApp -> Kafka(DWD) -> FlinkApp -> ClickHouse(DWS)
 * 程  序：     Mock(lg.sh) -> Flume(f1) -> Kafka(ZK) -> BaseLogApp -> Kafka(ZK) -> DwsTrafficSourceKeywordPageViewWindow -> ClickHouse(ZK)
 *
 * @author : ranzlupup
 * @since : 2023/6/5 09:18
 */
public class DwsTrafficSourceKeywordPageViewWindowDS {
    public static void main(String[] args) throws Exception {

        //TODO 1.获取执行环境
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);

        // 1.1 状态后端设置
//        env.enableCheckpointing(3000L, CheckpointingMode.EXACTLY_ONCE);
//        env.getCheckpointConfig().setCheckpointTimeout(60 * 1000L);
//        env.getCheckpointConfig().setMinPauseBetweenCheckpoints(3000L);
//        env.getCheckpointConfig().enableExternalizedCheckpoints(
//                CheckpointConfig.ExternalizedCheckpointCleanup.RETAIN_ON_CANCELLATION
//        );
//        env.setRestartStrategy(RestartStrategies.failureRateRestart(
//                3, Time.days(1), Time.minutes(1)
//        ));
//        env.setStateBackend(new HashMapStateBackend());
//        env.getCheckpointConfig().setCheckpointStorage(
//                "hdfs://hadoop102:8020/ck"
//        );
//        System.setProperty("HADOOP_USER_NAME", "atguigu");

        //TODO 2.读取Kafka page_log 主题的数据创建表并且提取时间戳生成Watermark
        String pageLogTopicName = "FLINK_DWD_PAGE_LOG";
        String groupId = "dws_traffic_source_keyword_page_view_window_ds";
        DataStreamSource<String> pageLogKafkaDS = env.addSource(MyKafkaUtils.getFlinkKafkaConsumer(pageLogTopicName, groupId));

        //TODO 3.过滤出搜索数据
        SingleOutputStreamOperator<JSONObject> jsonObjectFilterDS = pageLogKafkaDS.flatMap(new FlatMapFunction<String, JSONObject>() {
            @Override
            public void flatMap(String value, Collector<JSONObject> out) throws Exception {
                JSONObject jsonObject = JSONObject.parseObject(value);
                JSONObject page = jsonObject.getJSONObject("page");
                String lastPageId = page.getString("last_page_id");
                String itemType = page.getString("item_type");
                String item = page.getString("item");
                if ("search".equals(lastPageId) && "keyword".equals(itemType) && item != null) {
                    out.collect(jsonObject);
                }
            }
        });
        // jsonObjectDS.print("jsonObjectFilterDS >>>>>>>>>);

        //TODO 4.切词
        SingleOutputStreamOperator<KeywordBean> keywordBeanDS = jsonObjectFilterDS.flatMap(
                new FlatMapFunction<JSONObject, KeywordBean>() {
                    @Override
                    public void flatMap(JSONObject value, Collector<KeywordBean> out) throws Exception {
                        JSONObject page = value.getJSONObject("page");
                        String item = page.getString("item");
                        Long ts = value.getLong("ts");
                        List<String> keywords = KeywordUtils.splitKeyword(item);
                        for (String keyword : keywords) {
                            out.collect(new KeywordBean(
                                    "",
                                    "",
                                    "search",
                                    keyword,
                                    1L,
                                    ts
                            ));
                        }
                    }
                }
        );
        // keywordBeanDS.print("keywordBeanDS>>>>>>>");

        //TODO 5.提取Watermark
        SingleOutputStreamOperator<KeywordBean> keywordBeanWithWmDS = keywordBeanDS.assignTimestampsAndWatermarks(
                WatermarkStrategy.<KeywordBean>forBoundedOutOfOrderness(Duration.ofSeconds(2)).withTimestampAssigner(
                        new SerializableTimestampAssigner<KeywordBean>() {
                            @Override
                            public long extractTimestamp(KeywordBean element, long recordTimestamp) {
                                return element.getTs();
                            }
                        }
                )
        );

        //TODO 6.分组
        KeyedStream<KeywordBean, String> keyedStream = keywordBeanWithWmDS.keyBy(new KeySelector<KeywordBean, String>() {
            @Override
            public String getKey(KeywordBean value) throws Exception {
                return value.getKeyword();
            }
        });

        //TODO 7.开窗
        WindowedStream<KeywordBean, String, TimeWindow> windowedStream = keyedStream.window(TumblingEventTimeWindows.of(Time.seconds(10)));

        //TODO 8.聚合
        SingleOutputStreamOperator<KeywordBean> resultDS = windowedStream.reduce(
                new ReduceFunction<KeywordBean>() {
                    @Override
                    public KeywordBean reduce(KeywordBean value1, KeywordBean value2) throws Exception {
                        value1.setKeyword_count(value1.getKeyword_count() + value2.getKeyword_count());
                        return value1;
                    }
                },
                new WindowFunction<KeywordBean, KeywordBean, String, TimeWindow>() {
                    @Override
                    public void apply(String s, TimeWindow window, Iterable<KeywordBean> input, Collector<KeywordBean> out) throws Exception {
                        KeywordBean next = input.iterator().next();
                        next.setEdt(DateFormatUtils.toYmdHms(window.getEnd()));
                        next.setStt(DateFormatUtils.toYmdHms(window.getStart()));
                        next.setTs(System.currentTimeMillis());
                        out.collect(next);
                    }
                });
        resultDS.print("resultDS>>>>>>>>>>>>");

        //TODO 9.将数据写出到ClickHouse
        resultDS.addSink(MyClickHouseUtils.getSinkFunction("insert into dws_traffic_source_keyword_page_view_window values(?,?,?,?,?,?)"));

        //TODO 10.执行任务
        env.execute("DwsTrafficSourceKeywordPageViewWindowDS");
    }
}
