package com.raylu.realtime.app.dws;

import com.alibaba.fastjson.JSON;
import com.raylu.realtime.app.func.ConvertTimeStampFunction;
import com.raylu.realtime.app.func.KeywordTableFunctionByIk;
import com.raylu.realtime.bean.KeywordStats;
import com.raylu.realtime.bean.PageBean;
import com.raylu.realtime.utils.KafkaSourceUtil;
import com.raylu.realtime.utils.KeywordUtil;
import com.raylu.realtime.utils.PropertiesUtil;
import org.apache.flink.api.common.eventtime.SerializableTimestampAssigner;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.windowing.ProcessWindowFunction;
import org.apache.flink.streaming.api.windowing.assigners.TumblingEventTimeWindows;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.streaming.api.windowing.windows.TimeWindow;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
import org.apache.flink.util.Collector;

import java.sql.Timestamp;
import java.time.Duration;
import java.util.List;
import java.util.Properties;

/**
 * Description: Keyword statistics job — consumes page-log JSON from Kafka, keeps
 * "good_list" page visits, splits the searched item into keywords, and counts each
 * keyword per 10-second event-time tumbling window (ClickHouse sink still TODO).
 * <p>
 * Create by lucienoz on 2022/1/13.
 * Copyright © 2022 lucienoz. All rights reserved.
 */
public class KeywordStatsApp2API {

    public static void main(String[] args) throws Exception {
        // External configuration: Kafka topic/group-id keys, state-backend URL, etc.
        Properties load = PropertiesUtil.load("config.properties");

        //TODO 1. Prepare the Flink execution environment
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(4);

        //TODO 2. Checkpoint settings (disabled here; re-enable for production runs)
//        env.enableCheckpointing(5000L);
//        env.getCheckpointConfig().setCheckpointTimeout(60*1000L);
//        env.getCheckpointConfig().setCheckpointingMode(CheckpointingMode.EXACTLY_ONCE);
//        env.getCheckpointConfig().enableExternalizedCheckpoints(CheckpointConfig.ExternalizedCheckpointCleanup.RETAIN_ON_CANCELLATION);
//        env.setRestartStrategy(RestartStrategies.fixedDelayRestart(3, Time.seconds(3)));
//        env.setStateBackend(new FsStateBackend(load.getProperty("keyword.stats.app.fsstatebackend.url")));
//        System.setProperty("HADOOP_USER_NAME", "raylu");

        //TODO 3. Source: page-log JSON from Kafka -> PageBean, with 3s bounded-out-of-orderness
        //        watermarks on the event timestamp; keep only "good_list" visits that carry an
        //        item, then split the item string into individual keywords.
        SingleOutputStreamOperator<String> keywordStream = env
                .addSource(KafkaSourceUtil.getKafkaSource(load.getProperty("keyword.stats.app.kafka.source-topic1"), load.getProperty("keyword.stats.app.kafka.group-id")))
                .map(r -> JSON.parseObject(r, PageBean.class))
                .assignTimestampsAndWatermarks(WatermarkStrategy.<PageBean>forBoundedOutOfOrderness(Duration.ofSeconds(3L))
                        .withTimestampAssigner(new SerializableTimestampAssigner<PageBean>() {
                            @Override
                            public long extractTimestamp(PageBean element, long recordTimestamp) {
                                return element.getTs();
                            }
                        }))
                .filter(r -> "good_list".equals(r.getPage_id()) && r.getItem() != null)
                .flatMap(new FlatMapFunction<PageBean, String>() {
                    @Override
                    public void flatMap(PageBean value, Collector<String> out) throws Exception {
                        // KeywordUtil tokenizes the searched item into keywords (IK analyzer).
                        List<String> keywords = KeywordUtil.getKeyword(value.getItem());
                        for (String keyword : keywords) {
                            out.collect(keyword);
                        }
                    }
                });
        keywordStream.print("----");

        //TODO 4. Aggregate: count occurrences of each keyword per 10s event-time tumbling window.
        keywordStream
                .keyBy(r -> r)
                .window(TumblingEventTimeWindows.of(Time.seconds(10L)))
                .process(new ProcessWindowFunction<String, KeywordStats, String, TimeWindow>() {
                    @Override
                    public void process(String keyword, ProcessWindowFunction<String, KeywordStats, String, TimeWindow>.Context context, Iterable<String> elements, Collector<KeywordStats> out) throws Exception {
                        long start = context.window().getStart();
                        long end = context.window().getEnd();
                        // Count explicitly: Spliterator.getExactSizeIfKnown() returns -1 when the
                        // spliterator is not SIZED, which Flink's window-state Iterable does not
                        // guarantee — the old code could emit a count of -1.
                        long count = 0L;
                        for (String ignored : elements) {
                            count++;
                        }
                        out.collect(new KeywordStats(new Timestamp(start), new Timestamp(end), keyword, "", count));
                    }
                }).print();

        //TODO 7. Write the aggregated stats to ClickHouse

        env.execute();
    }

}
