package com.atguigu.app.dws;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.atguigu.bean.UserRegisterBean;
import com.atguigu.utils.DateFormatUtil;
import com.atguigu.utils.KafkaUtil;
import com.atguigu.utils.MyClickHouseUtil;
import org.apache.flink.api.common.eventtime.SerializableTimestampAssigner;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.functions.ReduceFunction;
import org.apache.flink.streaming.api.datastream.AllWindowedStream;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.windowing.AllWindowFunction;
import org.apache.flink.streaming.api.windowing.assigners.TumblingEventTimeWindows;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.streaming.api.windowing.windows.TimeWindow;
import org.apache.flink.util.Collector;

import java.time.Duration;
//Data flow : web/app -> mysql(binlog) -> maxwell -> kafka(ods) -> FlinkApp(also uses Flink CDC) -> kafka(dwd) -> FlinkApp -> clickhouse(dws)
//Processes :    Mock -> mysql         -> maxwell -> kafka(zk)  -> BaseDBApp                     -> kafka(zk) -> Dws05UserUserRegisterWindow -> clickhouse(zk)
/*
5 DWS: user-domain user-registration per-window summary table (exercise)
10.5.1 Main task
	Read from the DWD-layer user-registration table, count registered users per window, and write the result to ClickHouse.

 */
//todo 1. Set up the execution environment
//todo 2. Read the Kafka user-register topic into a stream
//todo 3. Convert each record into a JavaBean
//todo 4. Extract timestamps and generate watermarks
//todo 5. Window and aggregate
//todo 6. Write the result to ClickHouse
//todo 7. Launch the job
public class Dws05UserUserRegisterWindow {
    public static void main(String[] args) throws Exception {
        //todo 1. Set up the execution environment
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);

        //todo In production the checkpoint settings below must be enabled; they are commented
        // out for local testing so HDFS does not have to be started every run.
        // The job would then be resumed from a checkpoint or savepoint.
//        //2.1 Enable checkpointing every 3s with exactly-once semantics (aligned barriers by default)
//        env.enableCheckpointing(3000L, CheckpointingMode.EXACTLY_ONCE);
//        //2.2 A single checkpoint attempt times out after 1 min; a timed-out attempt is
//        //    discarded and the job simply continues with the next one
//        env.getCheckpointConfig().setCheckpointTimeout(60*1000L);
//        //2.3 Leave at least 3s between the end of one checkpoint and the start of the next
//        env.getCheckpointConfig().setMinPauseBetweenCheckpoints(3000L);
//        //2.4 Keep the last checkpoint when the job is cancelled
//        env.getCheckpointConfig().enableExternalizedCheckpoints(
//                CheckpointConfig.ExternalizedCheckpointCleanup.RETAIN_ON_CANCELLATION
//        );
//        //2.5 Failure-rate restart strategy: at most 3 restarts per day, 1 min apart
//        env.setRestartStrategy(RestartStrategies.failureRateRestart(
//                3, Time.days(1L),Time.minutes(1L)
//        ));
//        //2.6 State backend: working state on the heap, checkpoint storage on HDFS
//        env.setStateBackend(new HashMapStateBackend());
//        env.getCheckpointConfig().setCheckpointStorage(
//                "hdfs://hadoop102:8020/flinkCDC/220828"
//        );
//        //2.7 User name used when writing to HDFS
//        System.setProperty("HADOOP_USER_NAME","atguigu");

        //todo 2. Read the Kafka user-register topic into a stream
        String topic = "dwd_user_register";
        String groupId = "user_register_220828";
        DataStreamSource<String> sourceStream = env.addSource(KafkaUtil.getFlinkKafkaConsumer(topic, groupId));

        //todo 3. Convert each record into a JavaBean.
        // stt/edt are left empty here — they are filled in by the window function below;
        // every record contributes a registration count of 1.
        SingleOutputStreamOperator<UserRegisterBean> beanStream = sourceStream.map(record -> {
            JSONObject json = JSON.parseObject(record);
            long eventTs = DateFormatUtil.toTs(json.getString("create_time"), true);
            return new UserRegisterBean("", "", 1L, eventTs);
        });

        //todo 4. Extract timestamps and generate watermarks (2s bounded out-of-orderness)
        WatermarkStrategy<UserRegisterBean> watermarkStrategy = WatermarkStrategy
                .<UserRegisterBean>forBoundedOutOfOrderness(Duration.ofSeconds(2))
                .withTimestampAssigner(new SerializableTimestampAssigner<UserRegisterBean>() {
                    @Override
                    public long extractTimestamp(UserRegisterBean element, long recordTimestamp) {
                        return element.getTs();
                    }
                });
        SingleOutputStreamOperator<UserRegisterBean> watermarkedStream =
                beanStream.assignTimestampsAndWatermarks(watermarkStrategy);

        //todo 5. Window and aggregate: 10s tumbling event-time windows over the whole
        // stream; sum the per-record counts, then stamp each aggregate with the window
        // bounds and the current processing time before emitting it.
        SingleOutputStreamOperator<UserRegisterBean> resultStream = watermarkedStream
                .windowAll(TumblingEventTimeWindows.of(Time.seconds(10)))
                .reduce(new ReduceFunction<UserRegisterBean>() {
                    @Override
                    public UserRegisterBean reduce(UserRegisterBean value1, UserRegisterBean value2) throws Exception {
                        value1.setRegisterCt(value1.getRegisterCt() + value2.getRegisterCt());
                        return value1;
                    }
                }, new AllWindowFunction<UserRegisterBean, UserRegisterBean, TimeWindow>() {
                    @Override
                    public void apply(TimeWindow window, Iterable<UserRegisterBean> values, Collector<UserRegisterBean> out) throws Exception {
                        // The reduce leaves exactly one pre-aggregated bean per window.
                        for (UserRegisterBean bean : values) {
                            bean.setStt(DateFormatUtil.toYmdHms(window.getStart()));
                            bean.setEdt(DateFormatUtil.toYmdHms(window.getEnd()));
                            bean.setTs(System.currentTimeMillis());
                            out.collect(bean);
                        }
                    }
                });

        resultStream.print("即将写到clickhouse的数据");

        //todo 6. Write the result to ClickHouse
        resultStream.addSink(MyClickHouseUtil.getSinkFunction("insert into dws_user_user_register_window values(?,?,?,?)"));

        //todo 7. Launch the job
        env.execute("Dws05UserUserRegisterWindow");
    }
}
