package com.atguigu.edu.app.dws;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.atguigu.edu.bean.UserRegisterBean;
import com.atguigu.edu.util.ClickHouseUtil;
import com.atguigu.edu.util.DateFormatUtil;
import com.atguigu.edu.util.KafkaUtil;
import org.apache.flink.api.common.ExecutionConfig;
import org.apache.flink.api.common.eventtime.SerializableTimestampAssigner;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.functions.ReduceFunction;
import org.apache.flink.api.common.typeutils.TypeSerializer;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.windowing.AllWindowFunction;
import org.apache.flink.streaming.api.windowing.assigners.TumblingEventTimeWindows;
import org.apache.flink.streaming.api.windowing.assigners.WindowAssigner;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.streaming.api.windowing.triggers.Trigger;
import org.apache.flink.streaming.api.windowing.windows.TimeWindow;
import org.apache.flink.streaming.api.windowing.windows.Window;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
import org.apache.flink.util.Collector;

import java.time.Duration;
import java.util.Collection;

/**
 * DWS job: counts user registrations per 10-second tumbling event-time window
 * and writes the aggregated rows to ClickHouse.
 *
 * <p>Pipeline: Kafka topic {@code dwd_user_register}
 * -> map each JSON record to a {@code UserRegisterBean} with count 1
 * -> event-time watermarks (2s bounded out-of-orderness)
 * -> non-keyed 10s tumbling window, reduce sums the counts
 * -> window function stamps window start/end and emit time
 * -> ClickHouse sink ({@code dws_user_user_register_window}).
 */
public class DwsUserUserRegisterWindow {
    public static void main(String[] args) throws Exception {
        // TODO 1: obtain the stream execution environment
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        // Parallelism 1 — the non-keyed windowAll below runs on a single task anyway.
        env.setParallelism(1);

        // NOTE(review): removed unused StreamTableEnvironment — this job is pure
        // DataStream API; the Table environment was created but never referenced.

        // TODO 2: checkpointing and state backend (kept disabled for local development)
        /*
        env.enableCheckpointing(5 * 60 * 1000L, CheckpointingMode.EXACTLY_ONCE );
        env.getCheckpointConfig().setCheckpointTimeout( 3 * 60 * 1000L );
        env.getCheckpointConfig().setMaxConcurrentCheckpoints(2);
        env.setStateBackend(new HashMapStateBackend());
        env.getCheckpointConfig().setCheckpointStorage("hdfs://hadoop102:8020/gmall/ck");
        System.setProperty("HADOOP_USER_NAME", "atguigu");
         */

        // TODO 3: read the registration fact stream from Kafka
        String topicName = "dwd_user_register";
        String groupId = "dws_user_user_register_window";
        DataStreamSource<String> streamSource = env.addSource(KafkaUtil.getKafkaConsumer(topicName, groupId));

        // TODO 4: map each JSON record to a bean carrying a register count of 1;
        // window start/end are left empty and filled in after aggregation.
        SingleOutputStreamOperator<UserRegisterBean> beanStream = streamSource.map(new MapFunction<String,
                UserRegisterBean>() {
            @Override
            public UserRegisterBean map(String value) throws Exception {
                JSONObject jsonObject = JSON.parseObject(value);
                // NOTE(review): getLong("ts") returns null when "ts" is absent, which
                // would NPE on unboxing in the timestamp assigner — confirm the DWD
                // layer always emits a ts field.
                return new UserRegisterBean("", "", 1L, jsonObject.getLong("ts"));
            }
        });

        // TODO 5: assign event-time watermarks with 2 seconds of allowed out-of-orderness
        SingleOutputStreamOperator<UserRegisterBean> beanWithWatermarkStream =
                beanStream.assignTimestampsAndWatermarks(WatermarkStrategy
                        .<UserRegisterBean>forBoundedOutOfOrderness(Duration.ofSeconds(2))
                        .withTimestampAssigner(new SerializableTimestampAssigner<UserRegisterBean>() {

                                                   @Override
                                                   public long extractTimestamp(UserRegisterBean element,
                                                                                long recordTimestamp) {
                                                       return element.getTs();
                                                   }
                                               }
                        ));

        // TODO 6: 10-second tumbling event-time window; reduce sums the per-record
        // counts, then the window function stamps the window bounds on the result.
        SingleOutputStreamOperator<UserRegisterBean> resultStream =
                beanWithWatermarkStream.windowAll(TumblingEventTimeWindows.of(Time.seconds(10L)))
                        .reduce(new ReduceFunction<UserRegisterBean>() {
                            @Override
                            public UserRegisterBean reduce(UserRegisterBean value1, UserRegisterBean value2) throws Exception {
                                value1.setRegisterCt(value1.getRegisterCt() + value2.getRegisterCt());
                                return value1;
                            }
                        }, new AllWindowFunction<UserRegisterBean, UserRegisterBean, TimeWindow>() {
                            @Override
                            public void apply(TimeWindow window, Iterable<UserRegisterBean> values,
                                              Collector<UserRegisterBean> out) throws Exception {
                                // Exactly one pre-aggregated element per window after reduce.
                                UserRegisterBean userRegisterBean = values.iterator().next();
                                userRegisterBean.setStt(DateFormatUtil.toYmdHms(window.getStart()));
                                userRegisterBean.setEdt(DateFormatUtil.toYmdHms(window.getEnd()));
                                // Overwrite ts with the emit time so downstream can
                                // deduplicate/version rows (ReplacingMergeTree pattern).
                                userRegisterBean.setTs(System.currentTimeMillis());
                                out.collect(userRegisterBean);
                            }
                        });

        // TODO 7: write to ClickHouse (fixed: old comment claimed Phoenix; also
        // normalized the Hive-style "insert into table" to canonical ClickHouse
        // "INSERT INTO <table>" syntax)
        resultStream.addSink(ClickHouseUtil.getClickHouseSinkFunc(
                "insert into dws_user_user_register_window values(?,?,?,?)"));

        // TODO 8: submit the job, named after the consumer group for traceability
        env.execute(groupId);
    }
}
