package com.atliuzu.app.dws;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.atliuzu.bean.UserLoginBean;
import com.atliuzu.bean.UserNewAndVistorBean;
import com.atliuzu.utils.ClickHouseUtil;
import com.atliuzu.utils.DateFormatUtil;
import com.atliuzu.utils.MyKafkaUtil;
import org.apache.flink.api.common.eventtime.SerializableTimestampAssigner;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.api.common.functions.ReduceFunction;
import org.apache.flink.api.common.functions.RichFlatMapFunction;
import org.apache.flink.api.common.state.ValueState;
import org.apache.flink.api.common.state.ValueStateDescriptor;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.datastream.*;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.windowing.AllWindowFunction;
import org.apache.flink.streaming.api.windowing.assigners.TumblingEventTimeWindows;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.streaming.api.windowing.windows.TimeWindow;
import org.apache.flink.util.Collector;

import java.time.Duration;

/**
 * @author w
 * @create 2022-08-20-10:22
 */
/**
 * DWS job: counts new-user registrations and unique visitors per 10-second
 * event-time tumbling window, then (once enabled) writes the aggregate rows
 * to ClickHouse.
 *
 * <p>Pipeline: Kafka (register + unique-visitor topics) -&gt; map to
 * {@code UserNewAndVistorBean} -&gt; union -&gt; watermarks -&gt; windowAll -&gt;
 * reduce -&gt; print / ClickHouse sink.
 */
public class DwsUserUserNewAndLoginWindow {
    public static void main(String[] args) throws Exception {
        // TODO 1. Stream execution environment (parallelism 1 for local runs).
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);

        // TODO 2. State backend / checkpointing — kept disabled for local development.
//        env.enableCheckpointing(3000L, CheckpointingMode.EXACTLY_ONCE);
//        env.getCheckpointConfig().setCheckpointTimeout(60 * 1000L);
//        env.getCheckpointConfig().setMinPauseBetweenCheckpoints(3000L);
//        env.getCheckpointConfig().enableExternalizedCheckpoints(
//                CheckpointConfig.ExternalizedCheckpointCleanup.RETAIN_ON_CANCELLATION
//        );
//        env.setRestartStrategy(RestartStrategies.failureRateRestart(
//                3, Time.days(1), Time.minutes(1)
//        ));
//        env.setStateBackend(new HashMapStateBackend());
//        env.getCheckpointConfig().setCheckpointStorage(
//                "hdfs://hadoop102:8020/ck"
//        );
//        System.setProperty("HADOOP_USER_NAME", "atguigu");

        // TODO 3. Read the DWD-layer source topics.
        // NOTE(review): both consumers share one consumer group; that is legal for
        // distinct topics, though the group name only mentions the register topic.
        String topicLogin = "dwd_traffic_unique_visitor_detail";

        String topicNewUser = "dwd_user_register";
        String groupId = "dwd_user_register_06";
        DataStreamSource<String> registerSource =
                env.addSource(MyKafkaUtil.getFlinkKafkaConsumer(topicNewUser, groupId));

        DataStreamSource<String> uniqueVisitorSource =
                env.addSource(MyKafkaUtil.getFlinkKafkaConsumer(topicLogin, groupId));

        // TODO 4. Convert each raw JSON record into the shared stats bean.
        // Each registration contributes 1 to the new-user count.
        // The register topic's "ts" is in seconds, hence the *1000 to milliseconds
        // — presumably from a Maxwell/binlog source; TODO confirm upstream format.
        SingleOutputStreamOperator<UserNewAndVistorBean> registerBeanStream =
                registerSource.map(raw -> {
                    JSONObject record = JSON.parseObject(raw);
                    long tsMillis = record.getLong("ts") * 1000;
                    return new UserNewAndVistorBean("", "", 1L, 0L, tsMillis);
                });

        // Each unique-visitor record contributes 1 to the UV count; its "ts" is
        // used as-is (assumed to already be in milliseconds — verify upstream).
        SingleOutputStreamOperator<UserNewAndVistorBean> visitorBeanStream =
                uniqueVisitorSource.map(raw -> {
                    JSONObject record = JSON.parseObject(raw);
                    return new UserNewAndVistorBean("", "", 0L, 1L, record.getLong("ts"));
                });

        // TODO 5. Union the two streams and assign event-time watermarks
        // (2 s bounded out-of-orderness, timestamp taken from the bean).
        DataStream<UserNewAndVistorBean> merged = registerBeanStream.union(visitorBeanStream);

        SingleOutputStreamOperator<UserNewAndVistorBean> withWatermarks =
                merged.assignTimestampsAndWatermarks(
                        WatermarkStrategy.<UserNewAndVistorBean>forBoundedOutOfOrderness(Duration.ofSeconds(2))
                                .withTimestampAssigner(new SerializableTimestampAssigner<UserNewAndVistorBean>() {
                                    @Override
                                    public long extractTimestamp(UserNewAndVistorBean element, long recordTimestamp) {
                                        return element.getTs();
                                    }
                                }));

        // TODO 6. 10-second event-time tumbling window over the whole stream,
        // incrementally summing both counters, then stamping window bounds.
        AllWindowedStream<UserNewAndVistorBean, TimeWindow> windowed =
                withWatermarks.windowAll(TumblingEventTimeWindows.of(Time.seconds(10L)));

        SingleOutputStreamOperator<UserNewAndVistorBean> aggregated = windowed.reduce(
                (left, right) -> {
                    // Incremental aggregation: fold the right-hand counts into the left bean.
                    left.setNewusCt(left.getNewusCt() + right.getNewusCt());
                    left.setUvCt(left.getUvCt() + right.getUvCt());
                    return left;
                },
                new AllWindowFunction<UserNewAndVistorBean, UserNewAndVistorBean, TimeWindow>() {
                    @Override
                    public void apply(TimeWindow window, Iterable<UserNewAndVistorBean> values, Collector<UserNewAndVistorBean> out) throws Exception {
                        // The reduce leaves exactly one pre-aggregated bean per window;
                        // enrich it with window start/end and an emission timestamp.
                        UserNewAndVistorBean result = values.iterator().next();
                        result.setTs(System.currentTimeMillis());
                        result.setStt(DateFormatUtil.toYmdHms(window.getStart()));
                        result.setEdt(DateFormatUtil.toYmdHms(window.getEnd()));

                        out.collect(result);
                    }
                });

        aggregated.print("reduceDS>>>>>");
        // TODO 7. Write the windowed aggregates to ClickHouse (sink currently disabled).
     /*   aggregated.addSink ( ClickHouseUtil.getJdbcSink ( "insert into dws_user_new_and_login_window values(?,?,?,?,?)" ) );
        */
        // TODO 8. Submit the job.
        env.execute("DwsUserUserNewAndLoginWindow");
    }
}
