package com.zhu.app.dws;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.zhu.bean.UserRegisterBean;
import com.zhu.config.KafkaTopicConfig;
import com.zhu.utils.ClickHouseUtil;
import com.zhu.utils.DataFormatUtil;
import com.zhu.utils.ZhuKafkaUtil;
import org.apache.flink.api.common.eventtime.SerializableTimestampAssigner;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.functions.AggregateFunction;
import org.apache.flink.api.common.functions.ReduceFunction;
import org.apache.flink.streaming.api.datastream.AllWindowedStream;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.windowing.AllWindowFunction;
import org.apache.flink.streaming.api.windowing.assigners.TumblingEventTimeWindows;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.streaming.api.windowing.windows.TimeWindow;
import org.apache.flink.util.Collector;
import org.apache.hadoop.hdfs.protocol.datatransfer.sasl.SaslDataTransferClient;

import java.time.Duration;

/**
 * DWS层用户域 各个窗口注册汇总表
 * 读取DWD层数据 dwd_user_region
 */
public class DWSUserUserRegisterWindowApp {

    public static void main(String[] args) throws Exception {

        // todo env
        StreamExecutionEnvironment streamExecutionEnvironment = StreamExecutionEnvironment.getExecutionEnvironment();
        streamExecutionEnvironment.setParallelism(4);  // matches the 4 partitions of the Kafka topic

        // Checkpointing — currently disabled; enable for production fault tolerance.
        /*
        streamExecutionEnvironment.setStateBackend(new HashMapStateBackend());
        streamExecutionEnvironment.getCheckpointConfig().setCheckpointStorage(ClusterParametersConfig.HDFS_CHECKPOINT_FILE_DIR);  // store checkpoints in HDFS
        System.setProperty("HADOOP_USER_NAME", "zhu");
        streamExecutionEnvironment.getCheckpointConfig().setCheckpointTimeout(10 * 60000L);  // checkpoint timeout
        streamExecutionEnvironment.getCheckpointConfig().setMaxConcurrentCheckpoints(2);  // max concurrent checkpoints
        streamExecutionEnvironment.setRestartStrategy(RestartStrategies.fixedDelayRestart(3, 5 * 1000L));  // restart strategy
         */

        // todo read Kafka topic dwd_user_register (DWD layer)
        String topic = KafkaTopicConfig.KAFKA_DWD_USER_REGISTER_TOPIC;
        String groupId = "dws_user_user_register_window" + KafkaTopicConfig.KAFKA_GROUP_ID_LAST_NAME;
        DataStreamSource<String> kafkaUserRegisterDStream = streamExecutionEnvironment.addSource(ZhuKafkaUtil.getFlinkKafkaConsumer(topic, groupId));

        // todo parse each JSON line into a UserRegisterBean.
        // stt/edt are left empty here and filled in by the window function; each
        // record contributes a register count of 1; ts is the event time parsed
        // from "create_time" (used below for watermarking).
        SingleOutputStreamOperator<UserRegisterBean> userRegisterBeanDStream = kafkaUserRegisterDStream.map(
                line -> {
                    JSONObject jsonObject = JSON.parseObject(line);
                    String createTime = jsonObject.getString("create_time");
                    return new UserRegisterBean("", "", 1L, DataFormatUtil.toTs(createTime, true));
                }
        );
        //jsonDStream.print(">>>>>");

        // todo watermark: bounded out-of-orderness of 2s, event time taken from the bean's ts
        SingleOutputStreamOperator<UserRegisterBean> userRegisterWithWaterMarkDStream
                = userRegisterBeanDStream.assignTimestampsAndWatermarks(
                WatermarkStrategy.<UserRegisterBean>forBoundedOutOfOrderness(Duration.ofSeconds(2))
                        .withTimestampAssigner(
                                (SerializableTimestampAssigner<UserRegisterBean>)
                                        (userRegisterBean, recordTimestamp) -> userRegisterBean.getTs()));  // ts parsed from yyyy-MM-dd HH:mm:ss

        // todo 10s tumbling event-time window over the whole (unkeyed) stream.
        // NOTE(review): windowAll forces the window operator to parallelism 1 —
        // acceptable for a low-volume pre-aggregated count, but a bottleneck otherwise.
        AllWindowedStream<UserRegisterBean, TimeWindow> allWindowUserRegisterDStream
                = userRegisterWithWaterMarkDStream.windowAll(TumblingEventTimeWindows.of(Time.seconds(10)));

        SingleOutputStreamOperator<UserRegisterBean> resultDStream = allWindowUserRegisterDStream.reduce(
                // Incrementally sum the register counts within the window.
                new ReduceFunction<UserRegisterBean>() {
                    @Override
                    public UserRegisterBean reduce(UserRegisterBean value1, UserRegisterBean value2) throws Exception {
                        value1.setRegisterCt(value1.getRegisterCt() + value2.getRegisterCt());
                        return value1;
                    }
                },
                // On window close, stamp the (single) reduced result with the
                // window start/end and the current processing time.
                new AllWindowFunction<UserRegisterBean, UserRegisterBean, TimeWindow>() {
                    @Override
                    public void apply(TimeWindow timeWindow, Iterable<UserRegisterBean> iterable, Collector<UserRegisterBean> collector) throws Exception {
                        String start = DataFormatUtil.toYmdHms(timeWindow.getStart());
                        String end = DataFormatUtil.toYmdHms(timeWindow.getEnd());
                        long ts = System.currentTimeMillis();
                        UserRegisterBean resultBean = iterable.iterator().next();
                        resultBean.setStt(start);
                        resultBean.setEdt(end);
                        resultBean.setTs(ts);
                        collector.collect(resultBean);
                    }
                }
        );

        // todo write to ClickHouse.
        // FIX: the bean carries four fields (stt, edt, registerCt, ts) but the
        // original SQL had only three placeholders — use four so the prepared
        // statement binds every field.
        resultDStream.print(">>>>>");
        resultDStream.addSink(ClickHouseUtil.getClickHouseSinkFunction(
                "insert into user_user_register_window values(?,?,?,?)"));

        // todo execute job (named, so it is identifiable in the Flink UI)
        streamExecutionEnvironment.execute("DWSUserUserRegisterWindowApp");
    }
}
