package com.atguigu.edu.realtime.app.dws.user;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.alibaba.fastjson.PropertyNamingStrategy;
import com.alibaba.fastjson.serializer.SerializeConfig;
import com.atguigu.edu.realtime.app.BaseAppV2;
import com.atguigu.edu.realtime.bean.UserNewActiveBean;
import com.atguigu.edu.realtime.common.Constant;
import com.atguigu.edu.realtime.common.KafkaTopicConfig;
import com.atguigu.edu.realtime.util.DateFormatUtil;
import com.atguigu.edu.realtime.util.FlinkSinkUtil;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.functions.ReduceFunction;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.windowing.ProcessAllWindowFunction;
import org.apache.flink.streaming.api.windowing.assigners.TumblingEventTimeWindows;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.streaming.api.windowing.windows.TimeWindow;
import org.apache.flink.util.Collector;

import java.time.Duration;
import java.util.HashMap;

/**
 * DWS summary job: counts new users (from the user-register topic) and active
 * users (from the unique-visitor-detail topic) per 5-second event-time tumbling
 * window, and writes the windowed counts to the Doris table
 * {@code edu.dws_user_new_active_window}.
 *
 * <p>NOTE(review): the class name starts with "Dwd" but both the package
 * ({@code app.dws.user}) and the target table ({@code dws_user_new_active_window})
 * indicate a DWS-layer job. A rename to {@code DwsUserNewActiveWindow} looks
 * intended, but is kept as-is here because launch scripts may reference the
 * current class name — confirm before renaming.
 */
public class DwdUserNewActiveWindow extends BaseAppV2 {

    /**
     * Shared fastjson config that serializes the camelCase bean fields as the
     * snake_case column names expected by Doris. Built once per JVM instead of
     * once per record (the original allocated a new SerializeConfig inside the
     * per-element map lambda).
     */
    private static final SerializeConfig SNAKE_CASE_CONFIG = new SerializeConfig();

    static {
        SNAKE_CASE_CONFIG.propertyNamingStrategy = PropertyNamingStrategy.SnakeCase;
    }

    public static void main(String[] args) {
        new DwdUserNewActiveWindow().init(3077,
                2,
                "DwdUserNewActiveWindow",
                Constant.TOPIC_DWD_USER_REGISTER, KafkaTopicConfig.DWD_TRAFFIC_UNIQUE_VISITOR_DETAIL_TOPIC);
    }

    @Override
    protected void handle(StreamExecutionEnvironment env, HashMap<String, DataStreamSource<String>> streams) {
        // Map both source topics onto the shared bean type and union them into one stream.
        DataStream<UserNewActiveBean> beanStream = parseAndUnion(streams);
        // Aggregate counts per event-time tumbling window.
        SingleOutputStreamOperator<UserNewActiveBean> result = winAgg(beanStream);
        // Sink the windowed results to Doris.
        writeToDoris(result);
    }

    /**
     * Serializes each windowed bean to snake_case JSON and sinks it to Doris.
     *
     * @param result the aggregated per-window beans
     */
    private void writeToDoris(SingleOutputStreamOperator<UserNewActiveBean> result) {
        result
                .map(bean -> JSON.toJSONString(bean, SNAKE_CASE_CONFIG))
                .addSink(FlinkSinkUtil.getDorisSink("edu.dws_user_new_active_window"));
    }

    /**
     * Assigns event-time watermarks (10s bounded out-of-orderness, timestamps
     * taken from the bean's {@code ts} field) and sums the new/active counters
     * in 5-second tumbling windows over the whole (non-keyed) stream.
     *
     * <p>The incremental {@link ReduceFunction} mutates and reuses the first
     * bean as the accumulator; the {@link ProcessAllWindowFunction} then stamps
     * the single pre-aggregated result with the window start/end and date.
     *
     * @param beanStream the unioned new/active bean stream
     * @return one aggregated bean per window
     */
    private SingleOutputStreamOperator<UserNewActiveBean> winAgg(DataStream<UserNewActiveBean> beanStream) {
        return beanStream
                .assignTimestampsAndWatermarks(WatermarkStrategy
                        .<UserNewActiveBean>forBoundedOutOfOrderness(Duration.ofSeconds(10))
                        .withTimestampAssigner((bean, ts) -> bean.getTs())
                )
                .windowAll(TumblingEventTimeWindows.of(Time.seconds(5)))
                .reduce(new ReduceFunction<UserNewActiveBean>() {
                    @Override
                    public UserNewActiveBean reduce(UserNewActiveBean b1, UserNewActiveBean b2) throws Exception {
                        // Accumulate both counters into b1 and reuse it as the accumulator.
                        b1.setNewUserCt(b1.getNewUserCt() + b2.getNewUserCt());
                        b1.setActiveUserCt(b1.getActiveUserCt() + b2.getActiveUserCt());
                        return b1;
                    }
                }, new ProcessAllWindowFunction<UserNewActiveBean, UserNewActiveBean, TimeWindow>() {
                    @Override
                    public void process(Context ctx, Iterable<UserNewActiveBean> iterable, Collector<UserNewActiveBean> out) throws Exception {
                        // The reduce step leaves exactly one pre-aggregated bean per window.
                        UserNewActiveBean bean = iterable.iterator().next();
                        bean.setStt(DateFormatUtil.toDateTimeString(ctx.window().getStart()));
                        bean.setEdt(DateFormatUtil.toDateTimeString(ctx.window().getEnd()));
                        bean.setCurDate(DateFormatUtil.toDateString(ctx.window().getStart()));
                        out.collect(bean);
                    }
                });
    }

    /**
     * Parses both input topics into {@link UserNewActiveBean}s and unions them:
     * a register event contributes (newUserCt=1, activeUserCt=0), a unique-visitor
     * event contributes (newUserCt=0, activeUserCt=1). The stt/edt/curDate fields
     * are left empty here and filled by the window function.
     *
     * <p>NOTE(review): both topics' {@code ts} fields are assumed to be in the
     * same unit (milliseconds, as required by the timestamp assigner) — confirm
     * against the upstream DWD jobs that populate these topics.
     *
     * @param streams source streams keyed by topic name, as provided by BaseAppV2
     * @return the unioned bean stream
     */
    private DataStream<UserNewActiveBean> parseAndUnion(HashMap<String, DataStreamSource<String>> streams) {
        // Register events -> new users.
        SingleOutputStreamOperator<UserNewActiveBean> registerStream = streams
                .get(Constant.TOPIC_DWD_USER_REGISTER)
                .map(json -> {
                    JSONObject obj = JSON.parseObject(json);
                    Long ts = obj.getLong("ts");
                    return new UserNewActiveBean("", "", "",
                            1L, 0L,
                            ts);
                });

        // Unique-visitor events -> active users.
        SingleOutputStreamOperator<UserNewActiveBean> loginStream = streams
                .get(KafkaTopicConfig.DWD_TRAFFIC_UNIQUE_VISITOR_DETAIL_TOPIC)
                .map(json -> {
                    JSONObject obj = JSON.parseObject(json);
                    Long ts = obj.getLong("ts");
                    return new UserNewActiveBean("", "", "",
                            0L, 1L,
                            ts);
                });

        return registerStream.union(loginStream);
    }
}
