package com.atguigu.gmall.realtime.app.dws;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.atguigu.gmall.realtime.app.func.BeanToJsonStrFunction;
import com.atguigu.gmall.realtime.beans.TrafficPageViewBean;
import com.atguigu.gmall.realtime.utils.DateFormatUtil;
import com.atguigu.gmall.realtime.utils.DorisUtil;
import com.atguigu.gmall.realtime.utils.MyKafkaUtil;
import org.apache.commons.lang3.StringUtils;
import org.apache.flink.api.common.eventtime.SerializableTimestampAssigner;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.functions.ReduceFunction;
import org.apache.flink.api.common.state.StateTtlConfig;
import org.apache.flink.api.common.state.ValueState;
import org.apache.flink.api.common.state.ValueStateDescriptor;
import org.apache.flink.api.common.time.Time;
import org.apache.flink.api.java.functions.KeySelector;
import org.apache.flink.api.java.tuple.Tuple4;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.connector.kafka.source.KafkaSource;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.KeyedStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.datastream.WindowedStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.KeyedProcessFunction;
import org.apache.flink.streaming.api.functions.windowing.WindowFunction;
import org.apache.flink.streaming.api.windowing.assigners.TumblingEventTimeWindows;
import org.apache.flink.streaming.api.windowing.windows.TimeWindow;
import org.apache.flink.util.Collector;

/**
 * @author Felix
 * @date 2023/12/27
 * DWS aggregation: traffic page-view statistics grouped by version code (vc),
 * channel (ch), area (ar) and new/returning-visitor flag (is_new), computed over
 * 10-second tumbling event-time windows and written to Doris.
 *
 * Processes required to be running:
 *      zk, kafka, flume, doris, DwdTrafficBaseLogSplit, DwsTrafficVcChArIsNewPageViewWindow
 */
public class DwsTrafficVcChArIsNewPageViewWindow {
    public static void main(String[] args) throws Exception {
        //TODO 1. Environment setup
        //1.1 Create the stream execution environment
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        //1.2 Set parallelism
        env.setParallelism(4);
        //TODO 2. Checkpoint configuration
        env.enableCheckpointing(5000L);

        //TODO 3. Read the page log from Kafka
        //3.1 Declare the source topic and consumer group
        String topic = "dwd_traffic_page_log";
        String groupId = "dws_traffic_vc_ch_ar_isnew_group";
        //3.2 Build the Kafka source
        KafkaSource<String> kafkaSource = MyKafkaUtil.getKafkaSource(topic, groupId);
        //3.3 Wrap the source as a stream (watermarks are assigned later, after parsing)
        DataStreamSource<String> kafkaStrDS
            = env.fromSource(kafkaSource, WatermarkStrategy.noWatermarks(), "kafka_source");
        //TODO 4. Convert the stream element type: jsonStr -> jsonObj
        SingleOutputStreamOperator<JSONObject> jsonObjDS = kafkaStrDS.map(JSON::parseObject);
        // jsonObjDS.print(">>>");

        //TODO 5. Key the stream by device id (mid) so per-device state can be kept
        KeyedStream<JSONObject, String> midKeyedDS
            = jsonObjDS.keyBy(jsonObj -> jsonObj.getJSONObject("common").getString("mid"));
        //TODO 6. Convert jsonObj -> statistics bean (analogous to the "assign 1" step of wordcount).
        // Per-device keyed state is used to mark at most one UV per device per day;
        // a session visit (sv) is counted when the page has no last_page_id.
        SingleOutputStreamOperator<TrafficPageViewBean> beanDS = midKeyedDS.process(
            new KeyedProcessFunction<String, JSONObject, TrafficPageViewBean>() {
                // Last visit date (yyyy-MM-dd) of the current device; TTL 1 day so
                // stale devices do not accumulate state indefinitely.
                private ValueState<String> lastVisitDateState;

                @Override
                public void open(Configuration parameters) throws Exception {
                    ValueStateDescriptor<String> valueStateDescriptor
                        = new ValueStateDescriptor<String>("lastVisitDateState",String.class);
                    valueStateDescriptor.enableTimeToLive(StateTtlConfig.newBuilder(Time.days(1)).build());
                    lastVisitDateState = getRuntimeContext().getState(valueStateDescriptor);
                }

                @Override
                public void processElement(JSONObject jsonObj, Context ctx, Collector<TrafficPageViewBean> out) throws Exception {
                    JSONObject commonJsonObj = jsonObj.getJSONObject("common");
                    JSONObject pageJsonObj = jsonObj.getJSONObject("page");
                    Long ts = jsonObj.getLong("ts");


                    String vc = commonJsonObj.getString("vc");
                    String ch = commonJsonObj.getString("ch");
                    String ar = commonJsonObj.getString("ar");
                    String isNew = commonJsonObj.getString("is_new");


                    // Read the device's last visit date from state; count a UV only on
                    // the first page log of the device for the current date.
                    String lastVisitDate = lastVisitDateState.value();
                    String curVisitDate = DateFormatUtil.toDate(ts);
                    Long uvCt = 0L;
                    if(StringUtils.isEmpty(lastVisitDate)||!lastVisitDate.equals(curVisitDate)){
                        uvCt = 1L;
                        lastVisitDateState.update(curVisitDate);
                    }

                    // No last_page_id means this page starts a new session visit.
                    Long svCt = StringUtils.isEmpty(pageJsonObj.getString("last_page_id")) ? 1L : 0L ;

                    // stt/edt/curDate are left blank here; they are filled in by the
                    // window function after aggregation. pvCt is 1 per page log.
                    TrafficPageViewBean viewBean = new TrafficPageViewBean(
                        "",
                        "",
                        vc,
                        ch,
                        ar,
                        isNew,
                        "",
                        uvCt,
                        svCt,
                        1L,
                        pageJsonObj.getLong("during_time"),
                        ts
                    );
                    out.collect(viewBean);
                }
            }
        );

        //beanDS.print(">>>");
        //TODO 7. Assign watermarks and extract the event-time field.
        // Monotonous timestamps are assumed (no out-of-orderness allowance);
        // NOTE(review): if the upstream can deliver out-of-order events, consider
        // forBoundedOutOfOrderness instead — confirm against the DWD producer.
        SingleOutputStreamOperator<TrafficPageViewBean> withWatermarkDS = beanDS.assignTimestampsAndWatermarks(
            WatermarkStrategy
                .<TrafficPageViewBean>forMonotonousTimestamps()
                .withTimestampAssigner(
                    new SerializableTimestampAssigner<TrafficPageViewBean>() {
                        @Override
                        public long extractTimestamp(TrafficPageViewBean bean, long recordTimestamp) {
                            return bean.getTs();
                        }
                    }
                )
        );

        //TODO 8. Key by the statistics dimensions (vc, ch, ar, is_new).
        // NOTE(review): UV was already deduplicated per device in step 6, so the UV
        // count per dimension group is the sum of those per-device flags — this is
        // the intended design of this model, not an exact distinct count per group.
        KeyedStream<TrafficPageViewBean, Tuple4<String, String, String, String>> dimKeyedDS = withWatermarkDS.keyBy(
            new KeySelector<TrafficPageViewBean, Tuple4<String, String, String, String>>() {
                @Override
                public Tuple4<String, String, String, String> getKey(TrafficPageViewBean viewBean) throws Exception {
                    return Tuple4.of(
                        viewBean.getVc(),
                        viewBean.getCh(),
                        viewBean.getAr(),
                        viewBean.getIsNew()
                    );
                }
            }
        );
        //TODO 9. Open 10-second tumbling event-time windows per dimension group
        WindowedStream<TrafficPageViewBean, Tuple4<String, String, String, String>, TimeWindow> windowDS
            = dimKeyedDS.window(TumblingEventTimeWindows.of(org.apache.flink.streaming.api.windowing.time.Time.seconds(10)));
        //TODO 10. Aggregate within each window
        /*
        Window aggregation styles:
            Incremental:
                reduce    : input, accumulator and output types are the same
                aggregate : input, accumulator and output types may differ
            Full-window:
                apply     : the method receives the Window object
                process   : the method receives a Context object (lower level)
            Incremental + full-window: the incremental function keeps only the running
            result (saves space); the full-window function then enriches it with
            window metadata (start/end) via the context.
        */
        SingleOutputStreamOperator<TrafficPageViewBean> reduceDS = windowDS.reduce(
            // Incremental part: sum the four measures as elements arrive.
            new ReduceFunction<TrafficPageViewBean>() {
                @Override
                public TrafficPageViewBean reduce(TrafficPageViewBean value1, TrafficPageViewBean value2) throws Exception {
                    value1.setPvCt(value1.getPvCt() + value2.getPvCt());
                    value1.setUvCt(value1.getUvCt() + value2.getUvCt());
                    value1.setSvCt(value1.getSvCt() + value2.getSvCt());
                    value1.setDurSum(value1.getDurSum() + value2.getDurSum());
                    return value1;
                }
            },
            // Full-window part: stamp the pre-aggregated bean with the window
            // start/end time and the partition date derived from the window start.
            new WindowFunction<TrafficPageViewBean, TrafficPageViewBean, Tuple4<String, String, String, String>, TimeWindow>() {
                @Override
                public void apply(Tuple4<String, String, String, String> stringStringStringStringTuple4, TimeWindow window, Iterable<TrafficPageViewBean> input, Collector<TrafficPageViewBean> out) throws Exception {
                    String stt = DateFormatUtil.toYmdHms(window.getStart());
                    String edt = DateFormatUtil.toYmdHms(window.getEnd());
                    String curDate = DateFormatUtil.toDate(window.getStart());

                    // With reduce, the iterable holds exactly one pre-aggregated bean,
                    // but iterate defensively and do not forget to emit it.
                    for (TrafficPageViewBean viewBean : input) {
                        viewBean.setStt(stt);
                        viewBean.setEdt(edt);
                        viewBean.setCurDate(curDate);
                        out.collect(viewBean);
                    }
                }
            }
        );
        // NOTE(review): debug print sink — consider removing or demoting to a logger
        // before production deployment.
        reduceDS.print(">>>>");
        //TODO 11. Write the aggregated results to Doris
        reduceDS
            .map(new BeanToJsonStrFunction<>())
            .sinkTo(DorisUtil.getDorisSink("dws_traffic_vc_ch_ar_is_new_page_view_window"));

        env.execute();
    }
}
