package com.atguigu.app.dws;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.atguigu.bean.TrafficPageViewBean2;
import com.atguigu.utils.DateFormatUtil;
import com.atguigu.utils.KafkaUtil;
import com.atguigu.utils.MyClickhouseUtil;
import org.apache.flink.api.common.eventtime.SerializableTimestampAssigner;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.functions.ReduceFunction;
import org.apache.flink.api.common.functions.RichMapFunction;
import org.apache.flink.api.common.state.StateTtlConfig;
import org.apache.flink.api.common.state.ValueState;
import org.apache.flink.api.common.state.ValueStateDescriptor;
import org.apache.flink.api.common.time.Time;
import org.apache.flink.api.common.typeinfo.Types;
import org.apache.flink.api.java.tuple.Tuple4;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.KeyedStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.windowing.WindowFunction;
import org.apache.flink.streaming.api.windowing.assigners.TumblingEventTimeWindows;
import org.apache.flink.streaming.api.windowing.windows.TimeWindow;
import org.apache.flink.util.Collector;


import java.time.Duration;
/*
10.2 流量域版本-渠道-地区-访客类别粒度页面浏览各窗口汇总表
10.2.1 主要任务
DWS 层是为 ADS 层服务的，通过对指标体系的分析，本节汇总表中需要有会话数、页面浏览数、浏览总时长和独立访客数四个度量字段。本节的任务是统计这四个指标，并将维度和度量数据写入 ClickHouse 汇总表。
 */
//todo 1.获取执行环境
//todo 2.读取dwd页面日志主题数据创建流
//todo 3.转换数据为json对象
//todo 4.将数据按照mid进行分组(独立访客要求去重，所以要按照mid分组)
//todo 5.计算各个度量值，转换为javabean
//todo 6.提取事件时间生成watermark
//todo 7.分组开窗聚合
//todo 8.将数据写出到clickhouse
//todo 9.启动任务

/**
 * DWS job: traffic-domain page-view summary windowed by
 * version (vc) / channel (ch) / area (ar) / is_new, written to ClickHouse.
 *
 * <p>Per 10-second event-time tumbling window and per (ar, ch, vc, is_new) key it
 * aggregates four measures: unique visitors (uvCt), session count (svCt),
 * page views (pvCt) and total view duration (durSum).
 */
public class Dws02TrafficVcChArIsNewPageViewWindow {
    public static void main(String[] args) throws Exception {
        //TODO 1. Create the execution environment
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);

        //TODO 2. Read the DWD page-log topic
        DataStreamSource<String> kafkaDS = env.addSource(KafkaUtil.getFlinkKafkaConsumer("page_topic", "VcChArIsNewPage"));

        //TODO 3. Parse each record to a JSONObject and assign event-time timestamps / watermarks
        SingleOutputStreamOperator<JSONObject> jsonObjWithWMDS = kafkaDS.map(s -> JSON.parseObject(s))
                .assignTimestampsAndWatermarks(
                        WatermarkStrategy
                                .<JSONObject>forBoundedOutOfOrderness(Duration.ofSeconds(2))
                                .withTimestampAssigner(new SerializableTimestampAssigner<JSONObject>() {
                                    @Override
                                    public long extractTimestamp(JSONObject element, long recordTimestamp) {
                                        // NOTE(review): assumes every record carries "ts" (epoch millis);
                                        // a missing ts would NPE on unboxing — confirm upstream guarantees it.
                                        return element.getLong("ts");
                                    }
                                }));

        jsonObjWithWMDS.print("jsonObjWithWMDS>>>");
        //TODO 4. Key by mid — unique-visitor deduplication must see all events of one device together
        KeyedStream<JSONObject, String> keyByMidDS = jsonObjWithWMDS.keyBy(s -> s.getJSONObject("common").getString("mid"), Types.STRING);
        keyByMidDS.print("keyByMidDS>>>");

        //TODO 5. Use keyed state to flag unique visitors and fill in the per-record measures
        SingleOutputStreamOperator<TrafficPageViewBean2> trafficPageViewBeanDS = keyByMidDS.map(new RichMapFunction<JSONObject, TrafficPageViewBean2>() {
            // Last visit date (yyyy-MM-dd) of this mid; used to emit uvCt=1 once per day.
            private ValueState<String> uvState;

            @Override
            public void open(Configuration parameters) throws Exception {
                ValueStateDescriptor<String> valueStateDescriptor = new ValueStateDescriptor<>("uv-state", String.class);
                // Expire the state after 24h of no writes so a visitor counts again the next day
                // even if the date comparison below were never reached.
                StateTtlConfig ttlConfig = StateTtlConfig.newBuilder(Time.hours(24))
                        .setUpdateType(StateTtlConfig.UpdateType.OnCreateAndWrite)
                        .build();
                valueStateDescriptor.enableTimeToLive(ttlConfig);

                uvState = getRuntimeContext().getState(valueStateDescriptor);
            }

            @Override
            public TrafficPageViewBean2 map(JSONObject value) throws Exception {
                String lastVisitDt = uvState.value();

                // Date (yyyy-MM-dd) of the current record's event time
                String curDt = DateFormatUtil.toDate(value.getLong("ts"));

                // uvCt = 1 only for this mid's first record of the day.
                // compareTo > 0 (instead of !equals) keeps late records from a
                // previous day from resetting the stored date backwards.
                Long uvCt = 0L;
                if (lastVisitDt == null || curDt.compareTo(lastVisitDt) > 0) {
                    uvCt = 1L;
                    uvState.update(curDt);
                }

                JSONObject page = value.getJSONObject("page");

                // View duration of this page; guard against a missing field —
                // a null here would NPE later in the window reduce when summed.
                Long durSum = page.getLong("during_time");
                if (durSum == null) {
                    durSum = 0L;
                }

                // A record without last_page_id is the first page of a session.
                Long svCt = 0L;
                if (page.getString("last_page_id") == null) {
                    svCt = 1L;
                }

                JSONObject common = value.getJSONObject("common");

                return TrafficPageViewBean2.builder()
                        .ar(common.getString("ar"))
                        .ch(common.getString("ch"))
                        .vc(common.getString("vc"))
                        .isNew(common.getString("is_new"))
                        .uvCt(uvCt)      // unique visitors
                        .durSum(durSum)  // total view duration
                        .svCt(svCt)      // session count
                        .pvCt(1L)        // page views: one record per page event
                        .build();
            }
        });

        trafficPageViewBeanDS.print("TrafficPageViewBeanDS>>>");
        //TODO 6. Key by dimensions, open 10s event-time tumbling windows, aggregate
        KeyedStream<TrafficPageViewBean2, Tuple4<String, String, String, String>> keyedStream = trafficPageViewBeanDS.keyBy(value ->
                new Tuple4<>(value.getAr(), value.getCh(), value.getVc(), value.getIsNew()), Types.TUPLE(Types.STRING, Types.STRING, Types.STRING, Types.STRING));

        SingleOutputStreamOperator<TrafficPageViewBean2> resultDS = keyedStream.window(TumblingEventTimeWindows.of(org.apache.flink.streaming.api.windowing.time.Time.seconds(10)))
                .reduce(new ReduceFunction<TrafficPageViewBean2>() {
                    /**
                     * Incrementally sums the four measures.
                     *
                     * @param value1 the accumulated value so far (mutated and returned)
                     * @param value2 the incoming value
                     */
                    @Override
                    public TrafficPageViewBean2 reduce(TrafficPageViewBean2 value1, TrafficPageViewBean2 value2) throws Exception {
                        value1.setPvCt(value1.getPvCt() + value2.getPvCt());
                        value1.setDurSum(value1.getDurSum() + value2.getDurSum());
                        value1.setUvCt(value1.getUvCt() + value2.getUvCt());
                        value1.setSvCt(value1.getSvCt() + value2.getSvCt());
                        return value1;
                    }
                }, new WindowFunction<TrafficPageViewBean2, TrafficPageViewBean2, Tuple4<String, String, String, String>, TimeWindow>() {
                    @Override
                    public void apply(Tuple4<String, String, String, String> key, TimeWindow window, Iterable<TrafficPageViewBean2> input, Collector<TrafficPageViewBean2> out) throws Exception {
                        // After reduce() the iterable holds exactly one pre-aggregated element;
                        // stamp it with the window bounds and the processing time.
                        TrafficPageViewBean2 next = input.iterator().next();
                        next.setTs(System.currentTimeMillis());
                        next.setStt(DateFormatUtil.toYmdHms(window.getStart()));
                        next.setEdt(DateFormatUtil.toYmdHms(window.getEnd()));

                        out.collect(next);
                    }
                });

        resultDS.print("即将写入clickhouse的数据：");
        //TODO 7. Write the aggregated rows to ClickHouse
        resultDS.addSink(MyClickhouseUtil.getSinkFunction("insert into dws_traffic_vc_ch_ar_is_new_page_view_window values(?,?,?,?,?,?,?,?,?,?,?)"));

        //TODO 8. Submit the job
        env.execute();
    }
}
