package com.atguigu.app.dws;

import com.alibaba.fastjson.JSONObject;
import com.atguigu.Func.MyWindowFunction;
import com.atguigu.Util.ClickHouseUtil;
import com.atguigu.Util.DateFormatUtil;
import com.atguigu.Util.MyKafkaUtil;
import com.atguigu.bean.TrafficPageViewBean;
import org.apache.flink.api.common.eventtime.SerializableTimestampAssigner;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.functions.ReduceFunction;
import org.apache.flink.api.common.functions.RichMapFunction;
import org.apache.flink.api.common.state.ValueState;
import org.apache.flink.api.common.state.ValueStateDescriptor;
import org.apache.flink.api.java.functions.KeySelector;
import org.apache.flink.api.java.tuple.Tuple4;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.KeyedStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.datastream.WindowedStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.windowing.WindowFunction;
import org.apache.flink.streaming.api.windowing.assigners.TumblingEventTimeWindows;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.streaming.api.windowing.windows.TimeWindow;
import org.apache.flink.util.Collector;

import java.time.Duration;

/**
 * @author hjy
 * @create 2023/3/15 11:59
 */

/**
 * Traffic domain: page-view summary per window, at version(vc) / channel(ch) / area(ar) / new-visitor(is_new) granularity.
 * Data flow: web/app -> log server(file) -> flume -> Kafka(ODS) -> FlinkApp -> Kafka(DWD) -> FlinkApp -> ClickHouse(DWS)
 * Programs : Mock -> file -> f1.sh -> Kafka(ZK) -> BaseLogApp -> Kafka(ZK) -> Dws02_TrafficVcChArIsNewPageViewWindow -> ClickHouse(ZK)
 */
public class Dws02_TrafficVcChArIsNewPageViewWindow {
    public static void main(String[] args) throws Exception {
        // TODO 1: create the stream execution environment.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);
        // Checkpointing / state-backend settings are kept here for production use;
        // they are disabled during local testing.
//        env.enableCheckpointing(5000L);
//        env.getCheckpointConfig().setCheckpointStorage("hdfs://hadoop102:8020/gmall-flink/check");
//        env.getCheckpointConfig().setCheckpointTimeout(60000L);
//        env.setStateBackend(new HashMapStateBackend());
//        System.setProperty("HADOOP_USER_NAME","atguigu");

        // TODO 2: consume the DWD page-log topic from Kafka.
        String topic = "dwd_traffic_page_log";
        String groupId = "vc_ch_ar_isNew";
        DataStreamSource<String> kafkaDS = env.addSource(MyKafkaUtil.getFlinkKafkaConsumer(topic, groupId));

        // TODO 3: parse each raw record into a JSONObject.
        // NOTE(review): a malformed record would fail the job here; upstream BaseLogApp
        // presumably guarantees valid JSON — confirm before enabling checkpointing.
        SingleOutputStreamOperator<JSONObject> jsonObjDS = kafkaDS.map(JSONObject::parseObject);

        // TODO 4: key by device id (mid) so the per-device "last visit date" state
        // used for UV detection is scoped to a single device.
        KeyedStream<JSONObject, String> keyedByMidDS =
                jsonObjDS.keyBy(value -> value.getJSONObject("common").getString("mid"));

        // TODO 5: convert each page log to a TrafficPageViewBean, computing the
        // unique-visitor (uv) and session-start (sv) flags per record.
        // Anonymous class (not a lambda) so Flink can reify the generic types.
        SingleOutputStreamOperator<TrafficPageViewBean> beanDS = keyedByMidDS.map(
                new RichMapFunction<JSONObject, TrafficPageViewBean>() {
                    // Last visit date string per mid; a date change marks a new unique visitor.
                    private ValueState<String> lastVisitDt;

                    @Override
                    public void open(Configuration parameters) throws Exception {
                        lastVisitDt = getRuntimeContext()
                                .getState(new ValueStateDescriptor<>("last_dt", String.class));
                    }

                    @Override
                    public TrafficPageViewBean map(JSONObject value) throws Exception {
                        // Unique visitor: first record of this mid on the current date.
                        String lastDt = lastVisitDt.value();
                        Long ts = value.getLong("ts");
                        String curDt = DateFormatUtil.toDate(ts);
                        long uv = 0L;
                        if (lastDt == null || !lastDt.equals(curDt)) {
                            uv = 1L;
                            lastVisitDt.update(curDt);
                        }

                        // New session: a page view with no previous page id starts a session.
                        JSONObject page = value.getJSONObject("page");
                        long sv = page.getString("last_page_id") == null ? 1L : 0L;

                        JSONObject common = value.getJSONObject("common");
                        return new TrafficPageViewBean(
                                "",                            // window start, filled in TODO 7
                                "",                            // window end, filled in TODO 7
                                common.getString("vc"),
                                common.getString("ch"),
                                common.getString("ar"),
                                common.getString("is_new"),
                                uv,
                                sv,
                                1L,                            // every record counts as one page view
                                page.getLong("during_time"),
                                ts
                        );
                    }
                });

        // TODO 6: assign event-time timestamps with a 2-second bounded-out-of-orderness watermark.
        SingleOutputStreamOperator<TrafficPageViewBean> beanWithWmDS = beanDS.assignTimestampsAndWatermarks(
                WatermarkStrategy.<TrafficPageViewBean>forBoundedOutOfOrderness(Duration.ofSeconds(2))
                        .withTimestampAssigner(new SerializableTimestampAssigner<TrafficPageViewBean>() {
                            @Override
                            public long extractTimestamp(TrafficPageViewBean element, long recordTimestamp) {
                                return element.getTs();
                            }
                        }));

        // TODO 7: key by (ar, ch, is_new, vc) and open a 10-second tumbling event-time window.
        WindowedStream<TrafficPageViewBean, Tuple4<String, String, String, String>, TimeWindow> windowDS =
                beanWithWmDS.keyBy(new KeySelector<TrafficPageViewBean, Tuple4<String, String, String, String>>() {
                    @Override
                    public Tuple4<String, String, String, String> getKey(TrafficPageViewBean value) throws Exception {
                        return Tuple4.of(value.getAr(), value.getCh(), value.getIsNew(), value.getVc());
                    }
                }).window(TumblingEventTimeWindows.of(Time.seconds(10)));

        // Incrementally sum the four metrics, then stamp window start/end in the window function.
        SingleOutputStreamOperator<TrafficPageViewBean> resultDS = windowDS.reduce(
                new ReduceFunction<TrafficPageViewBean>() {
                    @Override
                    public TrafficPageViewBean reduce(TrafficPageViewBean value1, TrafficPageViewBean value2) throws Exception {
                        value1.setPvCt(value1.getPvCt() + value2.getPvCt());
                        value1.setSvCt(value1.getSvCt() + value2.getSvCt());
                        value1.setUvCt(value1.getUvCt() + value2.getUvCt());
                        value1.setDurSum(value1.getDurSum() + value2.getDurSum());
                        return value1;
                    }
                },
                new WindowFunction<TrafficPageViewBean, TrafficPageViewBean, Tuple4<String, String, String, String>, TimeWindow>() {
                    @Override
                    public void apply(Tuple4<String, String, String, String> key, TimeWindow window,
                                      Iterable<TrafficPageViewBean> input, Collector<TrafficPageViewBean> out) throws Exception {
                        // The incremental reduce leaves exactly one pre-aggregated element
                        // per window, so iterator().next() is safe here.
                        TrafficPageViewBean bean = input.iterator().next();
                        out.collect(MyWindowFunction.getJavabeanFields(bean, window));
                    }
                });

        resultDS.print("resultDS>>>>>>>>>>>>>>>>>");

        // TODO 8: sink the aggregated beans to ClickHouse.
        resultDS.addSink(ClickHouseUtil.getSinkFunction("insert into dws_traffic_vc_ch_ar_is_new_page_view_window values(?,?,?,?,?,?,?,?,?,?,?)"));

        // TODO 9: submit the job.
        env.execute();
    }
}
