package com.atguigu.gmall.realtime.app.dws;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.atguigu.gmall.realtime.bean.VisitorStats;
import com.atguigu.gmall.realtime.utils.ClickHouseUtil;
import com.atguigu.gmall.realtime.utils.MyKafkaUtil;
import org.apache.flink.api.common.eventtime.SerializableTimestampAssigner;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.functions.ReduceFunction;
import org.apache.flink.api.java.functions.KeySelector;
import org.apache.flink.streaming.api.CheckpointingMode;
import org.apache.flink.streaming.api.datastream.*;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.ProcessFunction;
import org.apache.flink.streaming.api.functions.windowing.ProcessWindowFunction;
import org.apache.flink.streaming.api.windowing.assigners.TumblingEventTimeWindows;
import org.apache.flink.streaming.api.windowing.assigners.WindowAssigner;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.streaming.api.windowing.windows.TimeWindow;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;
import org.apache.flink.util.Collector;
import scala.Tuple4;

import java.text.SimpleDateFormat;
import java.time.Duration;
import java.time.Instant;
import java.time.ZoneId;
import java.time.format.DateTimeFormatter;
import java.util.Date;

/*
目标：访客主题宽表计算

 */
public class VisitorStatsApp {
    public static void main(String[] args)  throws Exception{
        //TODO 0.基本环境准备
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(3);

        env.enableCheckpointing(5000, CheckpointingMode.EXACTLY_ONCE);
        env.getCheckpointConfig().setCheckpointTimeout(60000);

        // TODO 1. 从kafka的pv，uv，跳转明细主题中获取数据
        String groupId = "visitor_stats_app";
        String pageViewSourceTopic="dwd_page_log";
        String uniqueVisistSourceTopic = "dwm_unique_visit";
        String userJumpDetailSourceTopic = "dwm_user_jump_detail";

        FlinkKafkaConsumer<String> pageViewSource = MyKafkaUtil.getKafkaSource(pageViewSourceTopic,groupId);
        FlinkKafkaConsumer<String> uniqueVisitSource = MyKafkaUtil.getKafkaSource(uniqueVisistSourceTopic,groupId);
        FlinkKafkaConsumer<String> userJumpSource = MyKafkaUtil.getKafkaSource(userJumpDetailSourceTopic,groupId);

        /*
        上面三个源分别代表：pv流，uv流，以及跳出用户流
         */
        DataStreamSource<String> pageViewDStream = env.addSource(pageViewSource);
        DataStreamSource<String> uniqueVisitDStream = env.addSource(uniqueVisitSource);
        DataStreamSource<String> userJumpDStream = env.addSource(userJumpSource);

        //TODO 2. 对读取的流进行结构转换
        // 2.1 转换pv流
        SingleOutputStreamOperator<VisitorStats> pageViewStatsDStream = pageViewDStream.map(json -> {
            JSONObject jsonObject = JSON.parseObject(json);
            return new VisitorStats("", "",
                    jsonObject.getJSONObject("common").getString("vc"),
                    jsonObject.getJSONObject("common").getString("ch"),
                    jsonObject.getJSONObject("common").getString("ar"),
                    jsonObject.getJSONObject("common").getString("is_new"),
                    0L, 1L, 0L, 0L,
                    jsonObject.getJSONObject("page").getLong("during_time"),
                    jsonObject.getLong("ts")
            );
        });

        pageViewDStream.print(">>>>> pageView: ");

        //2.2 转换uv流
        SingleOutputStreamOperator<VisitorStats> uniqueVisitStatsDStream = uniqueVisitDStream.map(json -> {
            JSONObject jsonObject = JSON.parseObject(json);
            return new VisitorStats("", "",
                    jsonObject.getJSONObject("common").getString("vc"),
                    jsonObject.getJSONObject("common").getString("ch"),
                    jsonObject.getJSONObject("common").getString("ar"),
                    jsonObject.getJSONObject("common").getString("is_new"),
                    1L, 0L, 0L, 0L, 0L,
                    jsonObject.getLong("ts")
            );
        });

        uniqueVisitStatsDStream.print(">>>>> uniqueView : ");

        //2.3 转换sv 流（进入）
        /*
        如何判断进入： dwd_page_log；即首次进入该页面
         */
        SingleOutputStreamOperator<VisitorStats> sessionVisitDStream = pageViewDStream.process(new ProcessFunction<String, VisitorStats>() {

            @Override
            public void processElement(String json, Context context, Collector<VisitorStats> collector) throws Exception {
                JSONObject jsonObj = JSON.parseObject(json);
                String lastPageId = jsonObj.getJSONObject("page").getString("last_page_id");
                if (lastPageId == null || lastPageId.length() == 0) {
                    // System.out.println("sc:"+json);
                    VisitorStats visitorStats = new VisitorStats("", "",
                            jsonObj.getJSONObject("common").getString("vc"),
                            jsonObj.getJSONObject("common").getString("ch"),
                            jsonObj.getJSONObject("common").getString("ar"),
                            jsonObj.getJSONObject("common").getString("is_new"),
                            0L, 0L, 1L, 0L, 0L, jsonObj.getLong("ts"));
                    collector.collect(visitorStats);
                }
            }
        });

        //2.4 跳出流
        SingleOutputStreamOperator<VisitorStats> userJumpStatsDStream = userJumpDStream.map(json -> {
            JSONObject jsonObject = JSON.parseObject(json);
            return new VisitorStats("", "",
                    jsonObject.getJSONObject("common").getString("vc"),
                    jsonObject.getJSONObject("common").getString("ch"),
                    jsonObject.getJSONObject("common").getString("ar"),
                    jsonObject.getJSONObject("common").getString("is_new"),
                    0L, 0L, 0L, 1L, 0L,
                    jsonObject.getLong("ts")
            );
        });


        //TODO 3.将4条流合并起来
        DataStream<VisitorStats> unionDstream = pageViewStatsDStream.union(uniqueVisitStatsDStream,
                sessionVisitDStream,
                userJumpStatsDStream);


        unionDstream.print("unionDS>>>> ");
        //TODO 4 根据维度进行聚合
        // 4.1 设置水印
        SingleOutputStreamOperator<VisitorStats> visitorStatesDS = unionDstream.assignTimestampsAndWatermarks(WatermarkStrategy.<VisitorStats>forBoundedOutOfOrderness(Duration.ofSeconds(1)).withTimestampAssigner(
                new SerializableTimestampAssigner<VisitorStats>() {
                    @Override
                    public long extractTimestamp(VisitorStats visitorStats, long l) {
                        return visitorStats.getTs();
                    }
                }
        ));

        //TODO  5 选取四个维度，
        KeyedStream<VisitorStats, Tuple4<String, String, String, String>> visitorStatsTuple4KeyedStream = visitorStatesDS.keyBy(new KeySelector<VisitorStats, Tuple4<String, String, String, String>>() {
            //版本，渠道，地区，新老
            @Override
            public Tuple4<String, String, String, String> getKey(VisitorStats visitorStats) throws Exception {
                return new Tuple4<>(visitorStats.getVc(),
                        visitorStats.getCh(),
                        visitorStats.getAr(),
                        visitorStats.getIs_new()
                );
            }
        });
        visitorStatsTuple4KeyedStream.print("tuple4 >>>>> ");

        //TODO 6 开窗  时间窗口为10秒
        WindowedStream<VisitorStats, Tuple4<String, String, String, String>, TimeWindow> windowedStream = visitorStatsTuple4KeyedStream.window(TumblingEventTimeWindows.of(Time.seconds(10)));

        //TODO 7 聚合统计
        //当我们既想访问窗口里的元数据，又不想缓存窗口里的所有数据时，可以将ProcessWindowFunction与增量计算函数相reduce和aggregate结合。对于一个窗口来说，Flink先增量计算，
        // 窗口关闭前，将增量计算结果发送给ProcessWindowFunction作为输入再进行处理。
        // 访问窗口里的元数据： processWindowFunction; reduce先增量计算；在窗口触发关闭操作时，将结果发送到ProcessWindowFunction作为输入再进行处理
        // 这样ProcessWindowFunction 就不会缓存窗口中的所有数据； 而是聚合后的数据


        //reduce的数据即窗口内的数据和
        SingleOutputStreamOperator<VisitorStats> reduceDstream = windowedStream.reduce(new ReduceFunction<VisitorStats>() {
                                                                                           @Override
                                                                                           public VisitorStats reduce(VisitorStats t1, VisitorStats t2) throws Exception {
                                                                                               //第一个为状态，第二个为新来的
                                                                                               t1.setPv_ct(t1.getPv_ct() + t2.getPv_ct());
                                                                                               t1.setUv_ct(t1.getUv_ct() + t2.getUv_ct());
                                                                                               t1.setSv_ct(t1.getSv_ct() + t2.getSv_ct());
                                                                                               t1.setUj_ct(t1.getUj_ct() + t2.getUj_ct());
                                                                                               return t1;
                                                                                           }
                                                                                       },
                //对一个窗口内的元素进行处理，窗口内的元素缓存在Iterable<IN>，进行处理后输出到Collector<OUT>中
                //   * 我们可以输出一到多个结果
                //使用时，Flink将某个Key下某个窗口的所有元素都缓存在Iterable<IN>中，我们需要对其进行处理，然后用Collector<OUT>收集输出。我们可以使用Context获取窗口内更多的信息，包括时间、状态、迟到数据发送位置等。

                new ProcessWindowFunction<VisitorStats, VisitorStats, Tuple4<String, String, String, String>, TimeWindow>() {
                    @Override
                    public void process(Tuple4<String, String, String, String> tuple4, Context context, Iterable<VisitorStats> visitorStatsIn, Collector<VisitorStats> collector) throws Exception {
                        //补充时间字段
                        SimpleDateFormat simpleDateFormat = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
                        // 将中间结果全部输出吗？
                        for(VisitorStats vs : visitorStatsIn){
                            //获取窗口时间作为起始时间，将窗口时间戳格式化为标准时间
                            String startDate = simpleDateFormat.format(new
                                    Date(context.window().getStart()));
                            String endDate = simpleDateFormat.format(new
                                    Date(context.window().getEnd()));
                            System.out.println("window time: "+ startDate +"  >>> " + endDate);
                            vs.setStt(startDate);
                            vs.setEdt(endDate);
                            collector.collect(vs);
                        }

                    }
                }

        );

        reduceDstream.print("reduceDstream >>>  ");



        //TODO 9 向Clickhouse中写入数据
        reduceDstream.addSink(
                ClickHouseUtil.getJdbcSink("insert into visitor_stats_2021 values(?,?,?,?,?,?,?,?,?,?,?,?)")
        );

        env.execute();

    }
}
