package com.raylu.realtime.app.dws;

import com.alibaba.fastjson.JSON;
import com.raylu.realtime.bean.PageBean;
import com.raylu.realtime.bean.VisitorStats;
import com.raylu.realtime.utils.ClickHouseUtil;
import com.raylu.realtime.utils.DateUtil;
import com.raylu.realtime.utils.KafkaSourceUtil;
import com.raylu.realtime.utils.PropertiesUtil;
import org.apache.flink.api.common.eventtime.SerializableTimestampAssigner;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.functions.ReduceFunction;
import org.apache.flink.api.java.functions.KeySelector;
import org.apache.flink.api.java.tuple.Tuple4;
import org.apache.flink.connector.jdbc.JdbcSink;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.windowing.ProcessWindowFunction;
import org.apache.flink.streaming.api.windowing.assigners.TumblingEventTimeWindows;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.streaming.api.windowing.windows.TimeWindow;
import org.apache.flink.streaming.api.windowing.windows.Window;
import org.apache.flink.util.Collector;
import redis.clients.jedis.Tuple;

import java.time.Duration;
import java.util.Properties;

/**
 * Description:
 * <p>Multi-dimensional visitor statistics keyed by app version, channel, region and new-visitor flag.</p>
 * <p>Computes per-visitor PV, UV, UJO (user jump / bounce count), SV (session-visit count) and DUR_SUM (total page duration).</p>
 * Created by lucienoz on 2022/1/4.
 * Copyright © 2022 lucienoz. All rights reserved.
 */
public class VisitorStatsApp {

    /**
     * Entry point. Builds and executes the Flink job:
     * subscribes three Kafka topics (page log, unique-visitor, user-jump), normalizes each
     * record into a {@code VisitorStats} bean carrying a single counter, unions the streams,
     * keys by the (vc, ch, ar, is_new) dimension tuple, aggregates in 10-second event-time
     * tumbling windows, and sinks the results to the ClickHouse table {@code dws_visitor_stats}.
     *
     * @param args unused command-line arguments
     * @throws Exception if job construction or execution fails
     */
    public static void main(String[] args) throws Exception {
        Properties load = PropertiesUtil.load("config.properties");
        //TODO 1. Prepare the execution environment
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(4);
        //TODO 2. Checkpoint configuration (intentionally disabled here; re-enable for production)
//        env.enableCheckpointing(5000L);
//        env.getCheckpointConfig().setCheckpointingMode(CheckpointingMode.EXACTLY_ONCE);
//        env.getCheckpointConfig().setCheckpointTimeout(60*1000L);
//        env.getCheckpointConfig().enableExternalizedCheckpoints(CheckpointConfig.ExternalizedCheckpointCleanup.RETAIN_ON_CANCELLATION);
//        env.setRestartStrategy(RestartStrategies.failureRateRestart(3, Time.seconds(3L),Time.days(3L)));
//        env.setStateBackend(new FsStateBackend(load.getProperty("visitor.stats.app.fsstatebackend.url")));
//        System.setProperty("HADOOP_USER_NAME", "raylu");

        String groupId = load.getProperty("visitor.stats.app.kafka.group-id");

        //TODO 3. Subscribe the source streams and normalize each into a VisitorStats POJO
        //TODO 3.1. Topic DWD_PAGE_LOG: contributes PV, SV and DUR_SUM
        SingleOutputStreamOperator<VisitorStats> visitorStatsFromPageDS = env
                .addSource(KafkaSourceUtil.getKafkaSource(load.getProperty("visitor.stats.app.kafka.source-topic1"), groupId))
                .map(VisitorStatsApp::pageToStats);
        //TODO 3.2. Topic DWM_UNIQUE_VISITOR: contributes UV
        SingleOutputStreamOperator<VisitorStats> visitorStatsFromUV = env
                .addSource(KafkaSourceUtil.getKafkaSource(load.getProperty("visitor.stats.app.kafka.source-topic2"), groupId))
                .map(r -> counterToStats(r, 0L, 1L, 0L));
        //TODO 3.3. Topic DWM_USER_JUMP_DETAIL: contributes UJO (user jump / bounce count)
        SingleOutputStreamOperator<VisitorStats> visitorStatsFromUJ = env
                .addSource(KafkaSourceUtil.getKafkaSource(load.getProperty("visitor.stats.app.kafka.source-topic3"), groupId))
                .map(r -> counterToStats(r, 0L, 0L, 1L));

        //TODO 4. Union the streams into one and assign watermarks (1 minute out-of-orderness)
        SingleOutputStreamOperator<VisitorStats> resultDS = visitorStatsFromPageDS
                .union(visitorStatsFromUV, visitorStatsFromUJ)
                .assignTimestampsAndWatermarks(WatermarkStrategy.<VisitorStats>forBoundedOutOfOrderness(Duration.ofMinutes(1L))
                        .withTimestampAssigner(new SerializableTimestampAssigner<VisitorStats>() {
                            @Override
                            public long extractTimestamp(VisitorStats element, long recordTimestamp) {
                                return element.getTs();
                            }
                        }))
                //TODO 5. Key by the dimension tuple and pre-aggregate within the window.
                // NOTE: an anonymous KeySelector is used instead of a lambda because Flink
                // cannot recover Tuple4's generic parameters from a lambda (type erasure).
                .keyBy(new KeySelector<VisitorStats, Tuple4<String, String, String, String>>() {
                    @Override
                    public Tuple4<String, String, String, String> getKey(VisitorStats r) throws Exception {
                        return Tuple4.of(r.getVc(), r.getCh(), r.getAr(), r.getIs_new());
                    }
                })
                .window(TumblingEventTimeWindows.of(Time.seconds(10L)))
                .reduce(new ReduceFunction<VisitorStats>() {
                    /** Incrementally sums all five metrics into the left-hand accumulator. */
                    @Override
                    public VisitorStats reduce(VisitorStats value1, VisitorStats value2) throws Exception {
                        value1.setPv_cnt(value1.getPv_cnt() + value2.getPv_cnt());
                        value1.setUv_cnt(value1.getUv_cnt() + value2.getUv_cnt());
                        value1.setUjo_cnt(value1.getUjo_cnt() + value2.getUjo_cnt());
                        value1.setSv_cnt(value1.getSv_cnt() + value2.getSv_cnt());
                        value1.setDur_sum(value1.getDur_sum() + value2.getDur_sum());

                        return value1;
                    }
                }, new ProcessWindowFunction<VisitorStats, VisitorStats, Tuple4<String, String, String, String>, TimeWindow>() {

                    /** Stamps the (pre-aggregated) window result with window bounds and processing time. */
                    @Override
                    public void process(Tuple4<String, String, String, String> key, ProcessWindowFunction<VisitorStats, VisitorStats, Tuple4<String, String, String, String>, TimeWindow>.Context context, Iterable<VisitorStats> elements, Collector<VisitorStats> out) throws Exception {
                        // Window bounds and the wall-clock stamp are identical for every element
                        // in this invocation, so compute them once outside the loop.
                        String stt = DateUtil.getFormatDateTime(context.window().getStart());
                        String edt = DateUtil.getFormatDateTime(context.window().getEnd());
                        long processingTime = System.currentTimeMillis();

                        for (VisitorStats element : elements) {
                            element.setStt(stt);
                            element.setEdt(edt);
                            element.setTs(processingTime);
                            out.collect(element);
                        }
                    }
                });

        //TODO 6. Sink the aggregated rows to ClickHouse
        resultDS
                .addSink(ClickHouseUtil.getJdbcSink("insert into dws_visitor_stats(vc,ch,ar,is_new,stt,edt,ts,pv_cnt,uv_cnt,ujo_cnt,sv_cnt,dur_sum)values(?,?,?,?,?,?,?,?,?,?,?,?)"));
        resultDS.print();
        env.execute("VisitorStatsApp");
    }

    /**
     * Parses a DWD page-log JSON record into a VisitorStats bean carrying the page metrics:
     * pv_cnt = 1, sv_cnt = 1 only when last_page_id is absent (a new session entry),
     * and dur_sum from during_time (defaulting to 0 when the field is missing).
     *
     * @param json raw Kafka record (JSON page log)
     * @return a VisitorStats seeded with this record's PV/SV/DUR_SUM contribution
     */
    private static VisitorStats pageToStats(String json) {
        PageBean pageBean = JSON.parseObject(json, PageBean.class);
        long svCnt = pageBean.getLast_page_id() != null ? 0L : 1L;
        long durSum = Long.parseLong(pageBean.getDuring_time() != null ? pageBean.getDuring_time() : "0");
        return new VisitorStats(pageBean.getVc(), pageBean.getCh(), pageBean.getAr(), pageBean.getIs_new(),
                "", "", pageBean.getTs(), 1L, 0L, 0L, svCnt, durSum);
    }

    /**
     * Parses a JSON record into a VisitorStats bean carrying exactly one counter
     * (used by the UV and UJO streams, which each contribute a single metric).
     *
     * @param json   raw Kafka record (JSON, PageBean-shaped)
     * @param pvCnt  page-view contribution (0 or 1)
     * @param uvCnt  unique-visitor contribution (0 or 1)
     * @param ujoCnt user-jump contribution (0 or 1)
     * @return a VisitorStats seeded with the given counters and zero SV/DUR_SUM
     */
    private static VisitorStats counterToStats(String json, long pvCnt, long uvCnt, long ujoCnt) {
        PageBean pageBean = JSON.parseObject(json, PageBean.class);
        return new VisitorStats(pageBean.getVc(), pageBean.getCh(), pageBean.getAr(), pageBean.getIs_new(),
                "", "", pageBean.getTs(), pvCnt, uvCnt, ujoCnt, 0L, 0L);
    }
}
