package com.atguigu.app.dws;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.atguigu.bean.UniqueVisitStats;
import com.atguigu.utils.MyClickHouseUtil;
import com.atguigu.utils.MyKafkaUtil;
import org.apache.flink.api.common.eventtime.SerializableTimestampAssigner;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.functions.ReduceFunction;
import org.apache.flink.api.common.functions.RichFilterFunction;
import org.apache.flink.api.common.state.StateTtlConfig;
import org.apache.flink.api.common.state.ValueState;
import org.apache.flink.api.common.state.ValueStateDescriptor;
import org.apache.flink.api.common.time.Time;
import org.apache.flink.api.java.functions.KeySelector;
import org.apache.flink.api.java.tuple.Tuple4;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.KeyedStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.datastream.WindowedStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.windowing.WindowFunction;
import org.apache.flink.streaming.api.windowing.assigners.TumblingEventTimeWindows;
import org.apache.flink.streaming.api.windowing.windows.TimeWindow;
import org.apache.flink.util.Collector;

import java.text.SimpleDateFormat;
import java.time.Duration;

//Data flow: web/app -> Nginx -> log server (xx.log) -> Flume -> Kafka(ODS) -> FlinkApp -> Kafka(DWD) -> FlinkApp -> ClickHouse
//Processes: Mock -> Flume(f1.sh) -> Kafka(ZK) -> BaseLogApp -> Kafka(ZK) -> UniqueVisit10sApp -> ClickHouse(ZK)
/**
 * Computes unique-visitor (UV/daily-active) counts over 10-second event-time windows.
 *
 * <p>Reads page-log records from the Kafka DWD topic, deduplicates devices (mid) per day
 * via keyed state, aggregates per (ar, ch, is_new, vc) dimension combination in tumbling
 * event-time windows, and sinks the results to ClickHouse.
 */
public class UniqueVisit10sApp {

    public static void main(String[] args) throws Exception {

        //TODO 1. Set up the execution environment
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);  // In production, set parallelism to the Kafka partition count

        //        //1.1 Enable checkpointing
        //        env.enableCheckpointing(5 * 60000L);
        //        env.getCheckpointConfig().setCheckpointTimeout(5 * 60000L);
        //        env.getCheckpointConfig().setMinPauseBetweenCheckpoints(10000L);
        //        env.getCheckpointConfig().setMaxConcurrentCheckpoints(2);
        //        env.setRestartStrategy(RestartStrategies.fixedDelayRestart(3,5000L));
        //
        //        //1.2 Configure the state backend
        //        env.setStateBackend(new FsStateBackend("hdfs://hadoop102:8020/xxxx/xxx"));

        //TODO 2. Create a stream from the Kafka dwd_page_log topic
        String sourceTopic = "dwd_page_log";
        String groupId = "unique_visit_10s_app_210927";
        DataStreamSource<String> kafkaDS = env.addSource(MyKafkaUtil.getKafkaConsumer(sourceTopic, groupId));

        //TODO 3. Parse records into JSON objects and assign event-time watermarks
        // NOTE(review): JSON::parseObject will throw on malformed records and fail the job;
        // consider routing dirty data to a side output if the topic is not guaranteed clean.
        SingleOutputStreamOperator<JSONObject> jsonObjDS = kafkaDS.map(JSON::parseObject);
        SingleOutputStreamOperator<JSONObject> jsonObjWithWMDS = jsonObjDS.assignTimestampsAndWatermarks(WatermarkStrategy.<JSONObject>forBoundedOutOfOrderness(Duration.ofSeconds(2)).withTimestampAssigner(new SerializableTimestampAssigner<JSONObject>() {
            @Override
            public long extractTimestamp(JSONObject element, long recordTimestamp) {
                // "ts" is the epoch-millisecond event timestamp emitted upstream
                return element.getLong("ts");
            }
        }));

        //TODO 4. Key the stream by device id (mid)
        KeyedStream<JSONObject, String> keyedStream = jsonObjWithWMDS.keyBy(jsonObj -> jsonObj.getJSONObject("common").getString("mid"));

        //TODO 5. Deduplicate per mid with keyed state (keep only the first visit of each day)
        SingleOutputStreamOperator<JSONObject> filterDS = keyedStream.filter(new RichFilterFunction<JSONObject>() {

            // Last visit date ("yyyy-MM-dd") seen for this mid; expires after 1 day of TTL
            private ValueState<String> lastVisitDateState;
            // One instance per parallel subtask, so the non-thread-safe SimpleDateFormat is OK here
            private SimpleDateFormat sdf;

            @Override
            public void open(Configuration parameters) throws Exception {

                ValueStateDescriptor<String> stateDescriptor = new ValueStateDescriptor<>("last-visit", String.class);

                // State TTL of one day; OnCreateAndWrite resets the timer on every state update.
                // NOTE(review): TTL uses processing time by default, so expiry is approximate
                // relative to the event-time day boundary — acceptable because the date string
                // comparison below is the real dedup guard.
                StateTtlConfig ttlConfig = new StateTtlConfig.Builder(Time.days(1))
                        .setUpdateType(StateTtlConfig.UpdateType.OnCreateAndWrite)
                        .build();
                stateDescriptor.enableTimeToLive(ttlConfig);

                lastVisitDateState = getRuntimeContext().getState(stateDescriptor);
                sdf = new SimpleDateFormat("yyyy-MM-dd");
            }

            @Override
            public boolean filter(JSONObject value) throws Exception {

                // A session entry page has no previous page; only those can start a new visit
                String lastPageId = value.getJSONObject("page").getString("last_page_id");
                if (lastPageId == null) {

                    String lastVisitDate = lastVisitDateState.value();
                    // DateFormat.format(Object) accepts a Number and treats it as epoch millis,
                    // so passing the Long "ts" directly is equivalent to new Date(ts)
                    String curDate = sdf.format(value.getLong("ts"));

                    // Keep the record if this mid has no state yet or last visited on another day
                    if (lastVisitDate == null || !lastVisitDate.equals(curDate)) {
                        lastVisitDateState.update(curDate);
                        return true;
                    }
                }
                return false;
            }
        });

        // Dedup debug output
        //filterDS.print("filterDS>>>>>>>>");

        //TODO 6. Map to the JavaBean: 4 dimensions, the UV measure, event ts, window bounds (filled later)
        SingleOutputStreamOperator<UniqueVisitStats> uniqueVisitStatsDS = filterDS.map(jsonObj -> {

            JSONObject common = jsonObj.getJSONObject("common");

            // stt/edt are left empty here and populated from the window in step 7.3
            return new UniqueVisitStats("", "",
                    common.getString("vc"),
                    common.getString("ch"),
                    common.getString("ar"),
                    common.getString("is_new"),
                    1L,
                    jsonObj.getLong("ts"));
        });

        //TODO 7. Key, window, and aggregate
        //7.1 Key by the 4 dimensions (area, channel, is_new, version)
        KeyedStream<UniqueVisitStats, Tuple4<String, String, String, String>> uniqueVisitStatsTuple4KeyedStream = uniqueVisitStatsDS.keyBy(new KeySelector<UniqueVisitStats, Tuple4<String, String, String, String>>() {
            @Override
            public Tuple4<String, String, String, String> getKey(UniqueVisitStats value) throws Exception {
                return new Tuple4<>(value.getAr(),
                        value.getCh(),
                        value.getIs_new(),
                        value.getVc());
            }
        });

        //7.2 10-second tumbling event-time window (fully-qualified Time to avoid the
        // clash with the imported org.apache.flink.api.common.time.Time used for TTL)
        WindowedStream<UniqueVisitStats, Tuple4<String, String, String, String>, TimeWindow> windowedStream = uniqueVisitStatsTuple4KeyedStream.window(TumblingEventTimeWindows.of(org.apache.flink.streaming.api.windowing.time.Time.seconds(10)));

        //7.3 Aggregate: incremental reduce (fast, tiny state) combined with a WindowFunction
        // (gives access to the window metadata) — best of both approaches.
        SingleOutputStreamOperator<UniqueVisitStats> resultDS = windowedStream.reduce(new ReduceFunction<UniqueVisitStats>() {
            @Override
            public UniqueVisitStats reduce(UniqueVisitStats value1, UniqueVisitStats value2) throws Exception {
                // Sum the UV counts; reusing value1 as the accumulator is safe in Flink
                value1.setUv_ct(value1.getUv_ct() + value2.getUv_ct());
                return value1;
            }
        }, new WindowFunction<UniqueVisitStats, UniqueVisitStats, Tuple4<String, String, String, String>, TimeWindow>() {

            // Hoisted out of apply() so it is not re-allocated on every window firing;
            // SimpleDateFormat is Serializable and each subtask gets its own copy.
            private final SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");

            @Override
            public void apply(Tuple4<String, String, String, String> key, TimeWindow window, Iterable<UniqueVisitStats> input, Collector<UniqueVisitStats> out) throws Exception {

                // After the incremental reduce the iterable holds exactly one element
                UniqueVisitStats uniqueVisitStats = input.iterator().next();

                // Fill in the window start/end timestamps
                uniqueVisitStats.setStt(sdf.format(window.getStart()));
                uniqueVisitStats.setEdt(sdf.format(window.getEnd()));

                out.collect(uniqueVisitStats);
            }
        });

        //TODO 8. Sink the aggregated stats to ClickHouse
        resultDS.print("resultDS>>>>>>>>>");
        resultDS.addSink(MyClickHouseUtil.getClickHouseSink("insert into dws_uv_vc_ch_isnew_ar_10s_210927 values(?,?,?,?,?,?,?,?)"));

        //TODO 9. Launch the job
        env.execute("UniqueVisit10sApp");

    }

}
