package com.zhang.gmall.realtime.app.dwd;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONArray;
import com.alibaba.fastjson.JSONObject;
import com.zhang.gmall.realtime.utils.MyKafkaUtil;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.functions.RichMapFunction;
import org.apache.flink.api.common.restartstrategy.RestartStrategies;
import org.apache.flink.api.common.state.ListState;
import org.apache.flink.api.common.state.ListStateDescriptor;
import org.apache.flink.api.common.state.ValueState;
import org.apache.flink.api.common.state.ValueStateDescriptor;
import org.apache.flink.api.common.typeinfo.Types;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.runtime.state.filesystem.FsStateBackend;
import org.apache.flink.streaming.api.CheckpointingMode;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.CheckpointConfig;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.ProcessFunction;
import org.apache.flink.util.Collector;
import org.apache.flink.util.OutputTag;

import java.text.SimpleDateFormat;
import java.util.Date;

/**
 * @title: 日志数据分流
 * @author: zhang
 * @date: 2022/3/2 18:55
 * 启动日志：启动侧输出流
 * 曝光日志：曝光侧输出流
 * 页面日志：主流
 * 需要启动的进程：zk、kafka、hdfs、logger（nginx + 日志采集服务）、BaseLogApp
 * 执行流程：
 * -- 模拟生成日志jar
 * -- 将生成日志发送nginx
 * -- nginx接受日志后，进行负载均衡，将日志发送到三台服务器
 * -- 日志服务器接收到请求后，对日志进行处理（打印、落盘、发送到kafka----ods-base-log-2022主题）
 * -- BaseLogApp从ods-base-log-2022读取数据
 * -- 新老用户修复、日志分流写到不同kafka主题
 */
public class BaseLogApp {
    public static void main(String[] args) throws Exception {
        //todo 1. Create the stream execution environment
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(4);

        //todo 2. Checkpoint configuration (commented out for local runs; enable in production)
    /*  //2.1 Enable checkpointing
        env.enableCheckpointing(5000L, CheckpointingMode.EXACTLY_ONCE);
        //2.2 Checkpoint timeout
        env.getCheckpointConfig().setCheckpointTimeout(60 * 1000L);
        //2.3 Retain externalized checkpoints when the job is cancelled
        env.getCheckpointConfig().enableExternalizedCheckpoints(CheckpointConfig.ExternalizedCheckpointCleanup.RETAIN_ON_CANCELLATION);
        //2.4 Restart strategy
        //fixed-delay restart
        env.setRestartStrategy(RestartStrategies.fixedDelayRestart(3, 3000L));
        //failure-rate restart
        env.setRestartStrategy(RestartStrategies.failureRateRestart(3, Time.milliseconds(3000), Time.days(30)));
        //2.5 Minimum pause between checkpoints
        env.getCheckpointConfig().setMinPauseBetweenCheckpoints(3000L);
        //2.6 State backend
        env.setStateBackend(new FsStateBackend("hdfs://hadoop102:8020/flink/gmall-ck"));
        //2.7 Hadoop user used for HDFS access
        System.setProperty("HADOOP_USER_NAME", "zhang");*/

        //todo 3. Read raw log data from Kafka
        String topic = "ods_base_log_2022";
        String groupId = "base_log_app_group_2022";
        //3.2 Wrap the Kafka consumer as a source stream
        DataStreamSource<String> kafkaDS = env.addSource(MyKafkaUtil.getKafkaSource(topic, groupId));

        //todo 4. Parse each record into a JSONObject
        SingleOutputStreamOperator<JSONObject> jsonObjDS = kafkaDS.map(new MapFunction<String, JSONObject>() {
            @Override
            public JSONObject map(String value) throws Exception {
                return JSON.parseObject(value);
            }
        });

        //todo 5. Repair the new-visitor flag (is_new)
        // Keyed by device id (mid); per-key state remembers the first visit date,
        // so a device that cleared its local data and was re-flagged "new" can be corrected.
        SingleOutputStreamOperator<JSONObject> midWithNewFlagDS = jsonObjDS
                .keyBy(r -> r.getJSONObject("common").getString("mid"))
                .map(new RichMapFunction<JSONObject, JSONObject>() {
                    // Keyed state holding the first-visit date of the current mid.
                    // NOTE: must be initialized in open(), not at declaration — the
                    // runtime context does not exist before the task starts.
                    private ValueState<String> firstTime;
                    // Formatter for the ts field. SimpleDateFormat is not thread-safe,
                    // but each parallel subtask owns its own instance here.
                    private SimpleDateFormat simpleDateFormat;

                    @Override
                    public void open(Configuration parameters) throws Exception {
                        firstTime = getRuntimeContext().getState(
                                new ValueStateDescriptor<String>(
                                        "last-time", Types.STRING
                                )
                        );
                        simpleDateFormat = new SimpleDateFormat("yyyy-MM-dd");
                    }

                    @Override
                    public JSONObject map(JSONObject value) throws Exception {
                        JSONObject common = value.getJSONObject("common");
                        String is_new = common.getString("is_new");
                        Long ts = value.getLong("ts");
                        // Only records flagged as new visitors ("1") need verification;
                        // records already marked "0" are trusted as-is.
                        if ("1".equals(is_new)) {
                            String stateTime = firstTime.value();
                            String currTime = simpleDateFormat.format(new Date(ts));
                            if (stateTime != null && stateTime.length() != 0) {
                                // A first-visit date already exists in state, so this mid
                                // has been seen before: correct the flag to "0".
                                // Fix: write the String "0" (not the int 0) so the JSON
                                // type stays consistent with the "1" the flag arrives as.
                                common.put("is_new", "0");
                            } else {
                                // Genuinely the first visit: record the date in state.
                                firstTime.update(currTime);
                            }
                        }
                        return value;
                    }
                });

        //todo 6. Split the stream
        // start logs   -> "start" side output
        // display logs -> "display" side output (flattened out of page logs)
        // page logs    -> main stream
        OutputTag<String> startTag = new OutputTag<String>("start"){};
        OutputTag<String> displayTag = new OutputTag<String>("display"){};
        SingleOutputStreamOperator<String> pageDS = midWithNewFlagDS
                .process(new ProcessFunction<JSONObject, String>() {
                    @Override
                    public void processElement(JSONObject value, ProcessFunction<JSONObject, String>.Context ctx, Collector<String> out) throws Exception {
                        JSONObject start = value.getJSONObject("start");
                        if (start != null && start.size() > 0) {
                            // Startup log -> side output
                            ctx.output(startTag, value.toJSONString());
                        } else {
                            // Page log -> main stream (display entries belong to page logs)
                            out.collect(value.toJSONString());
                            JSONArray displays = value.getJSONArray("displays");
                            if (displays != null && displays.size() > 0) {
                                // Enrich each display entry with its page id and timestamp,
                                // which only exist on the enclosing page record.
                                Long ts = value.getLong("ts");
                                String pageId = value.getJSONObject("page").getString("page_id");
                                for (int i = 0; i < displays.size(); i++) {
                                    JSONObject display = displays.getJSONObject(i);
                                    display.put("page_id", pageId);
                                    display.put("ts", ts);
                                    // Display log -> side output
                                    ctx.output(displayTag, display.toJSONString());
                                }
                            }
                        }
                    }
                });

        //todo 7. Write each stream to its Kafka topic
        DataStream<String> displayDS = pageDS.getSideOutput(displayTag);
        DataStream<String> startDS = pageDS.getSideOutput(startTag);
        // Debug printing
        displayDS.print("display");
        startDS.print("start");
        pageDS.print("page");

        // Sink each stream to its DWD topic
        pageDS.addSink(MyKafkaUtil.getKafkaSink("dwd_page_log_2022"));
        displayDS.addSink(MyKafkaUtil.getKafkaSink("dwd_display_log_2022"));
        startDS.addSink(MyKafkaUtil.getKafkaSink("dwd_start_log_2022"));

        //todo 8. Submit the job
        env.execute("BaseLogApp");
    }
}
