package com.atguigu.education.app.dwd.log;

import com.alibaba.fastjson.JSONAware;
import com.alibaba.fastjson.JSONObject;
import com.atguigu.education.util.DateFormatUtil;
import com.atguigu.education.util.KafkaUtil;
import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.functions.RichFlatMapFunction;
import org.apache.flink.api.common.state.StateTtlConfig;
import org.apache.flink.api.common.state.ValueState;
import org.apache.flink.api.common.state.ValueStateDescriptor;
import org.apache.flink.api.common.time.Time;
import org.apache.flink.api.java.functions.KeySelector;
import org.apache.flink.api.java.tuple.Tuple3;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.datastream.*;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.KeyedProcessFunction;
import org.apache.flink.streaming.api.functions.co.CoFlatMapFunction;
import org.apache.flink.streaming.api.functions.co.RichCoFlatMapFunction;
import org.apache.flink.util.Collector;

/**
 * @Author luyang
 * @create 2022/9/4 16:39
 */
public class DwdPayChapter {

    /** Delay (ms) after the first event of a key before the accumulated record is flushed. */
    private static final long EMIT_DELAY_MS = 5000L;

    /**
     * Flink job: reads appVideo playback logs from Kafka, accumulates {@code play_sec}
     * per (uid, sid, video_id) key in keyed state, and emits the merged record to a
     * downstream Kafka topic a fixed delay after the first event of that key.
     */
    public static void main(String[] args) throws Exception {
        // TODO 1. Set up the streaming execution environment.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        // Single parallel task — development/local setting.
        env.setParallelism(1);

        // TODO 2. Checkpointing and state backend (template, disabled for local runs).
        /*
        // Enable checkpointing; EXACTLY_ONCE is the usual mode.
        env.enableCheckpointing(5*60*1000L, CheckpointingMode.EXACTLY_ONCE);
        // Checkpoint timeout.
        env.getCheckpointConfig().setCheckpointTimeout(3*60*1000L);
        // Max concurrent checkpoints.
        env.getCheckpointConfig().setMaxConcurrentCheckpoints(2);
        // Checkpoint storage location.
        env.getCheckpointConfig().setCheckpointStorage("hdfs://hadoop102:8020/gmall/ck");
        // HDFS storage needs an explicit user name.
        System.setProperty("HADOOP_USER_NAME","atguigu");
        // State backend.
        env.setStateBackend(new HashMapStateBackend());
        */

        // TODO 3. Read the appVideo traffic log from Kafka.
        String topicName = "dwd_traffic_appVideo_log";
        String groupID = "dwd_pay_chapter";
        DataStreamSource<String> topicAppVideoLogStream =
                env.addSource(KafkaUtil.getKafkaConsumer(topicName, groupID));

        // Parse each raw record into a JSONObject.
        SingleOutputStreamOperator<JSONObject> jsonStream =
                topicAppVideoLogStream.map(new MapFunction<String, JSONObject>() {
                    @Override
                    public JSONObject map(String value) throws Exception {
                        return JSONObject.parseObject(value);
                    }
                });

        // TODO 4. Key by (user id, session id, video id) so each playback session
        // of a video is aggregated independently.
        KeyedStream<JSONObject, Tuple3<String, String, String>> keyedStream =
                jsonStream.keyBy(new KeySelector<JSONObject, Tuple3<String, String, String>>() {
                    @Override
                    public Tuple3<String, String, String> getKey(JSONObject value) throws Exception {
                        JSONObject common = value.getJSONObject("common");
                        JSONObject appVideo = value.getJSONObject("appVideo");
                        return new Tuple3<>(common.getString("uid"),
                                common.getString("sid"),
                                appVideo.getString("video_id"));
                    }
                });

        // TODO 5. Accumulate play_sec per key; a processing-time timer flushes the
        // merged record EMIT_DELAY_MS after the key's first event.
        SingleOutputStreamOperator<JSONObject> processStream = keyedStream.process(
                new KeyedProcessFunction<Tuple3<String, String, String>, JSONObject, JSONObject>() {
                    // Holds the running (accumulated) record for the current key.
                    ValueState<JSONObject> lastVideoJsonObjState = null;

                    @Override
                    public void open(Configuration parameters) throws Exception {
                        lastVideoJsonObjState = getRuntimeContext().getState(
                                new ValueStateDescriptor<JSONObject>("last_video_jsonObj", JSONObject.class));
                    }

                    @Override
                    public void processElement(JSONObject value, Context ctx, Collector<JSONObject> out) throws Exception {
                        JSONObject lastVideoJsonObj = lastVideoJsonObjState.value();
                        if (lastVideoJsonObj == null) {
                            // First record for this key: seed the state and schedule the flush timer.
                            lastVideoJsonObjState.update(value);
                            long currentProcessingTime = ctx.timerService().currentProcessingTime();
                            ctx.timerService().registerProcessingTimeTimer(currentProcessingTime + EMIT_DELAY_MS);
                        } else {
                            JSONObject appVideo = value.getJSONObject("appVideo");
                            // Fix: read play_sec once as a Long instead of re-fetching the nested
                            // object and mixing Integer.parseInt(getString(...)) with getLong(...);
                            // a missing or non-numeric value now skips the record instead of
                            // throwing NumberFormatException and failing the job. This also closes
                            // the latent NPE where the current value was added unchecked.
                            Long currentPlaySec = (appVideo == null) ? null : appVideo.getLong("play_sec");
                            if (currentPlaySec != null && currentPlaySec >= 30) {
                                Long accumulatedPlaySec =
                                        lastVideoJsonObj.getJSONObject("appVideo").getLong("play_sec");
                                if (accumulatedPlaySec != null) {
                                    appVideo.put("play_sec", currentPlaySec + accumulatedPlaySec);
                                    lastVideoJsonObjState.update(value);
                                }
                            }
                            // NOTE(review): records with play_sec < 30 are not accumulated, so a
                            // final partial segment is dropped — confirm this matches the appVideo
                            // log producer's heartbeat semantics (30s heartbeats + partial tail).
                        }
                    }

                    @Override
                    public void onTimer(long timestamp, OnTimerContext ctx, Collector<JSONObject> out) throws Exception {
                        // Flush the accumulated record for this key and reset the state so the
                        // next event starts a fresh accumulation with a new timer.
                        JSONObject lastVideoJsonObj = lastVideoJsonObjState.value();
                        // Fix: guard against a null state value so the downstream
                        // JSONAware::toJSONString map cannot throw a NullPointerException.
                        if (lastVideoJsonObj != null) {
                            out.collect(lastVideoJsonObj);
                        }
                        lastVideoJsonObjState.clear();
                    }
                });
        processStream.print(">>>>>>");

        // TODO 6. Write the merged records back to Kafka.
        String targetTopic = "dwd_pay_chapter_user";
        processStream.map(JSONAware::toJSONString)
                .addSink(KafkaUtil.getKafkaProducer(targetTopic));

        // TODO 7. Trigger job execution.
        env.execute();
    }
}
