package cn.doitedu.rtmk.demo1;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.connector.kafka.source.KafkaSource;
import org.apache.flink.connector.kafka.source.enumerator.initializer.OffsetsInitializer;
import org.apache.flink.runtime.state.hashmap.HashMapStateBackend;
import org.apache.flink.streaming.api.CheckpointingMode;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.KeyedStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.KeyedProcessFunction;
import org.apache.flink.util.Collector;
import org.apache.kafka.clients.consumer.OffsetResetStrategy;

/**
 * Rule 1: when a user performs event "x", push a message immediately.
 * Rule 2: when a user performs event "c" AND the event satisfies properties[p1] = v1.
 *
 * Test data:
 * {"uid":1,"eventId":"a","timestamp":1692844887100,"properties":{"p1":"v1"}}
 * {"uid":2,"eventId":"a","timestamp":1692844887200,"properties":{"p1":"v2"}}
 * {"uid":3,"eventId":"x","timestamp":1692844887300,"properties":{"p1":"v3"}}
 * {"uid":1,"eventId":"x","timestamp":1692844887400,"properties":{"p1":"v4"}}
 */
public class Demo1 {

    public static void main(String[] args) throws Exception {

        // Create the stream execution environment.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        // Enable exactly-once checkpointing every 5 seconds.
        env.enableCheckpointing(5000, CheckpointingMode.EXACTLY_ONCE);
        env.getCheckpointConfig().setCheckpointStorage("file:/d:/ckpt");
        // NOTE(review): 2s is an aggressive checkpoint timeout (Flink's default is 10 minutes,
        // and it is shorter than the 5s interval) — any checkpoint slower than 2s will be
        // declared failed. Confirm this is intended for the demo.
        env.getCheckpointConfig().setCheckpointTimeout(2000);
        // Keep state on the JVM heap.
        env.setStateBackend(new HashMapStateBackend());

        // Build the Kafka source that reads the real-time user behavior events.
        KafkaSource<String> source = KafkaSource.<String>builder()
                .setBootstrapServers("doitedu:9092")
                .setStartingOffsets(OffsetsInitializer.committedOffsets(OffsetResetStrategy.LATEST))
                .setGroupId("doit40-1")
                .setTopics("dwd_events")
                .setValueOnlyDeserializer(new SimpleStringSchema())
                .build();

        // Read the Kafka data as a stream of raw JSON strings.
        DataStreamSource<String> eventsStr = env.fromSource(source, WatermarkStrategy.noWatermarks(), "s");

        // Parse each behavior-log line into an EventBean.
        SingleOutputStreamOperator<EventBean> beanStream =
                eventsStr.map(json -> JSON.parseObject(json, EventBean.class));

        // Route all events of the same user to the same subtask.
        KeyedStream<EventBean, Long> keyedStream = beanStream.keyBy(EventBean::getUid);

        // Core processing: evaluate whether each rule matches the incoming event.
        SingleOutputStreamOperator<String> resultStream =
                keyedStream.process(new KeyedProcessFunction<Long, EventBean, String>() {

            @Override
            public void processElement(EventBean eventBean,
                                       KeyedProcessFunction<Long, EventBean, String>.Context context,
                                       Collector<String> collector) throws Exception {

                // Rule 1: any "x" event matches unconditionally.
                if ("x".equals(eventBean.getEventId())) {
                    collector.collect(matchMessage(eventBean, "rule-001"));
                }

                // Rule 2: a "c" event whose properties contain p1 = v1.
                // Guard against a missing properties map to avoid an NPE on events
                // that carry no properties; Yoda-style equals tolerates a null value.
                if ("c".equals(eventBean.getEventId())
                        && eventBean.getProperties() != null
                        && "v1".equals(eventBean.getProperties().getOrDefault("p1", ""))) {
                    collector.collect(matchMessage(eventBean, "rule-002"));
                }
            }
        });

        // Print the matched rule messages.
        resultStream.print();

        env.execute();

    }

    /**
     * Builds the JSON push message emitted when a rule matches.
     *
     * <p>A fresh {@link JSONObject} is created per match: the original code reused a single
     * mutable instance shared across all events of the subtask, which risks stale fields
     * leaking between emissions if the rules ever diverge in the keys they set.
     *
     * @param eventBean the matched user behavior event
     * @param ruleId    identifier of the matched rule (e.g. "rule-001")
     * @return the message serialized as a JSON string
     */
    private static String matchMessage(EventBean eventBean, String ruleId) {
        JSONObject jsonObject = new JSONObject();
        jsonObject.put("uid", eventBean.getUid());
        jsonObject.put("timestamp", eventBean.getTimestamp());
        jsonObject.put("rule_id", ruleId);
        return jsonObject.toJSONString();
    }

}
