package cn.doitedu.rtmk.demo5;

import cn.doitedu.rtmk.common.EventBean;

import com.alibaba.fastjson.JSON;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.connector.kafka.source.KafkaSource;
import org.apache.flink.connector.kafka.source.enumerator.initializer.OffsetsInitializer;
import org.apache.flink.runtime.state.hashmap.HashMapStateBackend;
import org.apache.flink.streaming.api.CheckpointingMode;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.KeyedStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.KeyedProcessFunction;
import org.apache.flink.util.Collector;
import org.apache.kafka.clients.consumer.OffsetResetStrategy;

import java.util.HashMap;
import java.util.Map;

/**
 * Flink job demo: consumes real-time user behavior events from the Kafka topic
 * {@code dwd_events}, keys the stream by user id, and pushes every event through
 * a pool of rule calculators (rule instances built from inline JSON parameters).
 * Matching results are emitted downstream and printed.
 */
public class Demo5 {

    public static void main(String[] args) throws Exception {

        // Create the stream execution environment.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);
        // Enable exactly-once checkpointing every 5 seconds.
        env.enableCheckpointing(5000, CheckpointingMode.EXACTLY_ONCE);
        // NOTE(review): "file:/d:/ckpt" is a nonstandard file URI (usually
        // "file:///d:/ckpt") — confirm it resolves on the target platform.
        env.getCheckpointConfig().setCheckpointStorage("file:/d:/ckpt");
        // NOTE(review): a 2s checkpoint timeout is shorter than the 5s interval
        // (Flink's default is 10 minutes) — verify this is intentional, as slow
        // checkpoints will be aborted and may fail the job.
        env.getCheckpointConfig().setCheckpointTimeout(2000);
        // Use the heap-based HashMap state backend.
        env.setStateBackend(new HashMapStateBackend());


        // Build a Kafka source to read real-time user behavior data.
        KafkaSource<String> source = KafkaSource.<String>builder()
                .setBootstrapServers("doitedu:9092")
                .setStartingOffsets(OffsetsInitializer.committedOffsets(OffsetResetStrategy.LATEST))
                .setGroupId("doit40-1")
                .setTopics("dwd_events")
                .setValueOnlyDeserializer(new SimpleStringSchema())
                .build();

        // Read the Kafka topic as a stream of raw JSON strings.
        DataStreamSource<String> eventsStr = env.fromSource(source, WatermarkStrategy.noWatermarks(), "s");

        // Parse each behavior-log JSON line into an EventBean.
        SingleOutputStreamOperator<EventBean> beanStream = eventsStr.map(json -> JSON.parseObject(json, EventBean.class));


        // Route all events of the same user to the same subtask.
        KeyedStream<EventBean, Long> keyedStream = beanStream.keyBy(EventBean::getUid);

        // Core processing logic: run every event through the calculator pool.
        keyedStream.process(new KeyedProcessFunction<Long, EventBean, String>() {

            // Pool of rule calculators, keyed by rule id.
            final HashMap<String, RuleCalculator> calculatorPool = new HashMap<>();

            @Override
            public void open(Configuration parameters) throws Exception {

                // Build rule instance 1 of rule model 1 from its JSON parameters.
                String rule_1_1_paramJson = "{\n" +
                        "  \"rule_id\": \"rule-1-1\",\n" +
                        "  \"static_profile\": [\n" +
                        "    {\n" +
                        "      \"tagName\": \"gender\",\n" +
                        "      \"tagValue\": [\n" +
                        "        \"male\"\n" +
                        "      ],\n" +
                        "      \"compareType\": \"=\"\n" +
                        "    },\n" +
                        "    {\n" +
                        "      \"tagName\": \"age\",\n" +
                        "      \"tagValue\": [\n" +
                        "        \"20.5\",\n" +
                        "        \"39\"\n" +
                        "      ],\n" +
                        "      \"compareType\": \"between\"\n" +
                        "    }\n" +
                        "  ],\n" +
                        "  \"realtime_profile\": {\n" +
                        "    \"eventId\": \"w\",\n" +
                        "    \"eventCnt\": 4\n" +
                        "  },\n" +
                        "  \"fire_action\": \"p\"\n" +
                        "}";
                RuleCalculator rule_1_calculator = new RuleModel_1_Calculator();
                rule_1_calculator.init(getRuntimeContext(), rule_1_1_paramJson);
                calculatorPool.put("rule-1-1", rule_1_calculator);


                // Build rule instance 2 of rule model 1 from its JSON parameters.
                String rule_1_2_paramJson = "{\n" +
                        "  \"rule_id\": \"rule-1-2\",\n" +
                        "  \"static_profile\": [\n" +
                        "    {\n" +
                        "      \"tagName\": \"active_level\",\n" +
                        "      \"tagValue\": [\n" +
                        "        \"2\"\n" +
                        "      ],\n" +
                        "      \"compareType\": \">\"\n" +
                        "    },\n" +
                        "    {\n" +
                        "      \"tagName\": \"age\",\n" +
                        "      \"tagValue\": [\n" +
                        "        \"25\",\n" +
                        "        \"30\"\n" +
                        "      ],\n" +
                        "      \"compareType\": \"between\"\n" +
                        "    }\n" +
                        "  ],\n" +
                        "  \"realtime_profile\": {\n" +
                        "    \"eventId\": \"g\",\n" +
                        "    \"eventCnt\": 2\n" +
                        "  },\n" +
                        "  \"fire_action\": \"u\"\n" +
                        "}";
                RuleCalculator rule_2_calculator = new RuleModel_1_Calculator();
                rule_2_calculator.init(getRuntimeContext(), rule_1_2_paramJson);
                calculatorPool.put("rule-1-2", rule_2_calculator);
            }

            @Override
            public void processElement(EventBean eventBean, KeyedProcessFunction<Long, EventBean, String>.Context context, Collector<String> collector) throws Exception {

                // Feed the current event to every calculator in the pool;
                // only the values are needed, so iterate values() directly.
                for (RuleCalculator calculator : calculatorPool.values()) {
                    calculator.calc(eventBean, collector);
                }
            }
        }).print();

        env.execute();
    }


}
