package cn.doitedu.demo2;

import cn.doitedu.beans.UserAction;
import com.alibaba.fastjson.JSON;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.connector.kafka.source.KafkaSource;
import org.apache.flink.connector.kafka.source.enumerator.initializer.OffsetsInitializer;
import org.apache.flink.contrib.streaming.state.EmbeddedRocksDBStateBackend;
import org.apache.flink.streaming.api.CheckpointingMode;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.KeyedStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.KeyedProcessFunction;
import org.apache.flink.util.Collector;

import java.util.HashMap;
import java.util.Map;

public class MainEntry {

    /**
     * Entry point: reads user-action JSON logs from Kafka, keys the stream by
     * user id, and fans every event out to each registered {@code RuleCalculator}.
     *
     * @param args unused
     * @throws Exception if the Flink job fails to build or execute
     */
    public static void main(String[] args) throws Exception {

        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);
        // Exactly-once checkpoints every 5 s, stored on the local filesystem
        env.enableCheckpointing(5000, CheckpointingMode.EXACTLY_ONCE);
        env.getCheckpointConfig().setCheckpointStorage("file:///d:/ckpt");

        // RocksDB state backend with incremental checkpointing enabled
        env.setStateBackend(new EmbeddedRocksDBStateBackend(true));

        // Build a Kafka source to read the raw user-action log lines
        KafkaSource<String> source = KafkaSource
                .<String>builder()
                .setBootstrapServers("doitedu:9092")
                .setTopics("user-action-log")
                .setStartingOffsets(OffsetsInitializer.latest())
                .setGroupId("doitedu-ww")
                .setClientIdPrefix("doitedu-cc")
                .setValueOnlyDeserializer(new SimpleStringSchema())
                .build();

        DataStreamSource<String> stream = env.fromSource(source, WatermarkStrategy.noWatermarks(), "log-source");

        // Parse each JSON record into a UserAction bean
        DataStream<UserAction> mapped = stream.map(json -> JSON.parseObject(json, UserAction.class));

        // Partition by user id so per-user rule state stays local to one subtask
        KeyedStream<UserAction, Long> keyedStream = mapped.keyBy(UserAction::getUser_id);

        // Core logic: run every registered rule calculator over each event
        keyedStream.process(new KeyedProcessFunction<Long, UserAction, String>() {

            // Built in open() on the task manager; transient so the map is not
            // serialized and shipped with the function instance from the client.
            private transient Map<String, RuleCalculator> calculatorPool;

            @Override
            public void open(Configuration parameters) throws Exception {
                calculatorPool = new HashMap<>();

                RuleCalculator_01 ruleCalculator01 = new RuleCalculator_01();
                ruleCalculator01.init(getRuntimeContext());
                calculatorPool.put("rule-001", ruleCalculator01);

                RuleCalculator_02 ruleCalculator02 = new RuleCalculator_02();
                ruleCalculator02.init(getRuntimeContext());
                calculatorPool.put("rule-002", ruleCalculator02);
            }

            @Override
            public void processElement(UserAction userAction, KeyedProcessFunction<Long, UserAction, String>.Context ctx, Collector<String> out) throws Exception {
                // Only the calculators are needed here, not the rule ids
                for (RuleCalculator ruleCalculator : calculatorPool.values()) {
                    ruleCalculator.calc(userAction, out);
                }
            }
        })
        // NOTE(review): the rule output previously had no sink and was discarded;
        // print it so the dataflow is observable — replace with a real sink in production.
        .print();

        // BUG FIX: execute() was missing, so the job graph was assembled but never submitted.
        env.execute("user-action-rule-engine");
    }

}
