package cn.doitedu.demo2;

import cn.doitedu.beans.EventBean;
import com.alibaba.fastjson.JSON;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.connector.kafka.source.KafkaSource;
import org.apache.flink.connector.kafka.source.enumerator.initializer.OffsetsInitializer;
import org.apache.flink.streaming.api.CheckpointingMode;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.KeyedProcessFunction;
import org.apache.flink.util.Collector;

import java.util.HashMap;
import java.util.Map;

/**
 * @Author: 深似海
 * @Site: <a href="www.51doit.com">多易教育</a>
 * @QQ: 657270652
 * @Date: 2024/5/14
 * @Desc: 学大数据，上多易教育
 *
 **/

public class Entrypoint {

    /**
     * Flink job entry point: reads user-action log events (JSON) from Kafka,
     * deserializes them into {@link EventBean}s, keys the stream by user id,
     * and runs every event through a pool of rule calculators.
     *
     * @param args unused command-line arguments
     * @throws Exception if job submission or execution fails
     */
    public static void main(String[] args) throws Exception {

        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.enableCheckpointing(5000, CheckpointingMode.EXACTLY_ONCE);
        env.getCheckpointConfig().setCheckpointStorage("file:///d:/ckpt");
        env.setParallelism(1);

        // Build the Kafka source for the user-action log topic
        KafkaSource<String> source = KafkaSource.<String>builder()
                .setBootstrapServers("doitedu:9092")
                .setTopics("ods-user-action-log")
                .setGroupId("ggg")
                .setClientIdPrefix("client-001")
                .setValueOnlyDeserializer(new SimpleStringSchema())
                .setStartingOffsets(OffsetsInitializer.latest())
                .build();

        DataStreamSource<String> stream = env.fromSource(source, WatermarkStrategy.noWatermarks(), "kafka-source");

        // Parse each JSON line into an EventBean
        SingleOutputStreamOperator<EventBean> beanStream = stream.map(json -> JSON.parseObject(json, EventBean.class));

        SingleOutputStreamOperator<String> resultStream = beanStream.keyBy(EventBean::getUser_id)
                .process(new KeyedProcessFunction<Long, EventBean, String>() {

                    // Pool of rule calculators, indexed by rule id; populated once in open()
                    HashMap<String, RuleCalculator> calculatorPool = new HashMap<>();

                    @Override
                    public void open(Configuration parameters) throws Exception {

                        // Construct, initialize and register the rule-1 calculator
                        RuleCalculator_01 ruleCalculator01 = new RuleCalculator_01();
                        ruleCalculator01.init(getRuntimeContext());
                        calculatorPool.put(ruleCalculator01.getRuleId(), ruleCalculator01);

                        // Construct, initialize and register the rule-2 calculator
                        RuleCalculator_02 ruleCalculator02 = new RuleCalculator_02();
                        ruleCalculator02.init(getRuntimeContext());
                        calculatorPool.put(ruleCalculator02.getRuleId(), ruleCalculator02);

                        // Construct, initialize and register the rule-3 calculator
                        RuleCalculator_03 ruleCalculator03 = new RuleCalculator_03();
                        ruleCalculator03.init(getRuntimeContext());
                        calculatorPool.put(ruleCalculator03.getRuleId(), ruleCalculator03);
                    }

                    @Override
                    public void processElement(EventBean eventBean, KeyedProcessFunction<Long, EventBean, String>.Context ctx, Collector<String> out) throws Exception {

                        // Feed the event to every registered rule calculator;
                        // each calculator may emit results via the collector.
                        for (Map.Entry<String, RuleCalculator> entry : calculatorPool.entrySet()) {
                            RuleCalculator ruleCalculator = entry.getValue();
                            ruleCalculator.calc(eventBean, out);
                        }
                    }

                });

        // A pipeline needs at least one sink; print results for this demo job.
        resultStream.print();

        // Without this call the job graph is built but never submitted/executed.
        env.execute("rule-engine-demo2");
    }

}
