package com.xiao.dynamic_rule.engine;

import com.xiao.dynamic_rule.functions.DeviceKeySelector;
import com.xiao.dynamic_rule.functions.Json2BeanMapFunction;
import com.xiao.dynamic_rule.functions.RuleProcessFunction;
import com.xiao.dynamic_rule.functions.RuleProcessFunctionV2;
import com.xiao.dynamic_rule.pojo.LogBean;
import com.xiao.dynamic_rule.pojo.ResultBean;
import com.xiao.dynamic_rule.utils.KafkaUtils;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.KeyedStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;

/**
 * Static rule engine, version 2.0.
 *
 * <p>Reads app-log JSON from Kafka, parses it into {@link com.xiao.dynamic_rule.pojo.LogBean},
 * keys the stream by device, and applies {@link com.xiao.dynamic_rule.functions.RuleProcessFunctionV2}.
 */
public class RuleEngineV2 {

    /**
     * Entry point: builds and submits the streaming rule-evaluation job.
     *
     * @param args unused command-line arguments
     * @throws Exception if job construction or execution fails; propagated so a
     *                   submission failure is visible to the caller/cluster instead
     *                   of being silently swallowed
     */
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        // Source: consume raw JSON log events from the "applog" Kafka topic,
        // starting from the latest offsets.
        FlinkKafkaConsumer<String> kafkaConsumer = KafkaUtils.getFlinkKafkaCounsumer(
                "hadoop101:9092,hadoop102:9092,hadoop103:9092",
                "latest",
                "applog");

        // addSource already returns DataStreamSource<String>; no cast needed.
        DataStreamSource<String> kafkaSource = env.addSource(kafkaConsumer);

        // Deserialize each JSON string into a LogBean.
        SingleOutputStreamOperator<LogBean> beanStream = kafkaSource.map(new Json2BeanMapFunction());

        // Partition the stream by device id so per-device state is kept together.
        KeyedStream<LogBean, String> keyedStream = beanStream.keyBy(new DeviceKeySelector());

        // Evaluate the V2 rule logic against each keyed event.
        SingleOutputStreamOperator<ResultBean> resultStream = keyedStream.process(new RuleProcessFunctionV2());

        // Sink: print matched results to stdout (development/debug sink).
        resultStream.print();

        // Submit the job with an identifiable name; any failure propagates to the caller.
        env.execute("RuleEngineV2");
    }
}
