package com.sunzm.dynamic_rule.engine;

import com.sunzm.dynamic_rule.functions.Json2LogBeanFlatMapFunction;
import com.sunzm.dynamic_rule.functions.RuleProcessFunction;
import com.sunzm.dynamic_rule.functions.SourceFunctions;
import com.sunzm.dynamic_rule.pojo.LogBean;
import com.sunzm.dynamic_rule.pojo.ResultBean;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.KeyedStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;

/**
 * Static rule engine, version 1 — main program.
 *
 * <p>Pipeline: Kafka user-behavior event source &rarr; JSON-to-{@link LogBean}
 * parsing &rarr; key by deviceId &rarr; rule processing
 * ({@link RuleProcessFunction}) &rarr; print sink.
 *
 * @author Administrator
 * @version 1.0
 * @date 2021-07-07 0:57
 */
public class RuleEngineV1 {
    /** Development toggle: run with a local environment that exposes the Flink web UI. */
    private static final boolean IS_LOCAL = true;

    public static void main(String[] args) throws Exception {

        // Build the execution environment once: a local environment with the
        // web UI enabled when developing, otherwise the standard (cluster or
        // local) environment resolved by Flink. The original code created the
        // standard environment and then discarded it in local mode.
        StreamExecutionEnvironment env = IS_LOCAL
                ? StreamExecutionEnvironment.createLocalEnvironmentWithWebUI(new Configuration())
                : StreamExecutionEnvironment.getExecutionEnvironment();

        // Source: consume real-time user behavior events from Kafka as raw JSON strings.
        DataStreamSource<String> logStream = env.addSource(SourceFunctions.getKafkaEventSource());

        // Parse each JSON record into a LogBean; flatMap allows unparsable
        // records to be dropped rather than failing the job.
        DataStream<LogBean> beanStream = logStream.flatMap(new Json2LogBeanFlatMapFunction());

        // Partition the stream by the user's deviceId so that per-device
        // keyed state is co-located in the processing operator.
        KeyedStream<LogBean, String> keyed = beanStream.keyBy(LogBean::getDeviceId);

        // Core rule-matching computation over the keyed stream.
        SingleOutputStreamOperator<ResultBean> resultStream = keyed.process(new RuleProcessFunction());

        // Sink: print matched results to stdout (development sink).
        resultStream.print();

        // Submit the job with an explicit name so it is identifiable in the Flink UI.
        env.execute("RuleEngineV1");
    }
}
