package cn.doitedu.rule.engine.entry;

import cn.doitedu.rule.engine.beans.EventBean;
import cn.doitedu.rule.engine.beans.RuleMatchResult;
import cn.doitedu.rule.engine.functions.Json2EventBeanMapFunction;
import cn.doitedu.rule.engine.functions.KafkaSourceBuilder;
import cn.doitedu.rule.engine.functions.RuleMatchKeyedProcessFunction;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.KeyedStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;

/**
 * Rule engine, version 1.
 *
 * <p>Requirement: consume user events, evaluate the following rule, and emit the result.
 *
 * <p>Rule:
 * <ul>
 *   <li>Trigger event: event K with attribute (p2 = v1)</li>
 *   <li>Profile tag conditions: tag87 = v2, tag26 = v1</li>
 *   <li>Behavior-count condition: from 2021-06-18 until now, event C
 *       [p6 = v8, p12 = v5] occurred &gt;= 2 times</li>
 * </ul>
 */
public class Main {

    public static void main(String[] args) throws Exception {

        // Local execution environment with the Flink web UI enabled.
        StreamExecutionEnvironment env =
                StreamExecutionEnvironment.createLocalEnvironmentWithWebUI(new Configuration());

        // Consume the raw user-behavior log lines from the Kafka topic.
        DataStream<String> rawLines =
                env.addSource(new KafkaSourceBuilder().build("zenniu_applog"));

        // Parse each JSON line into an EventBean; drop lines that fail to parse
        // (the map function signals failure by returning null).
        DataStream<EventBean> eventStream =
                rawLines.map(new Json2EventBeanMapFunction())
                        .filter(bean -> bean != null);

        // Partition the stream by device id so per-device state is kept together.
        KeyedStream<EventBean, String> byDevice = eventStream.keyBy(EventBean::getDeviceId);

        // Evaluate the rule per device and collect the match results.
        DataStream<RuleMatchResult> matchResults =
                byDevice.process(new RuleMatchKeyedProcessFunction());

        matchResults.print();

        env.execute();
    }
}
