package com.kqkj.dyrule.engine.entry;

import com.kqkj.dyrule.engine.beans.EventBean;
import com.kqkj.dyrule.engine.beans.RuleMatchResult;
import com.kqkj.dyrule.engine.functions.JsonEventBeanMapFunction;
import com.kqkj.dyrule.engine.functions.KafkaSourceBuilder;
import com.kqkj.dyrule.engine.functions.RuleMatchKeyedProcessFunction;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.KeyedStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;

/**
 * Requirement:
 *  Consume user events from Kafka, evaluate the rule below, and emit the match results.
 *  Rule:
 *      Trigger event: event K with attribute (P2=V1)
 *      Profile attribute conditions: tag87=v2, tag26=v1   (looked up in HBase)
 *      Behavior-count condition: from 2022-04-17 until now, event C[p6=v8, p12=v12]
 *      occurred >= 2 times (looked up in ClickHouse)
 */
public class Main {
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env =
                StreamExecutionEnvironment.createLocalEnvironmentWithWebUI(new Configuration());
        env.setParallelism(2);

        // Read the raw user-behavior log lines from the Kafka topic.
        DataStreamSource<String> rawLogStream =
                env.addSource(new KafkaSourceBuilder().build("zenniu_applog"));

        // Parse each JSON line into an EventBean; drop lines that fail to parse (mapped to null).
        SingleOutputStreamOperator<EventBean> eventStream =
                rawLogStream.map(new JsonEventBeanMapFunction())
                            .filter(bean -> bean != null);

        // Partition by device id so rule state is maintained per device.
        KeyedStream<EventBean, String> byDevice = eventStream.keyBy(EventBean::getDeviceId);

        // Evaluate the rule against each keyed event stream.
        SingleOutputStreamOperator<RuleMatchResult> matchResults =
                byDevice.process(new RuleMatchKeyedProcessFunction());

        matchResults.print("min");

        env.execute();
    }
}
