package com.kqkj.dyrule.marketing.main;



import com.alibaba.fastjson.JSON;
import com.kqkj.dyrule.marketing.beans.DynamicKeyedBean;
import com.kqkj.dyrule.marketing.beans.EventBean;
import com.kqkj.dyrule.marketing.beans.MaxwellLogBean;
import com.kqkj.dyrule.marketing.beans.RuleMatchResult;
import com.kqkj.dyrule.marketing.functions.*;
import com.kqkj.dyrule.marketing.utils.StateDescContainer;
import org.apache.flink.api.common.eventtime.SerializableTimestampAssigner;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.datastream.*;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;

import java.time.Duration;
import java.util.Objects;

/**
 * Dynamic-rule marketing job (Flink streaming driver).
 *
 * Requirement (translated from the original Chinese comment):
 *   Consume user events, evaluate the following rule, and emit match results.
 *   Rule:
 *     Trigger event:        event K with attribute (P2 = V1)
 *     Profile conditions:   tag87 = v2, tag26 = v1               (looked up in HBase)
 *     Behavior-count cond.: from 2022-04-17 until now, event C
 *                           [p6 = v8, p12 = v12] done >= 2 times (looked up in ClickHouse)
 */
public class Main {
    public static void main(String[] args) throws Exception {
        // Local environment with the Flink web UI enabled (dev/debug convenience).
        StreamExecutionEnvironment env = StreamExecutionEnvironment.createLocalEnvironmentWithWebUI(new Configuration());
        env.setParallelism(2);

        // Read the user behavior log from Kafka.
        KafkaSourceBuilder kafkaSourceBuilder = new KafkaSourceBuilder();
        DataStreamSource<String> dss = env.addSource(kafkaSourceBuilder.build("zenniu_applog"));

        // Parse JSON into EventBean; drop records the mapper could not parse
        // (it signals failure by returning null).
        SingleOutputStreamOperator<EventBean> dsBean = dss
                .map(new JsonEventBeanMapFunction())
                .filter(Objects::nonNull);

        // Assign event time and watermarks. Duration.ZERO means no
        // out-of-orderness is tolerated (strictly ascending timestamps assumed).
        // The two-arg lambda resolves unambiguously to SerializableTimestampAssigner.
        WatermarkStrategy<EventBean> watermarkStrategy = WatermarkStrategy
                .<EventBean>forBoundedOutOfOrderness(Duration.ZERO)
                .withTimestampAssigner((event, recordTimestamp) -> event.getTimeStamp());
        SingleOutputStreamOperator<EventBean> withWatermarkAndTimestamp =
                dsBean.assignTimestampsAndWatermarks(watermarkStrategy);

        // Read rule-management operations from Kafka (Maxwell binlog of the rule table).
        DataStreamSource<String> ruleBinlogs = env.addSource(kafkaSourceBuilder.build("maxwell"));

        // Parse the Maxwell binlog JSON. .returns(...) is required because the
        // lambda's target type is erased.
        DataStream<MaxwellLogBean> maxwellLogBeanDs = ruleBinlogs
                .map(s -> JSON.parseObject(s, MaxwellLogBean.class))
                .returns(MaxwellLogBean.class);

        // Broadcast rule changes to every downstream subtask.
        BroadcastStream<MaxwellLogBean> ruleBroadcast = maxwellLogBeanDs.broadcast(StateDescContainer.ruleStateDesc);

        // Connect the event stream with the broadcast rule stream.
        BroadcastConnectedStream<EventBean, MaxwellLogBean> connect = withWatermarkAndTimestamp.connect(ruleBroadcast);

        // Replicate each event once per dynamic key demanded by the active rules
        // (different rules may require different keyBy fields).
        SingleOutputStreamOperator<DynamicKeyedBean> withDynamicKey = connect.process(new DynamicKeyByReplicateFunctionV2());

        // Key by the dynamically assigned key value (e.g. device id).
        KeyedStream<DynamicKeyedBean, String> keyedStream = withDynamicKey.keyBy(DynamicKeyedBean::getKeyValue);

        // Connect the keyed events with the broadcast rules again and run the
        // actual rule evaluation.
        BroadcastConnectedStream<DynamicKeyedBean, MaxwellLogBean> connect2 = keyedStream.connect(ruleBroadcast);
        SingleOutputStreamOperator<RuleMatchResult> matchResultDs = connect2.process(new RuleMatchKeyedProcessFunctionV2());

        // NOTE(review): "min" looks like a typo for "main", but the prefix is part
        // of the printed output, so it is left unchanged — confirm with the author.
        matchResultDs.print("min");

        env.execute();
    }
}
