package com.zlx.rulemk.marketing.main;


import com.alibaba.fastjson.JSON;
import com.typesafe.config.Config;
import com.typesafe.config.ConfigFactory;
import com.zlx.rulemk.marketing.beans.CDCLogBean;
import com.zlx.rulemk.marketing.beans.DynamicKeyedBean;
import com.zlx.rulemk.marketing.beans.EventBean;
import com.zlx.rulemk.marketing.beans.RuleMatchResult;
import com.zlx.rulemk.marketing.constant.ConfigNames;
import com.zlx.rulemk.marketing.functions.DynamicKeyByReplicationFunctionV2;
import com.zlx.rulemk.marketing.functions.Json2EventBeanMapFunction;
import com.zlx.rulemk.marketing.functions.RuleMatchKeyedProcessFunctionV2;
import com.zlx.rulemk.marketing.utils.FlinkStateDescUtils;
import com.zlx.rulemk.marketing.utils.KafkaSourceBuilderUtils;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.datastream.*;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;

import java.util.Objects;


/**
 * @Author: zhanglingxing
 * @CreateTime: 2022-07-08
 * @Description: Main entry class of the rule engine
 * @Version: 1.0
 */
public class Main {

    // 获取 resource/appliction.properties配置文件
    static Config config = ConfigFactory.load();

    public static void main(String[] args) throws Exception {

        // TODO 用户传入的
        int flink_parallelism = 2;

        // 构建env
        StreamExecutionEnvironment env = StreamExecutionEnvironment.createLocalEnvironmentWithWebUI(new Configuration());
        env.setParallelism(flink_parallelism);

        // 读取kafka中的用户行为日志
        DataStream<String> dss = env.addSource(KafkaSourceBuilderUtils.build(config,ConfigFactory.load().getString(ConfigNames.KAFKA_ACTION_DETAIL_TOPIC),"e1"));
        // json解析
        DataStream<EventBean> dsBean = dss.map(new Json2EventBeanMapFunction()).filter(Objects::nonNull);

        // 读取kafka中的规则操作数据流canal binlog
        DataStream<String> ruleBinlogDs = env.addSource(KafkaSourceBuilderUtils.build(config,ConfigFactory.load().getString(ConfigNames.KAFKA_ACTION_RULE_TOPIC),"r1"));
        // 解析 binlog
        DataStream<CDCLogBean> canalLogBeanDs = ruleBinlogDs.map(s -> JSON.parseObject(s, CDCLogBean.class)).returns(CDCLogBean.class);

        // 将binlog数据流广播出去  <keybyFields,List<规则名称>>
        BroadcastStream<CDCLogBean> ruleBroadcast1 = canalLogBeanDs.broadcast(FlinkStateDescUtils.ruleStateDesc1);

        // 将 数据流 connect  规则广播流
        BroadcastConnectedStream<EventBean, CDCLogBean> connect1 = dsBean.connect(ruleBroadcast1);

        // 数据复制（因为有多种keyby需求）
        SingleOutputStreamOperator<DynamicKeyedBean> withDynamicKey = connect1.process(new DynamicKeyByReplicationFunctionV2(flink_parallelism));

        // 增加并行度 按照规则对应的key进行分组
        KeyedStream<DynamicKeyedBean, String> keyedStream = withDynamicKey.keyBy(DynamicKeyedBean::getKey);

        SingleOutputStreamOperator<RuleMatchResult> result = keyedStream.process(new RuleMatchKeyedProcessFunctionV2());

        result.print("规则结果触发");

        env.execute("Flink_engine");

    }
}
