package com.sunzm.droolsdemo;

import com.alibaba.fastjson.JSON;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.api.common.state.BroadcastState;
import org.apache.flink.api.common.state.MapStateDescriptor;
import org.apache.flink.api.common.state.ReadOnlyBroadcastState;
import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.datastream.*;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.co.KeyedBroadcastProcessFunction;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;
import org.apache.flink.util.Collector;
import org.kie.api.io.ResourceType;
import org.kie.api.runtime.KieSession;
import org.kie.api.runtime.rule.FactHandle;
import org.kie.internal.utils.KieHelper;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.util.Iterator;
import java.util.Map;
import java.util.Properties;

/**
 * FlinkDroolsCanalDemo
 *
 * @author Administrator
 * @version 1.0
 * @date 2021-07-18 21:21
 */
public class FlinkDroolsCanalDemo {
    /** When true, run with a local Flink mini-cluster that serves the web UI. */
    private static boolean isLocal = true;
    private static final Logger logger = LoggerFactory.getLogger(FlinkDroolsCanalDemo.class);

    public static void main(String[] args) throws Exception {
        // Obtain the execution environment.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        if (isLocal) {
            env = StreamExecutionEnvironment.createLocalEnvironmentWithWebUI(new Configuration());
        }

        // Applicant stream from Kafka.
        /*
         * bin/kafka-topics.sh --create --topic applicant --replication-factor 1 --partitions 1 --zookeeper 10.0.8.11:2181
         */
        Properties props = new Properties();
        props.setProperty("bootstrap.servers", "82.156.210.70:9093");
        props.setProperty("auto.offset.reset", "latest");
        //bin/kafka-console-producer.sh --topic applicant  --bootstrap-server 10.0.8.11:9092
        FlinkKafkaConsumer<String> applicantConsumer = new FlinkKafkaConsumer<>("applicant",
                new SimpleStringSchema(), props);

        // Record format: "kangkang,18" (name,age).
        DataStreamSource<String> applicantStrStream = env.addSource(applicantConsumer);

        // Map the raw CSV strings into Applicant objects.
        SingleOutputStreamOperator<Applicant> applicantStream = applicantStrStream.map(line -> {
            String[] split = line.split(",");
            return new Applicant(split[0], Integer.parseInt(split[1]));
        }).returns(TypeInformation.of(Applicant.class));

        // Key the stream by applicant name.
        KeyedStream<Applicant, String> keyedStream = applicantStream.keyBy(value -> value.getName());

        // Rule stream: canal pushes rule-table changes as JSON into the "canal_rule" topic.
        FlinkKafkaConsumer<String> ruleConsumer = new FlinkKafkaConsumer<>("canal_rule",
                new SimpleStringSchema(), props);
        DataStreamSource<String> ruleStream = env.addSource(ruleConsumer);

        // NOTE(review): KieSession is not serializable, so this broadcast state cannot be
        // checkpointed reliably — fine for a demo; production code should broadcast the
        // DRL source string and build sessions locally. TODO confirm before reuse.
        MapStateDescriptor<String, KieSession> stateDescriptor = new MapStateDescriptor<>("ruleState", String.class, KieSession.class);
        BroadcastStream<String> broadcastStream = ruleStream.broadcast(stateDescriptor);

        // Connect the keyed applicant stream with the broadcast rule stream.
        BroadcastConnectedStream<Applicant, String> connectedStream = keyedStream.connect(broadcastStream);

        // Evaluate every applicant against every currently-known rule.
        SingleOutputStreamOperator<String> result = connectedStream.process(new KeyedBroadcastProcessFunction<String, Applicant, String, String>() {

            /**
             * Runs each applicant through every rule session in broadcast state and
             * emits "name,合法" / "name,不合法" per rule.
             *
             * @param applicant incoming applicant record
             * @param ctx       read-only access to the broadcast rule state
             * @param out       collector for the verdict strings
             * @throws Exception if state access fails
             */
            @Override
            public void processElement(Applicant applicant, ReadOnlyContext ctx, Collector<String> out) throws Exception {

                ReadOnlyBroadcastState<String, KieSession> broadcastState = ctx.getBroadcastState(stateDescriptor);

                for (Map.Entry<String, KieSession> entry : broadcastState.immutableEntries()) {
                    KieSession kieSession = entry.getValue();

                    // Reset before each evaluation; rules are expected to flip
                    // valid to false when the applicant does not qualify.
                    applicant.setValid(true);

                    // Keep the FactHandle so the fact can be retracted afterwards.
                    // Without the delete(), every processed element would stay in
                    // the session's working memory forever (unbounded growth).
                    FactHandle handle = kieSession.insert(applicant);
                    try {
                        kieSession.fireAllRules();
                    } finally {
                        kieSession.delete(handle);
                    }

                    if (applicant.isValid()) {
                        out.collect(applicant.getName() + ",合法");
                    } else {
                        out.collect(applicant.getName() + ",不合法");
                    }
                }
            }

            /**
             * Handles a canal change-capture message: parses the JSON, compiles the
             * contained DRL into a new KieSession, and stores it under the rule name.
             * Malformed or empty messages are logged and skipped so a single bad
             * record cannot kill the job.
             *
             * @param value canal JSON message describing a rule-table change
             * @param ctx   read-write access to the broadcast rule state
             * @param out   unused (no downstream output from the rule side)
             * @throws Exception if state access fails
             */
            @Override
            public void processBroadcastElement(String value, Context ctx, Collector<String> out) throws Exception {
                BroadcastState<String, KieSession> broadcastState = ctx.getBroadcastState(stateDescriptor);

                try {
                    // Canal emits the monitored MySQL row change as a JSON envelope.
                    CanalRecordBean canalRecordBean = JSON.parseObject(value, CanalRecordBean.class);
                    if (canalRecordBean == null || canalRecordBean.getData() == null
                            || canalRecordBean.getData().isEmpty()) {
                        // e.g. DDL statements or heartbeats carry no row data — skip them.
                        logger.warn("Ignoring canal message without row data: {}", value);
                        return;
                    }

                    // Take the first changed row of the rule table.
                    RuleTableRecord tableRec = canalRecordBean.getData().get(0);

                    String ruleName = tableRec.getRuleName();
                    String ruleCode = tableRec.getRuleCode();

                    // Compile the DRL text into a fresh session.
                    KieSession kieSession = new KieHelper().addContent(ruleCode, ResourceType.DRL).build().newKieSession();

                    // Dispose the session being replaced (if any) so Drools
                    // resources are released instead of leaking on every update.
                    KieSession previous = broadcastState.get(ruleName);
                    if (previous != null) {
                        previous.dispose();
                    }
                    broadcastState.put(ruleName, kieSession);
                } catch (Exception e) {
                    logger.error("Failed to apply rule update from canal message: {}", value, e);
                }
            }
        });

        // Print the verdicts to stdout.
        result.print();

        // Launch the job.
        env.execute("FlinkDroolsCanalDemo");
    }
}
