package cn.doitedu.jobs;

import cn.doitedu.pojo.LogBean;
import cn.doitedu.pojo.MatchResult;
import cn.doitedu.pojo.RulesBean;
import cn.doitedu.udfs.*;
import cn.doitedu.utils.FlinkUtils;
import cn.doitedu.utils.StateDescriptorUtils;
import com.ververica.cdc.connectors.mysql.source.MySqlSource;
import com.ververica.cdc.debezium.JsonDebeziumDeserializationSchema;
import org.apache.flink.api.common.eventtime.SerializableTimestampAssigner;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.java.utils.ParameterTool;
import org.apache.flink.streaming.api.datastream.*;

import java.time.Duration;


/**
 * Flink streaming job that dynamically matches log events against rules.
 *
 * <p>Pipeline: (1) Flink CDC continuously captures rule changes from MySQL and
 * broadcasts them as {@link RulesBean} state; (2) log lines are consumed from
 * Kafka, parsed into {@link LogBean}, keyed by device id; (3) the keyed event
 * stream is connected with the broadcast rule stream and matched in
 * {@code MatchFunctionV5}; (4) results are printed (sink placeholder).
 */
public class RulesMatcher {

    /**
     * Job entry point.
     *
     * @param args args[0] must be the path to a properties file holding all
     *             job configuration (rules DB connection, Kafka settings, ...)
     * @throws Exception if configuration loading or job execution fails
     */
    public static void main(String[] args) throws Exception {

        // Fail fast with a usage message instead of an ArrayIndexOutOfBoundsException.
        if (args.length < 1) {
            throw new IllegalArgumentException("Usage: RulesMatcher <path-to-properties-file>");
        }
        ParameterTool parameterTool = ParameterTool.fromPropertiesFile(args[0]);

        // --- Flink CDC source: read changed rule rows from MySQL in real time ---
        String rulesDbHost = parameterTool.getRequired("rules.db.host");
        int rulesDbPort = parameterTool.getInt("rules.db.port", 3306);
        String[] rulesDbList = parameterTool.getRequired("rules.db.db-list").split(",");
        // NOTE(review): "talbe-list" looks like a typo for "table-list", but the deployed
        // properties file presumably uses the same spelling — rename both together, not here.
        String[] rulesTableList = parameterTool.getRequired("rules.db.talbe-list").split(",");
        String username = parameterTool.getRequired("rules.db.username");
        String password = parameterTool.getRequired("rules.db.password");

        MySqlSource<String> mySqlSource = MySqlSource.<String>builder()
                .hostname(rulesDbHost)
                .port(rulesDbPort)
                .databaseList(rulesDbList) // set captured databases
                .tableList(rulesTableList) // set captured tables
                .username(username)
                .password(password)
                .deserializer(new JsonDebeziumDeserializationSchema()) // converts SourceRecord to JSON String
                .build();

        // CDC changelog is not event-time driven here, so no watermarks are needed.
        DataStreamSource<String> rulesLineStream =
                FlinkUtils.env.fromSource(mySqlSource, WatermarkStrategy.noWatermarks(), "MySQL Source");

        // Parse the rule_condition_json column of each change record into a RulesBean.
        SingleOutputStreamOperator<RulesBean> rulesBeanStream =
                rulesLineStream.process(new RulesJsonToBeanFunction());

        // Broadcast the rules; the descriptor defines how the broadcast state is
        // stored on the downstream operators.
        BroadcastStream<RulesBean> broadcastStream =
                rulesBeanStream.broadcast(StateDescriptorUtils.rulesStateDescriptor);

        // --- Kafka source: the raw log event stream ---
        DataStream<String> lines = FlinkUtils.createKafkaStream(parameterTool);

        // Parse JSON lines into LogBean, dropping dirty records
        // (malformed JSON or records without a deviceId).
        SingleOutputStreamOperator<LogBean> beanStream = lines.process(new JsonToBeanFunction());

        // Event-time watermarks with zero out-of-orderness tolerance,
        // using each record's own timestamp field.
        SingleOutputStreamOperator<LogBean> beanStreamWithWaterMark = beanStream.assignTimestampsAndWatermarks(
                WatermarkStrategy.<LogBean>forBoundedOutOfOrderness(Duration.ZERO)
                        .withTimestampAssigner((logBean, recordTimestamp) -> logBean.getTimeStamp()));

        // Key the event stream by device id so per-device state can be kept.
        KeyedStream<LogBean, String> keyedStream = beanStreamWithWaterMark.keyBy(LogBean::getDeviceId);

        // Connect the keyed event stream with the broadcast rules so rules can be
        // applied (and updated) dynamically at runtime.
        SingleOutputStreamOperator<MatchResult> res =
                keyedStream.connect(broadcastStream).process(new MatchFunctionV5());

        // Sink: print for now (replace with a Kafka sink for production).
        res.print();

        FlinkUtils.env.execute();
    }

}
