package com.galeno.jobs;

import com.galeno.pojo.LogBean;
import com.galeno.pojo.MatchResult;
import com.galeno.udfs.JSON2LogBean;
import com.galeno.udfs.RuleMatchFunction;
import com.galeno.utils.FlinkUtils;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.KeyedStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;

/**
 * @author galeno
 * @Title:
 * @Description:
 * @date 2021/11/11 9:17
 */
public class DemoOne {

    /**
     * Entry point for the rule-matching streaming job.
     *
     * <p>Pipeline: Kafka source (raw JSON strings) → parse into {@link LogBean}
     * → key by device id → apply {@link RuleMatchFunction} → print results.
     *
     * @param args args[0] is the configuration path handed to
     *             {@link FlinkUtils#createKafkaStream}
     * @throws Exception propagated from Flink job execution
     */
    public static void main(String[] args) throws Exception {
        // Fail fast with a clear message instead of an opaque
        // ArrayIndexOutOfBoundsException when launched without arguments.
        if (args.length < 1) {
            throw new IllegalArgumentException("Usage: DemoOne <config-path>");
        }

        // Source: raw JSON event strings from Kafka.
        DataStream<String> kafkaStream = FlinkUtils.createKafkaStream(args[0], SimpleStringSchema.class);

        // Clean/transform: parse each JSON string into a LogBean.
        SingleOutputStreamOperator<LogBean> beanStream = kafkaStream.process(new JSON2LogBean());

        // Key by device id so rule-matching state is scoped per device.
        KeyedStream<LogBean, String> keyedStream = beanStream.keyBy(LogBean::getDeviceId);

        // Rule matching: renamed local to lowerCamelCase so it no longer
        // shadows the RuleMatchFunction type name.
        RuleMatchFunction ruleMatchFunction = new RuleMatchFunction();
        SingleOutputStreamOperator<MatchResult> res = keyedStream.process(ruleMatchFunction);

        // Sink: print matched results (demo/debug sink).
        res.print();

        // Submit the job graph to the shared environment held by FlinkUtils.
        FlinkUtils.env.execute();
    }
}
