package com.atguigu.cep;

import com.atguigu.bean.OrderEvent;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.functions.FilterFunction;
import org.apache.flink.cep.CEP;
import org.apache.flink.cep.PatternSelectFunction;
import org.apache.flink.cep.PatternStream;
import org.apache.flink.cep.PatternTimeoutFunction;
import org.apache.flink.cep.nfa.aftermatch.AfterMatchSkipStrategy;
import org.apache.flink.cep.pattern.Pattern;
import org.apache.flink.cep.pattern.conditions.SimpleCondition;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.datastream.KeyedStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.util.OutputTag;

import java.time.Duration;
import java.util.List;
import java.util.Map;

/**
 * Flink CEP demo: detects abnormal order flows from a CSV of order events.
 *
 * <p>Pattern: an optional "create" event strictly followed ({@code next}) by a
 * "pay" event, within 30 minutes. Three outcomes are surfaced:
 * <ul>
 *   <li>create with no pay (or pay too late) — emitted via the timeout side output;</li>
 *   <li>pay with no create — emitted from the normal select path;</li>
 *   <li>create + pay in time — a normal order, suppressed (empty string, filtered out).</li>
 * </ul>
 */
public class Flink03_CEP_HAdemo2 {

    /**
     * Single shared side-output tag for timed-out (create-only) matches.
     * Defined once so the tag used in {@code select(...)} and the one used in
     * {@code getSideOutput(...)} are guaranteed to agree; the anonymous subclass
     * ({@code {}}) is required by Flink to capture the generic type.
     */
    private static final OutputTag<String> TIMEOUT_TAG = new OutputTag<String>("singleCreate") {};

    public static void main(String[] args) throws Exception {
        Configuration configuration = new Configuration();
        configuration.setInteger("rest.port", 10000);
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(configuration);
        env.setParallelism(2);

        // Read order log lines "orderId,eventType,txId,timestamp", keyed by order id.
        // Timestamps in the file are in seconds; Flink event time is in milliseconds.
        KeyedStream<OrderEvent, Long> stream = env
                .readTextFile("input/OrderLog.csv")
                .map(line -> {
                    String[] data = line.split(",");
                    return new OrderEvent(
                            Long.valueOf(data[0]),
                            data[1],
                            data[2],
                            Long.parseLong(data[3]) * 1000
                    );
                })
                .assignTimestampsAndWatermarks(
                        WatermarkStrategy
                                // Tolerate up to 3 seconds of out-of-order events.
                                .<OrderEvent>forBoundedOutOfOrderness(Duration.ofSeconds(3))
                                .withTimestampAssigner((log, ts) -> log.getEventTime())
                )
                .keyBy(OrderEvent::getOrderId);

        // Pattern definition: optional "create" followed directly by "pay" within 30 min.
        Pattern<OrderEvent, OrderEvent> pattern = Pattern
                .<OrderEvent>begin("create", AfterMatchSkipStrategy.skipPastLastEvent())
                // Only accept OrderEvents of type "create" for this stage.
                .where(new SimpleCondition<OrderEvent>() {
                    @Override
                    public boolean filter(OrderEvent value) throws Exception {
                        return "create".equals(value.getEventType());
                    }
                })
                // optional(): the "create" stage may be absent, so a lone "pay" still matches.
                .optional()
                // The very next event must be a "pay" (within 30 minutes).
                .next("pay")
                .where(new SimpleCondition<OrderEvent>() {
                    @Override
                    public boolean filter(OrderEvent value) throws Exception {
                        return "pay".equals(value.getEventType());
                    }
                })
                .within(Time.minutes(30));

        // Apply the pattern to the keyed stream.
        PatternStream<OrderEvent> ps = CEP.pattern(stream, pattern);

        // Extract results: timed-out partial matches go to the side output,
        // complete matches go through the PatternSelectFunction.
        SingleOutputStreamOperator<String> select = ps
                .select(
                        // Side output receives what the PatternTimeoutFunction produces.
                        TIMEOUT_TAG,
                        // Timeout handler: invoked for partial matches that ran out the
                        // 30-minute window — i.e. a "create" that was never paid in time.
                        new PatternTimeoutFunction<OrderEvent, String>() {
                            @Override
                            public String timeout(Map<String, List<OrderEvent>> pattern, long timeoutTimestamp) throws Exception {
                                // Only "create"-stage matches can time out, so "create" is present here.
                                OrderEvent create = pattern.get("create").get(0);
                                return create.getOrderId() + "只有create没有pay或者是pay超时了！";
                            }
                        },
                        // Complete-match handler: either create+pay, or (because of
                        // optional()) a lone pay — only the latter is abnormal and reported.
                        new PatternSelectFunction<OrderEvent, String>() {
                            @Override
                            public String select(Map<String, List<OrderEvent>> pattern) throws Exception {
                                if (!pattern.containsKey("create")) {
                                    return pattern.get("pay").get(0).getOrderId() + "只有pay没有create，或者是pay进行超时支付了！";
                                } else {
                                    // Normal order: emit an empty sentinel, filtered out below.
                                    return "";
                                }
                            }
                        });

        // Drop the empty sentinels, union with the timeout side output, and print.
        select.filter((FilterFunction<String>) value -> !value.isEmpty())
                .union(select.getSideOutput(TIMEOUT_TAG))
                .print();

        // Propagate job failures instead of swallowing them: main declares
        // `throws Exception`, so a failed job terminates with a non-zero status
        // and the full cause chain, rather than printing a trace and exiting "cleanly".
        env.execute();
    }
}
