package yuekao6.tm2;

import com.alibaba.fastjson.JSON;
import org.apache.flink.api.common.state.StateTtlConfig;
import static org.apache.flink.api.common.state.StateTtlConfig.UpdateType.OnCreateAndWrite;
import org.apache.flink.streaming.api.functions.KeyedProcessFunction;
import org.apache.flink.util.Collector;
import com.alibaba.fastjson.JSONObject;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.cep.CEP;
import org.apache.flink.cep.PatternSelectFunction;
import org.apache.flink.cep.PatternStream;
import org.apache.flink.cep.PatternTimeoutFunction;
import org.apache.flink.cep.pattern.Pattern;
import org.apache.flink.cep.pattern.conditions.SimpleCondition;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.JoinedStreams;
import org.apache.flink.streaming.api.datastream.KeyedStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.co.ProcessJoinFunction;
import org.apache.flink.streaming.api.functions.windowing.ProcessAllWindowFunction;
import org.apache.flink.streaming.api.windowing.assigners.TumblingEventTimeWindows;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.streaming.api.windowing.windows.TimeWindow;
import org.apache.flink.util.Collector;
import org.apache.flink.util.OutputTag;
import redis.clients.jedis.Jedis;
import yuekao6.entity.OrderCargo;
import yuekao6.entity.OrderInfo;
import yuekao6.util.KafkaUtil;

import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.time.Duration;
import java.util.HashSet;
import java.util.List;
import java.util.Map;

public class JoinRelevancy {

    /**
     * Flink streaming job over the Kafka topic {@code tms_ods} that:
     * <ol>
     *   <li>interval-joins the {@code order_cargo} stream with the
     *       {@code order_info} stream on the order id (streams may drift
     *       apart by at most 5 seconds);</li>
     *   <li>uses Flink CEP to detect orders not paid within 20 minutes of
     *       creation and reports them via a side output;</li>
     *   <li>computes the average order amount over 5-minute tumbling
     *       event-time windows and prints it to the console.</li>
     * </ol>
     *
     * @param args unused
     * @throws Exception if job construction or execution fails
     */
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);

        // Task 1: join order_cargo with order_info on the order id.
        // The two streams can be out of order / delayed relative to each
        // other by at most 5 seconds, so an interval join with a symmetric
        // ±5s window is used; the interval join keeps each element in state
        // only for the duration of its interval, which satisfies the 5s
        // retention (TTL) requirement on the order_info state.
        DataStreamSource<String> streamSource = env.addSource(KafkaUtil.kafkaSource("tms_ods"));

        SingleOutputStreamOperator<OrderCargo> ordercargo = streamSource
                .filter(x -> JSON.parseObject(x).getString("table").equals("order_cargo"))
                .map(new MapFunction<String, OrderCargo>() {
                    @Override
                    public OrderCargo map(String s) throws Exception {
                        // The CDC envelope carries the row payload under "data".
                        JSONObject data = JSON.parseObject(s).getJSONObject("data");
                        return JSON.parseObject(data.toJSONString(), OrderCargo.class);
                    }
                })
                .assignTimestampsAndWatermarks(WatermarkStrategy
                        .<OrderCargo>forBoundedOutOfOrderness(Duration.ofSeconds(20))
                        .withTimestampAssigner((event, timestamp) -> {
                            try {
                                // SimpleDateFormat is not thread-safe, so a fresh
                                // instance is created per record on purpose.
                                return new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss'Z'")
                                        .parse(event.getCreate_time()).getTime();
                            } catch (ParseException e) {
                                throw new RuntimeException(e);
                            }
                        }));

        SingleOutputStreamOperator<OrderInfo> orderinfo = streamSource
                .filter(x -> JSON.parseObject(x).getString("table").equals("order_info"))
                .map(new MapFunction<String, OrderInfo>() {
                    @Override
                    public OrderInfo map(String s) throws Exception {
                        JSONObject data = JSON.parseObject(s).getJSONObject("data");
                        return JSON.parseObject(data.toJSONString(), OrderInfo.class);
                    }
                })
                .assignTimestampsAndWatermarks(WatermarkStrategy
                        .<OrderInfo>forBoundedOutOfOrderness(Duration.ofSeconds(20))
                        .withTimestampAssigner((event, timestamp) -> {
                            try {
                                return new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss'Z'")
                                        .parse(event.getCreate_time()).getTime();
                            } catch (ParseException e) {
                                throw new RuntimeException(e);
                            }
                        }));

        // order_info.id matches order_cargo.order_id. The requirement allows
        // up to 5 seconds of skew between the streams, hence Time.seconds(±5)
        // (the original Time.milliseconds(-2)/(3) bounds were three orders of
        // magnitude too small to ever match delayed data).
        SingleOutputStreamOperator<Tuple2<OrderInfo, OrderCargo>> joindara = orderinfo
                .keyBy(x -> x.getId())
                .intervalJoin(ordercargo.keyBy(x -> x.getOrder_id()))
                .between(Time.seconds(-5), Time.seconds(5))
                .process(new ProcessJoinFunction<OrderInfo, OrderCargo, Tuple2<OrderInfo, OrderCargo>>() {
                    @Override
                    public void processElement(OrderInfo orderInfo, OrderCargo orderCargo, ProcessJoinFunction<OrderInfo, OrderCargo, Tuple2<OrderInfo, OrderCargo>>.Context context, Collector<Tuple2<OrderInfo, OrderCargo>> collector) throws Exception {
                        collector.collect(new Tuple2<>(orderInfo, orderCargo));
                    }
                });
//        joindara.print("joined:");

        // Task 2: Flink CEP payment-timeout detection. An order must move
        // from status 60010 (created) to 60020 (paid) within 20 minutes;
        // otherwise the timed-out order is emitted on the side output.
        KeyedStream<OrderInfo, String> keybay = orderinfo.keyBy(x -> x.getOrder_no());
        Pattern<OrderInfo, OrderInfo> within = Pattern.<OrderInfo>begin("one").where(
                new SimpleCondition<OrderInfo>() {
                    @Override
                    public boolean filter(OrderInfo event) {
                        // 60010: order created, awaiting payment.
                        return event.getStatus().equals("60010");
                    }
                }
        ).next("two").where(
                new SimpleCondition<OrderInfo>() {
                    @Override
                    public boolean filter(OrderInfo subEvent) {
                        // 60020: order paid.
                        return subEvent.getStatus().equals("60020");
                    }
                }
        ).within(Time.minutes(20));

        PatternStream<OrderInfo> pattern = CEP.pattern(keybay, within);

        // Side output carrying the orders that timed out before payment.
        OutputTag<OrderInfo> outputTag = new OutputTag<OrderInfo>("outpag") {
        };

        SingleOutputStreamOperator<OrderInfo> select = pattern.select(outputTag, new PatternTimeoutFunction<OrderInfo, OrderInfo>() {
            @Override
            public OrderInfo timeout(Map<String, List<OrderInfo>> map, long l) throws Exception {
                // Pattern timed out: only the "one" (created) event exists.
                return map.get("one").get(0);
            }
        }, new PatternSelectFunction<OrderInfo, OrderInfo>() {
            @Override
            public OrderInfo select(Map<String, List<OrderInfo>> map) throws Exception {
                // Fully matched (paid in time): emit the creation event.
                return map.get("one").get(0);
            }
        });

        select.print();
        select.getSideOutput(outputTag).print("超时未支付数据:");

        // Task 3: average order amount over 5-minute tumbling event-time
        // windows (the original used Time.seconds(5), contradicting the
        // stated 5-minute requirement).
        SingleOutputStreamOperator<String> process = orderinfo.windowAll(TumblingEventTimeWindows.of(Time.minutes(5)))
                .process(new ProcessAllWindowFunction<OrderInfo, String, TimeWindow>() {
                    @Override
                    public void process(ProcessAllWindowFunction<OrderInfo, String, TimeWindow>.Context context,
                                        Iterable<OrderInfo> iterable,
                                        Collector<String> out) throws Exception {
                        // Deduplicate by order id so repeated change records
                        // for the same order count once in the denominator.
                        HashSet<String> set = new HashSet<>();
                        Double sumprice = 0.0;
                        for (OrderInfo one : iterable) {
                            set.add(one.getId());
                            sumprice += one.getAmount();
                        }
                        out.collect("所有订单的平均消费金额:" + (sumprice / set.size()));
                    }
                });
        // The task requires printing the average to the console; the original
        // left this commented out so nothing was ever emitted.
        process.print();

        env.execute();
    }
}
