package com.poetic.order;

import com.poetic.order.domain.OrderEvent;
import com.poetic.order.domain.ReceiptEvent;
import com.poetic.order.function.TxPayMatch;
import org.apache.commons.lang3.StringUtils;
import org.apache.flink.api.common.eventtime.SerializableTimestampAssigner;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.functions.FilterFunction;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.java.functions.KeySelector;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.streaming.api.TimeCharacteristic;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.KeyedStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.util.OutputTag;

import java.time.Duration;

/**
 * <pre>
 * 对于订单支付事件，用户支付完成其实并不算完，我们还得确认平台账户上是否到账了。
 * 而往往这会来自不同的日志信息，所以我们要同时读入两条流的数据来做合并处理。
 * 这里我们利用 connect 将两条流进行连接，然后用自定义的 CoProcessFunction 进行处理。
 * Created by lianghuikun on 2020-09-16.
 * </pre>
 *
 * @author lianghuikun
 */
public class TxMacthDetect {

    /** Default order-event CSV, used when no path is supplied on the command line. */
    private static final String DEFAULT_ORDER_PATH =
            "/Users/lianghuikun/indigo/poetic/flink/user-behavior-anylysis/data/OrderLog.csv";

    /** Default receipt-event CSV, used when no second path is supplied on the command line. */
    private static final String DEFAULT_RECEIPT_PATH =
            "/Users/lianghuikun/indigo/poetic/flink/user-behavior-anylysis/data/ReceiptLog.csv";

    /**
     * Entry point. Reads the order-event and receipt-event logs, keys both streams by
     * transaction id, connects them, and matches pay events against receipt events with
     * {@link TxPayMatch}. Matched pairs go to the main output; unmatched events of either
     * kind are routed to side outputs.
     *
     * @param args optional: {@code args[0]} = order log path, {@code args[1]} = receipt
     *             log path; the original hard-coded paths remain the defaults
     * @throws Exception if the Flink job fails to execute
     */
    public static void main(String[] args) throws Exception {
        // Allow the input files to be overridden from the command line while keeping
        // the original hard-coded locations as defaults (backward compatible).
        String orderPath = args.length > 0 ? args[0] : DEFAULT_ORDER_PATH;
        String receiptPath = args.length > 1 ? args[1] : DEFAULT_RECEIPT_PATH;

        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        // NOTE(review): deprecated since Flink 1.12 (event time is the default there);
        // kept for compatibility with the Flink version this project builds against.
        env.setStreamTimeCharacteristic(TimeCharacteristic.EventTime);
        env.setParallelism(1);

        // 读取订单事件流 — parse, keep only events that carry a transaction id,
        // assign event-time watermarks, and key by transaction id.
        KeyedStream<OrderEvent, String> orderEventStream = env.readTextFile(orderPath)
                .map(new MapFunction<String, OrderEvent>() {
                    @Override
                    public OrderEvent map(String data) throws Exception {
                        // CSV layout: orderId,eventType,txId,eventTime (epoch seconds).
                        // Fields are trimmed for consistency with the receipt parser below.
                        String[] fields = data.split(",");
                        return new OrderEvent(Long.valueOf(fields[0].trim()),
                                fields[1].trim(),
                                fields[2].trim(),
                                Long.valueOf(fields[3].trim()));
                    }
                })
                .filter(new FilterFunction<OrderEvent>() {
                    @Override
                    public boolean filter(OrderEvent value) throws Exception {
                        // Only pay events carry a transaction id; drop everything else.
                        return StringUtils.isNotBlank(value.getTxId());
                    }
                })
                .assignTimestampsAndWatermarks(WatermarkStrategy.<OrderEvent>forBoundedOutOfOrderness(Duration.ofSeconds(1))
                        .withTimestampAssigner(new SerializableTimestampAssigner<OrderEvent>() {
                            @Override
                            public long extractTimestamp(OrderEvent element, long recordTimestamp) {
                                // Event time is stored in seconds; Flink expects milliseconds.
                                return element.getEventTime() * 1000;
                            }
                        }))
                .keyBy(new KeySelector<OrderEvent, String>() {
                    @Override
                    public String getKey(OrderEvent value) throws Exception {
                        return value.getTxId();
                    }
                });

        // 读取到账事件流 (swap in the socket source below for interactive testing).
        DataStreamSource<String> receiptEventStream = env.readTextFile(receiptPath);
//        DataStreamSource<String> receiptEventStream = env.socketTextStream("localhost", 9999);

        KeyedStream<ReceiptEvent, String> receiptEventDataStream = receiptEventStream.map(new MapFunction<String, ReceiptEvent>() {
            @Override
            public ReceiptEvent map(String data) throws Exception {
                // CSV layout: txId,payChannel,eventTime (epoch seconds).
                String[] fields = data.split(",");
                return new ReceiptEvent(fields[0].trim(), fields[1].trim(), Long.valueOf(fields[2].trim()));
            }
        }).assignTimestampsAndWatermarks(WatermarkStrategy.<ReceiptEvent>forBoundedOutOfOrderness(Duration.ofSeconds(1))
                .withTimestampAssigner(new SerializableTimestampAssigner<ReceiptEvent>() {
                    @Override
                    public long extractTimestamp(ReceiptEvent element, long recordTimestamp) {
                        // Seconds to milliseconds, same convention as the order stream.
                        return element.getEventTime() * 1000;
                    }
                })).keyBy(new KeySelector<ReceiptEvent, String>() {
            @Override
            public String getKey(ReceiptEvent value) throws Exception {
                return value.getTxId();
            }
        });

        // Side-output tags for events that never found their counterpart.
        // Tag ids normalized to match the variable names ("unmatchedReceipts" was
        // previously tagged "unmatchReceipts").
        OutputTag<OrderEvent> unmatchedPays = new OutputTag<OrderEvent>("unmatchedPays") {
        };
        OutputTag<ReceiptEvent> unmatchedReceipts = new OutputTag<ReceiptEvent>("unmatchedReceipts") {
        };

        // 将两条流连接起来，共同处理 — connect both keyed streams and match pays
        // against receipts in the custom CoProcessFunction.
        SingleOutputStreamOperator<Tuple2<OrderEvent, ReceiptEvent>> processedStream = orderEventStream.connect(receiptEventDataStream)
                .process(new TxPayMatch(unmatchedPays, unmatchedReceipts));

        processedStream.print("matched");
        processedStream.getSideOutput(unmatchedPays).print("unmatchedPays");
        processedStream.getSideOutput(unmatchedReceipts).print("unmatchedReceipts");
        env.execute("tx match job");
    }
}
