package com.atguigu.bigdata.chapter06;

import com.atguigu.bigdata.bean.OrderEvent;
import com.atguigu.bigdata.bean.TxEvent;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.co.CoProcessFunction;
import org.apache.flink.util.Collector;

import java.util.HashMap;

/**
 * @Author lzc
 * @Date 2022/9/3 13:59
 */
/**
 * Order/receipt reconciliation job: connects the "pay" order events with the
 * third-party transaction (receipt) events, keyed on the shared transaction id,
 * and emits a confirmation line for every order whose payment has a matching
 * receipt — regardless of which side arrives first.
 */
public class Flink04_Project_Order {
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        conf.setInteger("rest.port", 2000);
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(conf);
        env.setParallelism(1);

        // Order events parsed as: orderId,eventType,txId,eventTime.
        // Only "pay" events take part in the reconciliation.
        SingleOutputStreamOperator<OrderEvent> orderEventStream = env
            .readTextFile("input/OrderLog.csv")
            .map(new MapFunction<String, OrderEvent>() {
                @Override
                public OrderEvent map(String value) throws Exception {
                    String[] data = value.split(",");
                    return new OrderEvent(
                        Long.valueOf(data[0]),
                        data[1],
                        data[2],
                        Long.valueOf(data[3])
                    );
                }
            })
            .filter(e -> "pay".equals(e.getEventType()));

        // Receipt events parsed as: txId,payChannel,eventTime.
        SingleOutputStreamOperator<TxEvent> txEventStream = env
            .readTextFile("input/ReceiptLog.csv")
            .map(new MapFunction<String, TxEvent>() {
                @Override
                public TxEvent map(String value) throws Exception {
                    String[] data = value.split(",");
                    return new TxEvent(
                        data[0],
                        data[1],
                        Long.valueOf(data[2])
                    );
                }
            });

        orderEventStream
            .connect(txEventStream)
            .keyBy(OrderEvent::getTxId, TxEvent::getTxId)
            .process(new CoProcessFunction<OrderEvent, TxEvent, String>() {

                // Buffers for whichever side of a txId pair arrives first.
                // NOTE(review): plain HashMaps are neither checkpointed nor
                // scoped per key; this only works because parallelism is 1.
                // Production code should use Flink keyed MapState instead.
                HashMap<String, OrderEvent> orderEventMap = new HashMap<>();
                HashMap<String, TxEvent> txEventMap = new HashMap<>();

                @Override
                public void processElement1(OrderEvent orderEvent,
                                            Context ctx,
                                            Collector<String> out) throws Exception {
                    // A pay event arrived: check whether its receipt is already buffered.
                    TxEvent txEvent = txEventMap.get(orderEvent.getTxId());

                    if (txEvent != null) {
                        out.collect("订单: " + orderEvent.getOrderId() + " 对账成功... ");
                        // Fix: release the matched receipt so the buffer does not
                        // grow without bound and the entry cannot match twice.
                        txEventMap.remove(orderEvent.getTxId());
                    } else {
                        // Receipt not seen yet: buffer the pay event for later.
                        orderEventMap.put(orderEvent.getTxId(), orderEvent);
                    }
                }

                @Override
                public void processElement2(TxEvent value,
                                            Context ctx,
                                            Collector<String> out) throws Exception {
                    // A receipt arrived: check whether its pay event is already buffered.
                    OrderEvent orderEvent = orderEventMap.get(value.getTxId());

                    if (orderEvent != null) {
                        out.collect("订单: " + orderEvent.getOrderId() + " 对账成功... ");
                        // Fix: release the matched pay event (same reason as above).
                        orderEventMap.remove(value.getTxId());
                    } else {
                        // Pay event not seen yet: buffer the receipt for later.
                        txEventMap.put(value.getTxId(), value);
                    }
                }
            })
            .print();

        // Propagate failures instead of swallowing them with printStackTrace():
        // a failed job must not exit with status 0.
        env.execute();
    }
}
