package com.atguigu.flink.chapter03_exec1;

import org.apache.flink.api.common.RuntimeExecutionMode;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.java.functions.KeySelector;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.streaming.api.datastream.ConnectedStreams;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.co.CoProcessFunction;
import org.apache.flink.util.Collector;

import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;

/*
34761
34729
34766
34735
34753
34767
34737
34756
34763
34750
34739
34747
34760
34736
34738
34754
34759
34730
34734
34755
34757
34740
34733
34742
34743
34752
34765
34746
34744
34749
34741
34762
34751
34758
34732
34748
34764
34745

 */
/**
 * Reconciles orders against payment receipts by transaction id (txid).
 *
 * <p>The two streams are connected and keyed by txid; whichever side arrives
 * first is buffered until its counterpart shows up, at which point the matched
 * order id is emitted and the buffered entry is evicted.
 */
public class Demo6_Account
{
    public static void main(String[] args) throws Exception {

        CoProcessFunction<Tuple2<String, String>, String, String> myProcess = new CoProcessFunction<Tuple2<String, String>, String, String>()
        {
            // txid -> orderId for orders still waiting for their receipt.
            // NOTE(review): plain operator fields, not Flink managed state —
            // fine for this batch-mode exercise, but not fault-tolerant in streaming.
            Map<String, String> orderCache = new HashMap<>();

            // txids of receipts still waiting for their order.
            Set<String> receiptCache = new HashSet<>();

            // Handles the order stream; order = (txid, orderId).
            @Override
            public void processElement1(Tuple2<String, String> order, Context ctx, Collector<String> out) throws Exception {

                // remove() instead of contains(): evict the buffered receipt on match,
                // otherwise the cache grows forever and a repeated txid would double-match.
                if (receiptCache.remove(order.f0)) {
                    // Reconciliation succeeded — emit the order id.
                    out.collect(order.f1);
                } else {
                    // Order arrived first; buffer it until its receipt shows up.
                    orderCache.put(order.f0, order.f1);
                }

            }

            // Handles the receipt stream; receipt = txid.
            @Override
            public void processElement2(String receipt, Context ctx, Collector<String> out) throws Exception {

                // remove() instead of get(): evict the matched order from the buffer.
                String orderId = orderCache.remove(receipt);

                if (orderId != null) {
                    out.collect(orderId);
                } else {
                    // Receipt arrived first; buffer its txid until the order shows up.
                    receiptCache.add(receipt);
                }

            }
        };

        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        // Bounded inputs — run in batch mode.
        env.setRuntimeMode(RuntimeExecutionMode.BATCH);

        // OrderLog.csv line format (comma-separated): orderId at index 0, txid at index 2.
        SingleOutputStreamOperator<Tuple2<String, String>> orderLogDS = env
            .readTextFile("data/OrderLog.csv")
            .map(new MapFunction<String, Tuple2<String, String>>()
            {
                @Override
                public Tuple2<String, String> map(String value) throws Exception {
                    String[] words = value.split(",");
                    // txid is the key; orderId is the payload.
                    return Tuple2.of(words[2], words[0]);
                }
            });

        // ReceiptLog.csv: txid is the first comma-separated field.
        SingleOutputStreamOperator<String> receiptLogDS = env
            .readTextFile("data/ReceiptLog.csv")
            .map(line -> (line.split(","))[0]);

        ConnectedStreams<Tuple2<String, String>, String> connectedStreams = orderLogDS
            .connect(receiptLogDS)
            // Key both sides by txid so matching pairs land in the same subtask.
            .keyBy(t -> t.f0, txid -> txid);

        connectedStreams
            .process(myProcess)
            .print().setParallelism(1);

        // Let failures propagate instead of swallowing them with printStackTrace().
        env.execute();
    }
}
