package com.study.flink.java.day07_join_count;

import com.study.flink.java.day07_join_count.entity.ActBean;
import com.study.flink.java.utils.FlinkUtils;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.functions.RichMapFunction;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.api.common.state.*;
import org.apache.flink.api.java.tuple.Tuple;
import org.apache.flink.api.java.tuple.Tuple3;
import org.apache.flink.api.java.tuple.Tuple4;
import org.apache.flink.api.java.utils.ParameterTool;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.shaded.guava18.com.google.common.hash.BloomFilter;
import org.apache.flink.shaded.guava18.com.google.common.hash.Funnels;
import org.apache.flink.streaming.api.datastream.*;
import org.apache.flink.streaming.api.functions.ProcessFunction;
import org.apache.flink.streaming.api.functions.co.BroadcastProcessFunction;
import org.apache.flink.util.Collector;

import java.util.Iterator;
import java.util.Map;
import java.util.UUID;

/**
 * 广播数据的使用，每个task slots都存一份数据关联使用
 */
public class ActivityCountV2BroadcastState {

    public static void main(String[] args) throws Exception {
        ParameterTool parameters = ParameterTool.fromPropertiesFile(args[0]);

        // Dictionary (rule) stream from Kafka. A random consumer-group id forces the
        // full dictionary topic to be re-read on every restart, because the broadcast
        // state below is rebuilt from scratch each time the job starts.
        // Record format: <activityId>,<activityName>,<opType>, e.g.
        //   A1,新人礼包,IN
        //   A2,月末活动,IN
        //   A3,周末活动,IN
        //   A2,月月活动,UP
        //   A3,周末活动,DE
        DataStream<String> dictDataStream = FlinkUtils.createKafkaStream(
                parameters, "activity_dict", UUID.randomUUID().toString(), SimpleStringSchema.class);

        // Parse each CSV dictionary record into (activityId, activityName, opType).
        SingleOutputStreamOperator<Tuple3<String, String, String>> tpDictDataStream =
                dictDataStream.process(new ProcessFunction<String, Tuple3<String, String, String>>() {
            @Override
            public void processElement(String dictStr, Context context, Collector<Tuple3<String, String, String>> out) throws Exception {
                String[] dict = dictStr.split(",");
                out.collect(Tuple3.of(dict[0], dict[1], dict[2]));
            }
        });

        // Broadcast-state descriptor (activityId -> activityName); the dictionary is
        // replicated to every task slot so the join needs no shuffle.
        MapStateDescriptor<String, String> broadcastDescriptor = new MapStateDescriptor<>("dic-state", String.class, String.class);
        BroadcastStream<Tuple3<String, String, String>> broadcastStateStream = tpDictDataStream.broadcast(broadcastDescriptor);

        // Main activity-event stream from a socket.
        // Record format: <uid>,<activityId>,<date>, e.g.
        //   u001,A1,2020-01-09
        //   u001,A2,2020-01-02
        //   u002,A3,2020-11-11
        DataStreamSource<String> lines = FlinkUtils.getEnv().socketTextStream("node02", 8888);
        SingleOutputStreamOperator<Tuple3<String, String, String>> tp2DataStream =
                lines.map(new MapFunction<String, Tuple3<String, String, String>>() {
            @Override
            public Tuple3<String, String, String> map(String s) throws Exception {
                String[] fields = s.split(",");
                return Tuple3.of(fields[0], fields[1], fields[2]);
            }
        });

        // Enrich the event stream with the broadcast dictionary.
        SingleOutputStreamOperator<Tuple4<String, String, String, String>> connected =
                tp2DataStream.connect(broadcastStateStream).process(
                        new BroadcastProcessFunction<Tuple3<String, String, String>, Tuple3<String, String, String>, Tuple4<String, String, String, String>>() {

            /**
             * Handles an activity event: looks up the activity name in the read-only
             * broadcast state and emits (uid, activityId, activityName, date).
             * The name is {@code null} if the dictionary entry has not arrived yet
             * or was deleted.
             */
            @Override
            public void processElement(Tuple3<String, String, String> input, ReadOnlyContext ctx, Collector<Tuple4<String, String, String, String>> out) throws Exception {
                ReadOnlyBroadcastState<String, String> mapState = ctx.getBroadcastState(broadcastDescriptor);
                String uid = input.f0;
                String aid = input.f1;
                String date = input.f2;

                String name = mapState.get(aid);
                out.collect(Tuple4.of(uid, aid, name, date));
            }

            /**
             * Handles a dictionary record: applies the rule change to the broadcast
             * state held by every task slot.
             */
            @Override
            public void processBroadcastElement(Tuple3<String, String, String> tp, Context context, Collector<Tuple4<String, String, String, String>> out) throws Exception {
                String id = tp.f0;
                String name = tp.f1;
                String type = tp.f2;
                BroadcastState<String, String> mapState = context.getBroadcastState(broadcastDescriptor);
                // "IN" (insert) and "UP" (update) both upsert the entry; "DE" deletes it.
                // Any other opType is ignored.
                if ("IN".equals(type) || "UP".equals(type)) {
                    mapState.put(id, name);
                } else if ("DE".equals(type)) {
                    mapState.remove(id);
                }
                // Debug output: dump the current dictionary after each rule change.
                for (Map.Entry<String, String> entry : mapState.entries()) {
                    System.out.println("key:" + entry.getKey() + " value:" + entry.getValue());
                }
            }
        });

        connected.print();

        FlinkUtils.getEnv().execute("ActivityCountV2BroadcastState");
    }

}
