package com.fwmagic.flink.projectcase.broadcaststate;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONArray;
import com.alibaba.fastjson.JSONObject;
import com.fwmagic.flink.projectcase.utils.FlinkUtils;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.api.common.state.BroadcastState;
import org.apache.flink.api.common.state.MapStateDescriptor;
import org.apache.flink.api.common.state.ReadOnlyBroadcastState;
import org.apache.flink.api.java.tuple.Tuple3;
import org.apache.flink.api.java.tuple.Tuple4;
import org.apache.flink.api.java.utils.ParameterTool;
import org.apache.flink.streaming.api.datastream.BroadcastStream;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.ProcessFunction;
import org.apache.flink.streaming.api.functions.co.BroadcastProcessFunction;
import org.apache.flink.util.Collector;

import java.util.Iterator;
import java.util.Map;

/**
 * DataStream中BroadcastState的使用
 *
 * 需求步骤：
 *  1、canal订阅binlog指定数据库下的数据表，监听到数据表中数据的变化，
 *  将数据表中更新的数据以json的形式发送到kafka
 *  2、flink消费kafka中的数据，将数据转化后维护到BroadcastState中
 *  3、flink读取其他的流数据，并和BroadcastState中的数据[broadcastStream]进行连接[connect]
 *  4、在process方法的
 *      processElement中关联两个流中的数据
 *      processBroadcastElement中维护最新的广播的数据[增删改]
 *
 * kafka中dic这个topic中的数据：
 *  {"type":"INSERT","data":[{"id":"A1","name":"新人礼包"}]}
 *  {"type":"INSERT","data":[{"id":"A2","name":"促销活动"}]}
 *  {"type":"INSERT","data":[{"id":"A3","name":"双11活动"}]}
 *  {"type":"UPDATE","data":[{"id":"A3","name":"双12活动"}]}
 *  {"type":"DELETE","data":[{"id":"A3","name":"双22活动"}]}
 *
 *  SocketStream中的数据：
 *      u001,A1,1990-12-12
 *      u002,A2,1990-10-10
 */
public class BroadcastStateDemo {

    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = FlinkUtils.getEnv();
        env.setParallelism(4);
        // args[0] points to a properties file holding the Kafka consumer configuration.
        ParameterTool parameterTool = ParameterTool.fromPropertiesFile(args[0]);

        // Stream of canal binlog change events (JSON strings) from Kafka.
        DataStream<String> dicDataStream = FlinkUtils.createKafkaSource(parameterTool, SimpleStringSchema.class);

        // Parse each binlog JSON record into Tuple3<id, name, type>, e.g. ("A1", "新人礼包", "INSERT").
        // One input record may carry several rows in its "data" array.
        SingleOutputStreamOperator<Tuple3<String, String, String>> tp3DataStream = dicDataStream.process(new ProcessFunction<String, Tuple3<String, String, String>>() {
            @Override
            public void processElement(String line, Context ctx, Collector<Tuple3<String, String, String>> out) throws Exception {
                try {
                    JSONObject jsonObject = JSON.parseObject(line);
                    JSONArray jsonArray = jsonObject.getJSONArray("data");

                    String type = jsonObject.getString("type");
                    // Only DML events are relevant for maintaining the dimension table.
                    if ("INSERT".equals(type) || "UPDATE".equals(type) || "DELETE".equals(type)) {
                        for (int i = 0; i < jsonArray.size(); i++) {
                            JSONObject object = jsonArray.getJSONObject(i);
                            String id = object.getString("id");
                            String name = object.getString("name");
                            out.collect(Tuple3.of(id, name, type));
                        }
                    }
                } catch (Exception e) {
                    // Best-effort parsing: log the offending record and keep the job alive.
                    // TODO(review): use a proper logger / side output instead of stderr.
                    System.err.println("Failed to parse binlog record: " + line);
                    e.printStackTrace();
                }
            }
        });

        // State descriptor for the broadcast state: activity id -> activity name.
        // Diamond form keeps the construction type-safe (no raw-type unchecked warning).
        MapStateDescriptor<String, String> mapStateDescriptor =
                new MapStateDescriptor<>("map-tp3-state", String.class, String.class);
        // Broadcast the dimension data to every downstream subtask.
        BroadcastStream<Tuple3<String, String, String>> broadcastStream = tp3DataStream.broadcast(mapStateDescriptor);

        /*
         * The main (non-broadcast) stream to be enriched.
         * Record format: uid,activityId,date — e.g. u001,A1,1990-12-12
         */
        DataStreamSource<String> lines = env.socketTextStream("localhost", 8888);

        SingleOutputStreamOperator<Tuple3<String, String, String>> inputDataStream = lines.map(new MapFunction<String, Tuple3<String, String, String>>() {
            @Override
            public Tuple3<String, String, String> map(String line) throws Exception {
                String[] fields = line.split(",");
                return Tuple3.of(fields[0], fields[1], fields[2]);
            }
        });

        // Connect the main stream with the broadcast dimension data and enrich each event.
        SingleOutputStreamOperator<Tuple4<String, String, String, String>> connected = inputDataStream.connect(broadcastStream).process(new BroadcastProcessFunction<Tuple3<String, String, String>, Tuple3<String, String, String>, Tuple4<String, String, String, String>>() {
            /**
             * Handles records from the socket stream, e.g. (u001, A1, 1990-12-12).
             * Looks up the activity name in the (read-only) broadcast state and emits
             * the enriched Tuple4<uid, activityId, activityName, date>.
             */
            @Override
            public void processElement(Tuple3<String, String, String> tp3, ReadOnlyContext ctx, Collector<Tuple4<String, String, String, String>> out) throws Exception {
                ReadOnlyBroadcastState<String, String> mapState = ctx.getBroadcastState(mapStateDescriptor);
                String uid = tp3.f0;
                String aid = tp3.f1;
                String date = tp3.f2;

                // May be null if the activity id has not been broadcast (yet) or was deleted.
                String name = mapState.get(aid);
                out.collect(Tuple4.of(uid, aid, name, date));
            }

            /**
             * Handles broadcast records: applies INSERT/UPDATE/DELETE from the binlog
             * to the broadcast state so every subtask sees the latest dimension data.
             */
            @Override
            public void processBroadcastElement(Tuple3<String, String, String> tp, Context ctx, Collector<Tuple4<String, String, String, String>> out) throws Exception {
                String aid = tp.f0;
                String name = tp.f1;
                String type = tp.f2;

                // State lives in each subtask's memory: TaskManager -> TaskSlot -> SubTask.
                BroadcastState<String, String> mapState = ctx.getBroadcastState(mapStateDescriptor);
                if ("DELETE".equals(type)) {
                    mapState.remove(aid);
                } else { // INSERT or UPDATE both upsert the mapping.
                    mapState.put(aid, name);
                }

                // Debug output: dump the current contents of the broadcast state.
                for (Map.Entry<String, String> entry : mapState.entries()) {
                    System.out.println("key:" + entry.getKey() + ",value:" + entry.getValue());
                }
            }
        });

        // Print the enriched records.
        connected.print();

        env.execute("BroadcastStateDemo");
    }
}
