package javaVersion.learn.stateProcess.broadCastStateDemo;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.api.common.state.MapStateDescriptor;
import org.apache.flink.api.common.typeinfo.TypeHint;
import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.apache.flink.api.common.typeinfo.Types;
import org.apache.flink.api.java.tuple.Tuple4;
import org.apache.flink.streaming.api.datastream.BroadcastConnectedStream;
import org.apache.flink.streaming.api.datastream.BroadcastStream;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.ProcessFunction;
import org.apache.flink.streaming.api.functions.co.BroadcastProcessFunction;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer011;
import org.apache.flink.util.Collector;
import org.apache.kafka.clients.consumer.ConsumerConfig;

import java.util.Map;
import java.util.Properties;

/**
 * @program: myFlink
 * @description: 使用broadcast state动态的读取用户的详细信息
 * @author: WincoMa
 * @create: 2020-07-27 15:42
 **/
public class StreamBroadcastStateDemo {

    /**
     * Entry point: consumes JSON user-event records from Kafka, parses each into a
     * {@code Tuple4<userID, eventTime, eventType, productID>}, connects that event
     * stream with a broadcast stream of user-config data read from MySQL, and hands
     * the connected stream to {@code CustomeBroadcastProcessFunction} for enrichment.
     *
     * @param args command-line arguments (unused)
     * @throws Exception if the Flink job fails to build or execute
     */
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment senv = StreamExecutionEnvironment.getExecutionEnvironment();

        // Kafka consumer configuration.
        Properties prop = new Properties();
        prop.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "hadoop1:9092");

        // Flink Kafka consumer for topic "test". Start from the latest offset so
        // only records produced after job start are processed.
        FlinkKafkaConsumer011<String> kafkaConsumer =
                new FlinkKafkaConsumer011<>("test", new SimpleStringSchema(), prop);
        kafkaConsumer.setStartFromLatest();

        DataStreamSource<String> source = senv.addSource(kafkaConsumer);

        // Parse each JSON record into (userID, eventTime, eventType, productID).
        // Malformed records are reported and dropped rather than failing the job.
        SingleOutputStreamOperator<Tuple4<String, String, String, Integer>> eventStream =
                source.process(new ProcessFunction<String, Tuple4<String, String, String, Integer>>() {
                    @Override
                    public void processElement(String value, Context ctx,
                                               Collector<Tuple4<String, String, String, Integer>> out) throws Exception {
                        try {
                            JSONObject obj = JSON.parseObject(value);
                            String userID = obj.getString("userID");
                            String eventTime = obj.getString("eventTime");
                            String eventType = obj.getString("eventType");
                            int productID = obj.getIntValue("productID");
                            out.collect(Tuple4.of(userID, eventTime, eventType, productID));
                        } catch (Exception e) {
                            // Best-effort demo handling: log the parse failure and skip
                            // the bad record so one malformed message can't kill the job.
                            e.printStackTrace();
                            System.out.println("异常数据！");
                        }
                    }
                });

        // Config stream backed by MySQL.
        // Element format: (user id -> (user attribute name -> attribute value)).
        DataStreamSource<Map<String, Integer>> configStream = senv.addSource(new MysqlSource());

        // Every broadcast state requires a MapStateDescriptor. A single Void key is
        // used because the entire config map is stored under one state entry.
        MapStateDescriptor<Void, Map<String, Map<String, Integer>>> configDescriptor =
                new MapStateDescriptor<>(
                        "config",
                        Types.VOID,
                        TypeInformation.of(new TypeHint<Map<String, Map<String, Integer>>>() {
                        })
                );

        // Turn the config stream into a broadcast stream using the descriptor.
        BroadcastStream<Map<String, Integer>> broadcastConfigStream =
                configStream.broadcast(configDescriptor);

        // Connect the event stream with the broadcast config stream.
        BroadcastConnectedStream<Tuple4<String, String, String, Integer>, Map<String, Integer>> connectedStream =
                eventStream.connect(broadcastConfigStream);

        // Pass the function directly rather than through a raw-typed
        // BroadcastProcessFunction variable: the raw type defeated generic type
        // checking and produced an unchecked warning at the process(...) call.
        connectedStream.process(new CustomeBroadcastProcessFunction());

        // Explicit job name so the job is identifiable in the Flink dashboard.
        senv.execute("StreamBroadcastStateDemo");
    }
}
