package com.zzw.demo.text_A;


import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.zzw.demo.bean.OrderInfo;
import com.zzw.demo.bean.OrderWide;
import com.zzw.demo.bean.UserInfo;
import com.zzw.demo.util.KafkaUtil;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.KeyedStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.co.ProcessJoinFunction;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.util.Collector;

/**
 * DWM-layer wide-table job: interval-joins the user-info fact stream with the
 * order-info fact stream (both read from Kafka DWD topics) into {@code OrderWide}
 * records, keyed by user id.
 */
public class Text07 {
    public static void main(String[] args) throws Exception {
        // 7) In the DWM layer, use a dual-stream join to widen fact tables:
        //    associate order data with user data, then send the joined output
        //    to the corresponding Kafka topic.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        // Single parallelism keeps this demo job's ordering/debugging simple.
        env.setParallelism(1);

        // Subscribe to the Kafka topic dwd_user_info.
        DataStreamSource<String> userInfoSource =
                env.addSource(KafkaUtil.kafkaSource("dwd_user_info", "test"));

        // Parse each record, extract the "user_info" payload as a UserInfo bean,
        // and key by user id (the join key shared with the order stream).
        KeyedStream<UserInfo, String> userInfoKeyed = userInfoSource
                .map(x -> JSON.parseObject(x))
                .map(json -> JSON.toJavaObject(json.getJSONObject("user_info"), UserInfo.class))
                .keyBy(user -> user.getId());

        // Subscribe to the Kafka topic dwd_order_info.
        DataStreamSource<String> orderInfoSource =
                env.addSource(KafkaUtil.kafkaSource("dwd_order_info", "test"));

        // Parse each record, extract the "order_info" payload as an OrderInfo bean,
        // and key by the order's user_id so both streams key on the same value.
        KeyedStream<OrderInfo, String> orderInfoKeyed = orderInfoSource
                .map(x -> JSON.parseObject(x))
                .map(json -> JSON.toJavaObject(json.getJSONObject("order_info"), OrderInfo.class))
                .keyBy(order -> order.getUser_id());

        // Interval join: for each user event, match order events whose timestamp
        // falls within [-2 ms, +1 ms] of it, and combine both into an OrderWide.
        // Typed as OrderWide (not Object) so downstream operators are type-safe.
        SingleOutputStreamOperator<OrderWide> orderWideStream = userInfoKeyed
                .intervalJoin(orderInfoKeyed)
                .between(Time.milliseconds(-2), Time.milliseconds(1))
                .process(new ProcessJoinFunction<UserInfo, OrderInfo, OrderWide>() {
                    @Override
                    public void processElement(UserInfo userInfo, OrderInfo orderInfo,
                                               ProcessJoinFunction<UserInfo, OrderInfo, OrderWide>.Context context,
                                               Collector<OrderWide> collector) throws Exception {
                        collector.collect(new OrderWide(orderInfo, userInfo));
                    }
                });

        // TODO(review): the requirement says to publish the joined stream to a Kafka
        // topic, but no sink is attached, so the join result is currently discarded.
        // Attach one once KafkaUtil's sink helper is confirmed, e.g.:
        //   orderWideStream.map(w -> JSON.toJSONString(w)).addSink(KafkaUtil.kafkaSink("dwm_order_wide"));

        // Launch the job.
        env.execute();
    }
}
//    //设置广播流配置  名字  key值  value值
//    MapStateDescriptor<String, JSONObject> map_state = new MapStateDescriptor<>("map_state", String.class, JSONObject.class);
//    //转换成广播流
//    BroadcastStream<String> broadcast = user_infoDS.broadcast(map_state);
//    //连接主流跟广播流
//    BroadcastConnectedStream<String, String> connect = user_infoDS.connect(new BroadcastProcessFunction<JSONObject,String,JSONObject>(){
//        //处理广播流的方法
//        @Override
//        public void processBroadcastElement(String value, BroadcastProcessFunction<JSONObject, String, JSONObject>.Context context, Collector<JSONObject> collector) throws Exception {
//            //获取并解析数据为JavaBean对象
//            JSONObject jsonObject = JSON.parseObject(value);
//            JSON.parseObject(jsonObject.getString("after"));
//            //效验表是否存在,如果不存在则建表
//            //将数据写入状态
//        }
//        //处理主流的方法
//        @Override
//        public void processElement(JSONObject jsonObject, BroadcastProcessFunction<JSONObject, String, JSONObject>.ReadOnlyContext readOnlyContext, Collector<JSONObject> collector) throws Exception {
//            //获取广播的配置信息
//            //根据sinkColumns配置信息过滤字段
//            //补充singTable字段写出
//        }
//    });
