package com.dwd;


import com.alibaba.fastjson.JSONObject;
import com.alibaba.ververica.cdc.connectors.mysql.MySQLSource;
import com.alibaba.ververica.cdc.debezium.DebeziumSourceFunction;
import com.bean.UserInfo;
import com.common.DimSinkFunctions;
import com.function.CustomerDeserialization;
import com.utils.KafkaUtils;
import com.utils.PhonexUtil;
import org.apache.flink.api.common.eventtime.SerializableTimestampAssigner;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.state.BroadcastState;
import org.apache.flink.api.common.state.MapStateDescriptor;
import org.apache.flink.api.common.state.ReadOnlyBroadcastState;
import org.apache.flink.streaming.api.datastream.BroadcastStream;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.ProcessFunction;
import org.apache.flink.streaming.api.functions.co.BroadcastProcessFunction;
import org.apache.flink.streaming.api.functions.co.ProcessJoinFunction;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;
import org.apache.flink.util.Collector;
import org.apache.flink.util.OutputTag;

import java.text.ParseException;
import java.text.SimpleDateFormat;

/**
 * 根据业务需求，在 DWD 层中使用侧输出流对订单数据进行动态分流。
 * 对于新用户的订单，发送到一个特定的 Kafka 主题中；
 * 对于老用户的订单，则根据商品类别动态分流，将订单数据发送到不同的 Kafka 主题中。
 * 同时，对于用户信息表，将维度数据写入 HBase 中缓存起来（8分）。
 */
public class Five {

    /**
     * Timestamp pattern of the {@code after.create_time} field in both order tables.
     * NOTE: {@code HH} (0-23) is required here — the original {@code hh} is the 12-hour
     * field and silently mis-parses every afternoon timestamp, corrupting watermarks.
     */
    private static final String CREATE_TIME_PATTERN = "yyyy-MM-dd HH:mm:ss";

    /**
     * Builds the event-time watermark strategy shared by the order_info and
     * order_detail streams: monotonous timestamps extracted from
     * {@code after.create_time}. Extracted to a helper because the two original
     * assigners were copy-pasted duplicates.
     */
    private static WatermarkStrategy<JSONObject> createTimeWatermarks() {
        return WatermarkStrategy
                .<JSONObject>forMonotonousTimestamps()
                .withTimestampAssigner(new SerializableTimestampAssigner<JSONObject>() {
                    // SimpleDateFormat is not thread-safe, but each task gets its own
                    // deserialized copy of this assigner, so a per-instance format is safe.
                    private final SimpleDateFormat fmt = new SimpleDateFormat(CREATE_TIME_PATTERN);

                    @Override
                    public long extractTimestamp(JSONObject record, long previousTimestamp) {
                        try {
                            String createTime = record.getJSONObject("after").getString("create_time");
                            return fmt.parse(createTime).getTime();
                        } catch (ParseException e) {
                            // Keep the original fail-fast behavior, but preserve context.
                            throw new RuntimeException("unparseable create_time in record: " + record, e);
                        }
                    }
                });
    }

    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);

        // 1. Read records (already tagged new/old user upstream) from Kafka.
        FlinkKafkaConsumer<String> dwdNewOrOldDb = KafkaUtils.createConsumer("dwd_newOrOld_db", "ok");
        DataStreamSource<String> stream = env.addSource(dwdNewOrOldDb);

        // Split the stream: new users go to a side output, old users stay on the main stream.
        OutputTag<String> newTag = new OutputTag<String>("new") {
        };
        SingleOutputStreamOperator<String> oldStream = stream.process(new ProcessFunction<String, String>() {
            @Override
            public void processElement(String value, ProcessFunction<String, String>.Context ctx,
                                       Collector<String> out) throws Exception {
                JSONObject jo = JSONObject.parseObject(value);
                // Null-safe comparison: records without is_new are treated as old users
                // instead of crashing with an NPE (original called is_new.equals("1")).
                if ("1".equals(jo.getString("is_new"))) {
                    ctx.output(newTag, value);   // new user -> side output
                } else {
                    out.collect(value);          // old user -> main stream
                }
            }
        });
        // New-user orders go straight to their own Kafka topic.
        oldStream.getSideOutput(newTag).addSink(KafkaUtils.createProduer("dwd_newUser_db"));

        // 2. Old-user dynamic split:
        //    interval-join order_info with order_detail into a wide record, then
        //    enrich it with sku_info dimension data broadcast from MySQL via Flink CDC.

        // order_info with event-time watermarks.
        SingleOutputStreamOperator<JSONObject> orderInfoStream = oldStream
                .map(JSONObject::parseObject)
                .assignTimestampsAndWatermarks(createTimeWatermarks());

        // order_detail: drop delete records, then assign watermarks.
        SingleOutputStreamOperator<JSONObject> orderDetailStream = env
                .addSource(KafkaUtils.createConsumer("dwd_orderDetail_db", "ka"))
                .map(JSONObject::parseObject)
                .filter(x -> !"delete".equals(x.getString("type")))
                .assignTimestampsAndWatermarks(createTimeWatermarks());

        // Wide table: order_info.id == order_detail.order_id within [-2 min, +1 min].
        SingleOutputStreamOperator<JSONObject> wideTable = orderInfoStream
                .keyBy(x -> x.getJSONObject("after").getString("id"))
                .intervalJoin(orderDetailStream.keyBy(x -> x.getJSONObject("after").getString("order_id")))
                .between(Time.minutes(-2), Time.minutes(1))
                .process(new ProcessJoinFunction<JSONObject, JSONObject, JSONObject>() {
                    @Override
                    public void processElement(JSONObject info, JSONObject detail, Context ctx,
                                               Collector<JSONObject> out) throws Exception {
                        JSONObject wide = new JSONObject();
                        wide.put("info", info);
                        wide.put("detail", detail);
                        out.collect(wide);
                    }
                });

        // Flink CDC source for the sku_info dimension table.
        DebeziumSourceFunction<String> skuInfoSource = MySQLSource.<String>builder()
                .hostname("hadoop-single")
                .port(3306)
                .databaseList("gmall2021")                     // captured database
                .tableList("gmall2021.sku_info")               // captured table
                .username("root")
                .password("root")
                .deserializer(new CustomerDeserialization())   // SourceRecord -> JSON string
                .build();
        SingleOutputStreamOperator<JSONObject> skuInfoStream =
                env.addSource(skuInfoSource).map(JSONObject::parseObject);

        // Broadcast sku_info keyed by sku id so every wide record can look it up.
        MapStateDescriptor<String, JSONObject> skuStateDesc =
                new MapStateDescriptor<>("bro", String.class, JSONObject.class);
        BroadcastStream<JSONObject> skuBroadcast = skuInfoStream.broadcast(skuStateDesc);

        SingleOutputStreamOperator<JSONObject> enriched = wideTable.connect(skuBroadcast)
                .process(new BroadcastProcessFunction<JSONObject, JSONObject, JSONObject>() {
                    @Override
                    public void processElement(JSONObject wide, ReadOnlyContext ctx,
                                               Collector<JSONObject> out) throws Exception {
                        ReadOnlyBroadcastState<String, JSONObject> state = ctx.getBroadcastState(skuStateDesc);
                        String skuId = wide.getJSONObject("detail").getJSONObject("after").getString("sku_id");
                        JSONObject sku = state.get(skuId);
                        // NOTE(review): wide records arriving before the matching sku_info
                        // broadcast are silently dropped — confirm that is acceptable.
                        if (sku != null) {
                            // Key name "sku_id" kept for downstream compatibility, even
                            // though the value is the full sku dimension record.
                            wide.put("sku_id", sku);
                            out.collect(wide);
                        }
                    }

                    @Override
                    public void processBroadcastElement(JSONObject sku, Context ctx,
                                                        Collector<JSONObject> out) throws Exception {
                        // Register/refresh this sku in broadcast state, keyed by its id.
                        ctx.getBroadcastState(skuStateDesc).put(sku.getJSONObject("after").getString("id"), sku);
                    }
                });

        enriched.print("kk");
        enriched.map(JSONObject::toJSONString).addSink(KafkaUtils.createProduer("ads_wide_db"));

        // 3. user_info dimension table -> Phoenix/HBase cache.
        DataStreamSource<String> userInfoDb = env.addSource(KafkaUtils.createConsumer("dwd_userInfo_db", "lala"));
        // Ensure the Phoenix table exists before sinking. (The original also mapped
        // userInfoDb to a UserInfo stream that was never used; removed as dead code.)
        PhonexUtil.checkTable("userInfo", UserInfo.class, "id", "");
        userInfoDb.map(x -> {
            JSONObject dim = new JSONObject();
            dim.put("sinkTable", "userInfo");
            dim.put("sinkPk", "id");
            dim.put("type", "insert");
            dim.put("after", JSONObject.parseObject(x).getJSONObject("after"));
            return dim;
        }).addSink(new DimSinkFunctions());

        env.execute();
    }
}
