package com.zhang.gmall.realtime.app.dwm;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.zhang.gmall.realtime.app.func.DimAsyncFunction;
import com.zhang.gmall.realtime.beans.OrderDetail;
import com.zhang.gmall.realtime.beans.OrderInfo;
import com.zhang.gmall.realtime.beans.OrderWide;
import com.zhang.gmall.realtime.utils.DimUtil;
import com.zhang.gmall.realtime.utils.MyKafkaUtil;
import org.apache.flink.api.common.eventtime.SerializableTimestampAssigner;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.functions.RichMapFunction;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.CheckpointingMode;
import org.apache.flink.streaming.api.datastream.AsyncDataStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.async.ResultFuture;
import org.apache.flink.streaming.api.functions.async.RichAsyncFunction;
import org.apache.flink.streaming.api.functions.co.ProcessJoinFunction;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.util.Collector;

import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.time.Duration;
import java.time.LocalDate;
import java.time.Period;
import java.util.concurrent.TimeUnit;

/**
 * @title: 订单宽表
 * @author: zhang
 * @date: 2022/3/7 18:12
 * 1 双流join
 * 2 维度关联
 * <p>
 * mockDB->mysql->binlog->maxwell->kafka->BaseDBApp->kafka/hbase->OrderWideApp->kafka
 * --执行流程
 * 模拟业务数据生成，保存在mysql
 * binlog会记录mysql业务数据的变化
 * MaxWell从binlog中读取变化数据并封装为json字符串发送kafka主题 ods_base_db_m_2022
 * BaseDBApp从ods_base_db_m_2022中读取数据
 * 根据配置表中的配置，对事实数据和维度数据进行动态分流
 * OrderWideApp从kafka读取订单数据和订单明细数据
 * 对读取数据进行类型转换
 * 指定水位线和时间戳
 * 使用KeyBy对两条流进行分组，其实是指定连接字段
 * 使用基于状态的intervalJoin完成join
 */
@SuppressWarnings("all")
public class OrderWideApp {
    public static void main(String[] args) throws Exception {
        //TODO 1.获取环境
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(4);
        //TODO 2.设置检查点
        //TODO 3.读取kafka订单数据、数据结构转换、分配时间戳和水位线
        String orderTopic = "dwd_order_info";
        String groupId = "order_wide_app";
        SingleOutputStreamOperator<OrderInfo> orderInfoDS = env
                .addSource(MyKafkaUtil.getKafkaSource(orderTopic, groupId))
                .map(new RichMapFunction<String, OrderInfo>() {
                    private SimpleDateFormat sdf;

                    @Override
                    public void open(Configuration parameters) throws Exception {
                        sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
                    }

                    @Override
                    public OrderInfo map(String value) throws Exception {
                        OrderInfo orderInfo = JSON.parseObject(value, OrderInfo.class);
                        String create_time = orderInfo.getCreate_time();
                        String[] dateTimeArr = create_time.split(" ");
                        orderInfo.setCreate_date(dateTimeArr[0]);
                        orderInfo.setCreate_hour(dateTimeArr[1].split(":")[0]);
                        orderInfo.setCreate_ts(sdf.parse(create_time).getTime());
                        return orderInfo;
                    }
                })
                .assignTimestampsAndWatermarks(
                        WatermarkStrategy
                                .<OrderInfo>forBoundedOutOfOrderness(Duration.ofSeconds(3))
                                .withTimestampAssigner(new SerializableTimestampAssigner<OrderInfo>() {
                                    @Override
                                    public long extractTimestamp(OrderInfo element, long recordTimestamp) {
                                        return element.getCreate_ts();
                                    }
                                })
                );

        //TODO 4.读取kafka订单明细数据、数据结构转换、分配时间戳和水位线
        String orderDetailTopic = "dwd_order_detail";
        SingleOutputStreamOperator<OrderDetail> orderDetailDS = env
                .addSource(MyKafkaUtil.getKafkaSource(orderDetailTopic, groupId))

                .map(new RichMapFunction<String, OrderDetail>() {
                    private SimpleDateFormat sdf;

                    @Override
                    public void open(Configuration parameters) throws Exception {
                        sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
                    }

                    @Override
                    public OrderDetail map(String value) throws Exception {
                        OrderDetail orderDetail = JSON.parseObject(value, OrderDetail.class);
                        orderDetail.setCreate_ts(sdf.parse(orderDetail.getCreate_time()).getTime());
                        return orderDetail;
                    }
                })
                .assignTimestampsAndWatermarks(
                        WatermarkStrategy
                                .<OrderDetail>forBoundedOutOfOrderness(Duration.ofSeconds(3))
                                .withTimestampAssigner(new SerializableTimestampAssigner<OrderDetail>() {
                                    @Override
                                    public long extractTimestamp(OrderDetail element, long recordTimestamp) {
                                        return element.getCreate_ts();
                                    }
                                })
                );

        //TODO 5.通过keyBy进行分组，指定连接字段，使用基于状态的intervalJoin进行双流join
        SingleOutputStreamOperator<OrderWide> orderWideDS = orderInfoDS
                .keyBy(OrderInfo::getId)
                .intervalJoin(orderDetailDS.keyBy(OrderDetail::getOrder_id))
                .between(Time.seconds(-5L), Time.seconds(5L))
                .process(new ProcessJoinFunction<OrderInfo, OrderDetail, OrderWide>() {
                    @Override
                    public void processElement(OrderInfo left, OrderDetail right, Context ctx, Collector<OrderWide> out) throws Exception {
                        out.collect(new OrderWide(left, right));
                    }
                });

        // TODO 6.用户维度关联
        //实现分发请求的 AsyncFunction
        //获取数据库交互的结果并发送给 ResultFuture 的 回调 函数
        //将异步 I/O 操作应用于 DataStream 作为 DataStream 的一次转换操作。
        SingleOutputStreamOperator<OrderWide> userDimDS = AsyncDataStream
                .unorderedWait(orderWideDS,
                        new DimAsyncFunction<OrderWide>("DIM_USER_INFO") {
                            @Override
                            public void join(OrderWide orderWide, JSONObject userDimInfo) throws ParseException {
                                String gender = userDimInfo.getString("GENDER");
                                orderWide.setUser_gender(gender);
                                String birthday = userDimInfo.getString("BIRTHDAY");
                                SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd");
                                Long birthdayTime = sdf.parse(birthday).getTime();
                                Long curTime = System.currentTimeMillis();
                                Long ageLong = (curTime - birthdayTime) / 1000 / 60 / 60 / 24 / 365L;
                                orderWide.setUser_age(ageLong.intValue());
                            }

                            @Override
                            public String getKey(OrderWide orderWide) {
                                return orderWide.getUser_id().toString();
                            }
                        },
                        60,
                        TimeUnit.SECONDS);
        //TODO 7.地区维度关联
        SingleOutputStreamOperator<OrderWide> regionDS = AsyncDataStream
                .unorderedWait(userDimDS,
                        new DimAsyncFunction<OrderWide>("DIM_BASE_PROVINCE") {

                            @Override
                            public String getKey(OrderWide orderWide) {
                                return orderWide.getProvince_id().toString();
                            }

                            @Override
                            public void join(OrderWide orderWide, JSONObject dimInfo) throws ParseException {
                                orderWide.setProvince_name(dimInfo.getString("NAME"));
                                orderWide.setProvince_area_code(dimInfo.getString("AREA_CODE"));
                                orderWide.setProvince_iso_code(dimInfo.getString("ISO_CODE"));
                                orderWide.setProvince_3166_2_code(dimInfo.getString("ISO_3166_2"));
                            }
                        }, 60, TimeUnit.SECONDS);


        //TODO 8.商品维度关联
        SingleOutputStreamOperator<OrderWide> skuDS = AsyncDataStream
                .unorderedWait(regionDS,
                        new DimAsyncFunction<OrderWide>("DIM_SKU_INFO") {
                            @Override
                            public String getKey(OrderWide orderWide) {
                                return orderWide.getSku_id().toString();
                            }

                            @Override
                            public void join(OrderWide orderWide, JSONObject dimInfo) throws ParseException {
                                orderWide.setSku_name(dimInfo.getString("SKU_NAME"));
                                orderWide.setSpu_id(dimInfo.getLong("SPU_ID"));
                                orderWide.setTm_id(dimInfo.getLong("TM_ID"));
                                orderWide.setCategory3_id(dimInfo.getLong("CATEGORY3_ID"));
                            }
                        }, 60, TimeUnit.SECONDS);

        //TODO 9.品牌维度关联
        SingleOutputStreamOperator<OrderWide> tmNameDS = AsyncDataStream
                .unorderedWait(skuDS,
                        new DimAsyncFunction<OrderWide>("DIM_BASE_TRADEMARK") {
                            @Override
                            public String getKey(OrderWide input) {
                                return input.getTm_id().toString();
                            }

                            @Override
                            public void join(OrderWide input, JSONObject dimInfo) throws ParseException {
                                input.setTm_name(dimInfo.getString("TM_NAME"));
                            }
                        }, 60, TimeUnit.SECONDS);

        //TODO 10.类别维度关联
        SingleOutputStreamOperator<OrderWide> cateNameDS = AsyncDataStream
                .unorderedWait(tmNameDS,
                        new DimAsyncFunction<OrderWide>("DIM_BASE_CATEGORY3") {
                            @Override
                            public String getKey(OrderWide input) {
                                return input.getCategory3_id().toString();
                            }

                            @Override
                            public void join(OrderWide input, JSONObject dimInfo) throws ParseException {
                                input.setCategory3_name(dimInfo.getString("NAME"));
                            }
                        }, 60, TimeUnit.SECONDS);
        cateNameDS.print();
        //TODO 11.SPU维度关联
        SingleOutputStreamOperator<OrderWide> resultDS = AsyncDataStream
                .unorderedWait(cateNameDS,
                        new DimAsyncFunction<OrderWide>("DIM_SPU_INFO") {
                            @Override
                            public String getKey(OrderWide input) {
                                return input.getSpu_id().toString();
                            }

                            @Override
                            public void join(OrderWide input, JSONObject dimInfo) throws ParseException {
                                input.setSpu_name(dimInfo.getString("SPU_NAME"));
                            }
                        }, 60, TimeUnit.SECONDS);
        // TODO    12. 写入kafka主题
        resultDS
                .map(JSONObject::toJSONString)
                .addSink(MyKafkaUtil.getKafkaSink("dwm_order_wide_2022"));

        resultDS
                .map(JSONObject::toJSONString).print("orderWideDS");
        // TODO 13.执行任务
        env.execute("OrderWideApp");

    }
}
