package com.atbeijing.gmall.realtime.app.dwm;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.atbeijing.gmall.realtime.app.func.DimAsyncFunction;
import com.atbeijing.gmall.realtime.bean.order.OrderDetail;
import com.atbeijing.gmall.realtime.bean.order.OrderInfo;
import com.atbeijing.gmall.realtime.bean.order.OrderWide;
import com.atbeijing.gmall.realtime.utils.MyKafkaUtil;
import org.apache.flink.api.common.eventtime.SerializableTimestampAssigner;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.functions.RichMapFunction;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.datastream.AsyncDataStream;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.KeyedStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.co.ProcessJoinFunction;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.util.Collector;

import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.time.LocalDate;
import java.time.LocalDateTime;
import java.time.Period;
import java.time.ZoneId;
import java.time.format.DateTimeFormatter;
import java.time.format.DateTimeParseException;
import java.util.Date;
import java.util.concurrent.TimeUnit;

/**
 * 订单宽表数据
 */
public class OrderWideApp {
    public static void main(String[] args) throws Exception {
        //TODO 1.基本环境准备
        StreamExecutionEnvironment env  = StreamExecutionEnvironment.getExecutionEnvironment();
        //设置并行度读取kafka分区数据
        env.setParallelism(4);
        //TODO 2.设置检查点
        //TODO 3.从Kafka中读取数据
        String orderInfoSourceTopic = "dwd_order_info";
        String  orderDetailSourceTopic = "dwd_order_detail";
        String groupId = "order_wide_group";
        String orderWideSinkTopic = "dwm_order_wide";
        //订单信息
        DataStreamSource<String> orderInfoSource = env.addSource(MyKafkaUtil.getKafkaSource(orderInfoSourceTopic, groupId));
        //订单详情
        DataStreamSource<String> orderDetailSource = env.addSource(MyKafkaUtil.getKafkaSource(orderDetailSourceTopic, groupId));

        //TODO 4.处理数据
        //订单流数据结构转换 jsonStr->实体类对象,并将订单创建时间作为水位线,根据订单id分组
        KeyedStream<OrderInfo, Long> orderInfoKeyedStream = orderInfoSource
                .map(new RichMapFunction<String, OrderInfo>() {
                    SimpleDateFormat sdf = null;

                    @Override
                    public void open(Configuration parameters) throws Exception {
                        super.open(parameters);
                        sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
                    }

                    @Override
                    public OrderInfo map(String value) throws Exception {
                        OrderInfo orderInfo = JSON.parseObject(value, OrderInfo.class);
                        //订单创建时间作为事件时间
                        orderInfo.setCreate_ts(sdf.parse(orderInfo.getCreate_time()).getTime());
                        return orderInfo;
                    }
                })
                .assignTimestampsAndWatermarks(
                        WatermarkStrategy.<OrderInfo>forMonotonousTimestamps()
                                .withTimestampAssigner(new SerializableTimestampAssigner<OrderInfo>() {
                                    @Override
                                    public long extractTimestamp(OrderInfo element, long recordTimestamp) {
                                        return element.getCreate_ts();
                                    }
                                })
                )
                .keyBy(r -> r.getId());

        //订单详情流数据结构转换 jsonStr->实体类对象,并将订单详情创建时间作为水位线,根据订单id分组
        KeyedStream<OrderDetail, Long> orderDetailKeyedStream = orderDetailSource
                .map(new RichMapFunction<String, OrderDetail>() {
                    SimpleDateFormat sdf = null;

                    @Override
                    public void open(Configuration parameters) throws Exception {
                        super.open(parameters);
                        sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
                    }

                    @Override
                    public OrderDetail map(String value) throws Exception {
                        OrderDetail orderDetail = JSON.parseObject(value, OrderDetail.class);
                        //订单详情创建时间作为事件时间
                        orderDetail.setCreate_ts(sdf.parse(orderDetail.getCreate_time()).getTime());
                        return orderDetail;
                    }
                })
                .assignTimestampsAndWatermarks(
                        WatermarkStrategy.<OrderDetail>forMonotonousTimestamps()
                                .withTimestampAssigner(new SerializableTimestampAssigner<OrderDetail>() {
                                    @Override
                                    public long extractTimestamp(OrderDetail element, long recordTimestamp) {
                                        return element.getCreate_ts();
                                    }
                                })
                )
                .keyBy(r -> r.getOrder_id());

        //订单和订单明细进行双流join(intervalJoin) 订单流与前后5秒内订单详情按照订单id join
        SingleOutputStreamOperator<OrderWide> orderWideDS = orderInfoKeyedStream
                .intervalJoin(orderDetailKeyedStream)
                .between(Time.seconds(-5), Time.seconds(5))
                .process(new ProcessJoinFunction<OrderInfo, OrderDetail, OrderWide>() {
                    @Override
                    public void processElement(OrderInfo left, OrderDetail right, Context ctx, Collector<OrderWide> out) throws Exception {
                        out.collect(new OrderWide(left, right));
                    }
                });

        //orderWideDS.print(">>>>>>>>");

        //TODO 异步I/O orderWideDS和用户维度表进行关联
        //无序等待（unorderedWait）
        //后来的数据，如果异步查询速度快可以超过先来的数据，这样性能会更好一些，但是会有乱序出现。谁先关联完成,谁就先向下游发送数据
        //有序等待（orderedWait）
        //严格保留先来后到的顺序，所以后来的数据即使先完成也要等前面的数据。所以性能会差一些。
        SingleOutputStreamOperator<OrderWide> orderWideWithUserInfoDS = AsyncDataStream.unorderedWait(
                //输入流
                orderWideDS,
                //异步请求: 通过宽表的用户id多线程异步到缓存或hbase查询维度数据并关联
                new DimAsyncFunction<OrderWide>("DIM_USER_INFO") {
                    //orderWide 宽表, dimJsonObj 维度数据
                    @Override
                    public void join(OrderWide orderWide, JSONObject dimJsonObj) {
                        try {
                            SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd");

                            String gender = dimJsonObj.getString("GENDER");

                            String birthday = dimJsonObj.getString("BIRTHDAY");
                            Date birthdayDate = sdf.parse(birthday);
                            long birthdayTime = birthdayDate.getTime();
                            long curTime = System.currentTimeMillis();
                            long ageTime = curTime - birthdayTime;
                            Long age = ageTime / 1000 / 60 / 60 / 24 / 365;

                            orderWide.setUser_gender(gender);
                            orderWide.setUser_age(age.intValue());
                        } catch (ParseException e) {
                            e.printStackTrace();
                            System.out.println("orderWide join DIM_USER_INFO 异常");
                        }
                    }

                    @Override
                    public String getKey(OrderWide orderWide) {
                        return orderWide.getUser_id().toString();
                    }
                },
                60,//超时参数定义了异步请求发出多久后未得到响应即被认定为失败。 它可以防止一直等待得不到响应的请求
                TimeUnit.SECONDS
        );

        //orderWideWithUserInfoDS.print(">>>>");

        //TODO 关联省市维度
        SingleOutputStreamOperator<OrderWide> orderWideWithProvinceDS = AsyncDataStream.unorderedWait(
                orderWideWithUserInfoDS,
                new DimAsyncFunction<OrderWide>("DIM_BASE_PROVINCE") {
                    @Override
                    public void join(OrderWide orderWide, JSONObject dimJsonObj) {
                        orderWide.setProvince_name(dimJsonObj.getString("NAME"));
                        orderWide.setProvince_3166_2_code(dimJsonObj.getString("ISO_3166_2"));
                        orderWide.setProvince_iso_code(dimJsonObj.getString("ISO_CODE"));
                        orderWide.setProvince_area_code(dimJsonObj.getString("AREA_CODE"));
                    }

                    @Override
                    public String getKey(OrderWide orderWide) {
                        return orderWide.getProvince_id().toString();
                    }
                },
                60,
                TimeUnit.SECONDS
        );

        //TODO 关联SKU维度
        SingleOutputStreamOperator<OrderWide> orderWideWithSkuDS = AsyncDataStream.unorderedWait(
                orderWideWithProvinceDS,
                new DimAsyncFunction<OrderWide>("DIM_SKU_INFO") {
                    @Override
                    public void join(OrderWide orderWide, JSONObject dimJsonObj) {
                        orderWide.setSku_name(dimJsonObj.getString("SKU_NAME"));
                        orderWide.setCategory3_id(dimJsonObj.getLong("CATEGORY3_ID"));
                        orderWide.setSpu_id(dimJsonObj.getLong("SPU_ID"));
                        orderWide.setTm_id(dimJsonObj.getLong("TM_ID"));
                    }

                    @Override
                    public String getKey(OrderWide orderWide) {
                        return orderWide.getSku_id().toString();
                    }
                },
                60,
                TimeUnit.SECONDS
        );
        //TODO 关联spu
        SingleOutputStreamOperator<OrderWide> orderWideWithSpuDS = AsyncDataStream.unorderedWait(
                orderWideWithSkuDS,
                new DimAsyncFunction<OrderWide>("DIM_SPU_INFO") {
                    @Override
                    public void join(OrderWide orderWide, JSONObject dimJsonObj) {
                        orderWide.setSpu_name(dimJsonObj.getString("SPU_NAME"));
                    }

                    @Override
                    public String getKey(OrderWide orderWide) {
                        return orderWide.getSpu_id().toString();
                    }
                },
                60,
                TimeUnit.SECONDS
        );

        //TODO 关联品类维度
        SingleOutputStreamOperator<OrderWide> orderWideWithCategory3DS = AsyncDataStream.unorderedWait(
                orderWideWithSpuDS,
                new DimAsyncFunction<OrderWide>("DIM_BASE_CATEGORY3") {
                    @Override
                    public void join(OrderWide orderWide, JSONObject dimJsonObj) {
                        orderWide.setCategory3_name(dimJsonObj.getString("NAME"));
                    }

                    @Override
                    public String getKey(OrderWide orderWide) {
                        return orderWide.getCategory3_id().toString();
                    }
                },
                60,
                TimeUnit.SECONDS
        );

        //TODO 关联品牌维度
        SingleOutputStreamOperator<OrderWide> orderWideWithTmDS= AsyncDataStream.unorderedWait(
                orderWideWithCategory3DS,
                new DimAsyncFunction<OrderWide>("DIM_BASE_TRADEMARK") {
                    @Override
                    public void join(OrderWide orderWide, JSONObject jsonObject) {
                        orderWide.setTm_name(jsonObject.getString("TM_NAME"));
                    }

                    @Override
                    public String getKey(OrderWide orderWide) {
                        return String.valueOf(orderWide.getTm_id());
                    }
                }, 60, TimeUnit.SECONDS);

        //TODO 将数据写回到kafka的dwm层
        orderWideWithTmDS.print();

        orderWideWithTmDS
                .map(r -> JSON.toJSONString(r))
                .addSink(MyKafkaUtil.getKafkaSink(orderWideSinkTopic));

        env.execute();
    }
}
