package com.atguigu.gmall.realtime.app.dwm;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.atguigu.gmall.realtime.app.func.DimAsyncFunction;
import com.atguigu.gmall.realtime.beans.OrderDetail;
import com.atguigu.gmall.realtime.beans.OrderInfo;
import com.atguigu.gmall.realtime.beans.OrderWide;
import com.atguigu.gmall.realtime.utils.MyKafkaUtil;
import org.apache.flink.api.common.eventtime.SerializableTimestampAssigner;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.functions.RichMapFunction;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.datastream.AsyncDataStream;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.KeyedStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.co.ProcessJoinFunction;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;
import org.apache.flink.util.Collector;

import java.text.SimpleDateFormat;
import java.time.Duration;
import java.time.LocalDate;
import java.time.LocalDateTime;
import java.time.Period;
import java.time.ZoneId;
import java.time.format.DateTimeFormatter;
import java.util.concurrent.TimeUnit;

/**
 * Author: Felix
 * Date: 2022/1/21
 * Desc: 订单宽表准备
 * 需要启动的进程
 * zk、kafka、maxwell、hdfs、hbase、redis、BaseDBApp、OrderWideApp
 * 执行流程
 * 模拟生成业务数据
 * 生成的业务数据库保存到MySQL业务数据库中
 * binlog会记录MySQL业务数据库的变化
 * Maxwell从binlog中读取变化数据并封装为json字符串发送给kafka主题 ods_base_db_m
 * BaseDBApp从ods_base_db_m主题中读取数据
 * 根据配置表中的配置，判断是事实还是维度数据进行动态分流
 * 订单和订单明细属于事实数据，发送到kafka的dwd_order_info和dwd_order_detail主题中
 * OrderWideApp从kafka的dwd_order_info和dwd_order_detail主题中读取数据
 */
public class OrderWideApp {

    /**
     * Formatter for the "yyyy-MM-dd HH:mm:ss" create_time strings carried by the
     * order facts. DateTimeFormatter is immutable and thread-safe, so a single
     * shared instance replaces the per-operator SimpleDateFormat the map
     * functions previously created in open() (SimpleDateFormat is legacy and
     * not thread-safe).
     */
    private static final DateTimeFormatter CREATE_TIME_FORMATTER =
        DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss");

    /**
     * Converts a "yyyy-MM-dd HH:mm:ss" timestamp string to epoch milliseconds,
     * interpreted in the JVM default time zone — the same zone a default
     * SimpleDateFormat would have used, so the produced event-time values are
     * unchanged.
     *
     * @param dateTimeStr timestamp text in "yyyy-MM-dd HH:mm:ss" form
     * @return epoch milliseconds of that instant in the default zone
     */
    private static long toEpochMillis(String dateTimeStr) {
        return LocalDateTime.parse(dateTimeStr, CREATE_TIME_FORMATTER)
            .atZone(ZoneId.systemDefault())
            .toInstant()
            .toEpochMilli();
    }

    /**
     * Calculates a user's age in whole years from an ISO "yyyy-MM-dd" birthday.
     * Period.between is calendar-accurate (handles leap years and month
     * boundaries), unlike the former millis / 365-days approximation which
     * drifts by roughly one day per four years.
     *
     * @param birthday birth date in ISO "yyyy-MM-dd" form
     * @return completed years between the birthday and today
     */
    private static int calculateAge(String birthday) {
        return Period.between(LocalDate.parse(birthday), LocalDate.now()).getYears();
    }

    public static void main(String[] args) throws Exception {
        //TODO 1. Environment setup
        //1.1 Create the stream execution environment
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        //1.2 Set parallelism
        env.setParallelism(4);
        //TODO 2. Checkpoint configuration (omitted)
        //TODO 3. Read from Kafka
        //3.1 Topics and consumer group
        String orderInfoTopic = "dwd_order_info";
        String orderDetailTopic = "dwd_order_detail";
        String groupId = "order_wide_app_group";

        //3.2 Build the Kafka consumers
        FlinkKafkaConsumer<String> orderInfoKafkaSource = MyKafkaUtil.getKafkaSource(orderInfoTopic, groupId);
        FlinkKafkaConsumer<String> orderDetailKafkaSource = MyKafkaUtil.getKafkaSource(orderDetailTopic, groupId);

        //3.3 Wrap the consumers as streams
        DataStreamSource<String> orderInfoStrDS = env.addSource(orderInfoKafkaSource);
        DataStreamSource<String> orderDetailStrDS = env.addSource(orderDetailKafkaSource);

        //orderInfoStrDS.print(">>>");
        //orderDetailStrDS.print("###");

        //TODO 4. Convert each JSON string to its bean and stamp create_ts (event time in millis)
        SingleOutputStreamOperator<OrderInfo> orderInfoDS = orderInfoStrDS.map(
            new RichMapFunction<String, OrderInfo>() {
                @Override
                public OrderInfo map(String jsonStr) throws Exception {
                    OrderInfo orderInfo = JSON.parseObject(jsonStr, OrderInfo.class);
                    orderInfo.setCreate_ts(toEpochMillis(orderInfo.getCreate_time()));
                    return orderInfo;
                }
            }
        );

        SingleOutputStreamOperator<OrderDetail> orderDetailDS = orderDetailStrDS.map(
            new RichMapFunction<String, OrderDetail>() {
                @Override
                public OrderDetail map(String jsonStr) throws Exception {
                    OrderDetail orderDetail = JSON.parseObject(jsonStr, OrderDetail.class);
                    orderDetail.setCreate_ts(toEpochMillis(orderDetail.getCreate_time()));
                    return orderDetail;
                }
            }
        );

        //TODO 5. Assign watermarks and extract the event-time field
        //5.1 Order stream — bounded out-of-orderness of 3 seconds
        SingleOutputStreamOperator<OrderInfo> orderInfoWithWatermarkDS = orderInfoDS.assignTimestampsAndWatermarks(
            WatermarkStrategy
                .<OrderInfo>forBoundedOutOfOrderness(Duration.ofSeconds(3))
                .withTimestampAssigner(
                    new SerializableTimestampAssigner<OrderInfo>() {
                        @Override
                        public long extractTimestamp(OrderInfo orderInfo, long recordTimestamp) {
                            return orderInfo.getCreate_ts();
                        }
                    }
                )
        );

        //5.2 Order-detail stream — same out-of-orderness bound
        SingleOutputStreamOperator<OrderDetail> orderDetailWithWatermarkDS = orderDetailDS.assignTimestampsAndWatermarks(
            WatermarkStrategy
                .<OrderDetail>forBoundedOutOfOrderness(Duration.ofSeconds(3))
                .withTimestampAssigner(
                    new SerializableTimestampAssigner<OrderDetail>() {
                        @Override
                        public long extractTimestamp(OrderDetail orderDetail, long recordTimestamp) {
                            return orderDetail.getCreate_ts();
                        }
                    }
                )
        );

        //TODO 6. Key both streams by the join field (order id)
        KeyedStream<OrderInfo, Long> orderInfoKeyedDS = orderInfoWithWatermarkDS.keyBy(OrderInfo::getId);
        KeyedStream<OrderDetail, Long> orderDetailKeyedDS = orderDetailWithWatermarkDS.keyBy(OrderDetail::getOrder_id);

        //TODO 7. Interval-join order and order-detail (±5 s around each order's event time)
        SingleOutputStreamOperator<OrderWide> joinedDS = orderInfoKeyedDS
            .intervalJoin(orderDetailKeyedDS)
            .between(Time.seconds(-5), Time.seconds(5))
            .process(
                new ProcessJoinFunction<OrderInfo, OrderDetail, OrderWide>() {
                    @Override
                    public void processElement(OrderInfo orderInfo, OrderDetail orderDetail, Context ctx, Collector<OrderWide> out) throws Exception {
                        out.collect(new OrderWide(orderInfo, orderDetail));
                    }
                }
            );

        //joinedDS.print(">>>>>");

        //TODO 8. Enrich with the user dimension (async lookup, 60 s timeout)
        SingleOutputStreamOperator<OrderWide> orderWideWithUserInfoDS = AsyncDataStream.unorderedWait(
            joinedDS,
            new DimAsyncFunction<OrderWide>("DIM_USER_INFO") {
                @Override
                public void join(OrderWide orderWide, JSONObject userInfoJsonObj) throws Exception {
                    orderWide.setUser_gender(userInfoJsonObj.getString("GENDER"));
                    // Calendar-accurate age in whole years (see calculateAge).
                    orderWide.setUser_age(calculateAge(userInfoJsonObj.getString("BIRTHDAY")));
                }

                @Override
                public String getKey(OrderWide orderWide) {
                    return orderWide.getUser_id().toString();
                }
            },
            60, TimeUnit.SECONDS
        );
        //orderWideWithUserInfoDS.print(">>>>");

        //TODO 9. Enrich with the province dimension
        SingleOutputStreamOperator<OrderWide> orderWideWithProvinceInfoDS = AsyncDataStream.unorderedWait(
            orderWideWithUserInfoDS,
            new DimAsyncFunction<OrderWide>("dim_base_province") {
                @Override
                public void join(OrderWide orderWide, JSONObject provinceInfo) throws Exception {
                    orderWide.setProvince_name(provinceInfo.getString("NAME"));
                    orderWide.setProvince_area_code(provinceInfo.getString("AREA_CODE"));
                    orderWide.setProvince_iso_code(provinceInfo.getString("ISO_CODE"));
                    orderWide.setProvince_3166_2_code(provinceInfo.getString("ISO_3166_2"));
                }

                @Override
                public String getKey(OrderWide orderWide) {
                    return orderWide.getProvince_id().toString();
                }
            },
            60, TimeUnit.SECONDS
        );
        //orderWideWithProvinceInfoDS.print(">>>>");
        //TODO 10. Enrich with the SKU dimension
        SingleOutputStreamOperator<OrderWide> orderWideWithSkuInfoDS = AsyncDataStream.unorderedWait(
            orderWideWithProvinceInfoDS,
            new DimAsyncFunction<OrderWide>("dim_sku_info") {
                @Override
                public void join(OrderWide orderWide, JSONObject skuInfo) throws Exception {
                    orderWide.setSku_name(skuInfo.getString("SKU_NAME"));
                    orderWide.setCategory3_id(skuInfo.getLong("CATEGORY3_ID"));
                    orderWide.setSpu_id(skuInfo.getLong("SPU_ID"));
                    orderWide.setTm_id(skuInfo.getLong("TM_ID"));
                }

                @Override
                public String getKey(OrderWide orderWide) {
                    return orderWide.getSku_id().toString();
                }
            },
            60, TimeUnit.SECONDS
        );
        //TODO 11. Enrich with the trademark dimension (depends on tm_id set in step 10)
        SingleOutputStreamOperator<OrderWide> orderWideWithTmDS = AsyncDataStream.unorderedWait(
            orderWideWithSkuInfoDS,
            new DimAsyncFunction<OrderWide>("DIM_BASE_TRADEMARK") {
                @Override
                public void join(OrderWide orderWide, JSONObject jsonObject) throws Exception {
                    orderWide.setTm_name(jsonObject.getString("TM_NAME"));
                }

                @Override
                public String getKey(OrderWide orderWide) {
                    return String.valueOf(orderWide.getTm_id());
                }
            }, 60, TimeUnit.SECONDS);

        //TODO 12. Enrich with the category-3 dimension (depends on category3_id set in step 10)
        SingleOutputStreamOperator<OrderWide> orderWideWithCategory3DS = AsyncDataStream.unorderedWait(
            orderWideWithTmDS,
            new DimAsyncFunction<OrderWide>("DIM_BASE_CATEGORY3") {
                @Override
                public void join(OrderWide orderWide, JSONObject jsonObject) throws Exception {
                    orderWide.setCategory3_name(jsonObject.getString("NAME"));
                }

                @Override
                public String getKey(OrderWide orderWide) {
                    return String.valueOf(orderWide.getCategory3_id());
                }
            }, 60, TimeUnit.SECONDS);

        //TODO 13. Enrich with the SPU dimension (depends on spu_id set in step 10)
        SingleOutputStreamOperator<OrderWide> orderWideWithSpuDS = AsyncDataStream.unorderedWait(
            orderWideWithCategory3DS,
            new DimAsyncFunction<OrderWide>("DIM_SPU_INFO") {
                @Override
                public void join(OrderWide orderWide, JSONObject jsonObject) throws Exception {
                    orderWide.setSpu_name(jsonObject.getString("SPU_NAME"));
                }

                @Override
                public String getKey(OrderWide orderWide) {
                    return String.valueOf(orderWide.getSpu_id());
                }
            }, 60, TimeUnit.SECONDS);

        orderWideWithSpuDS.print(">>>");

        //TODO 14. Serialize the wide records and sink them to kafka topic dwm_order_wide
        orderWideWithSpuDS
            .map(orderWide -> JSON.toJSONString(orderWide))
            .addSink(MyKafkaUtil.getKafkaSink("dwm_order_wide"));

        env.execute("OrderWideApp");
    }
}
