package com.atguigu.gmall.realtime.app.dwm;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.atguigu.gmall.realtime.app.func.DimAsyncFunction;
import com.atguigu.gmall.realtime.beans.OrderDetail;
import com.atguigu.gmall.realtime.beans.OrderInfo;
import com.atguigu.gmall.realtime.beans.OrderWide;
import com.atguigu.gmall.realtime.utils.MyKafkaUtil;
import org.apache.flink.api.common.eventtime.SerializableTimestampAssigner;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.functions.RichMapFunction;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.datastream.AsyncDataStream;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.KeyedStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.co.ProcessJoinFunction;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;
import org.apache.flink.util.Collector;

import java.text.SimpleDateFormat;
import java.time.Duration;
import java.time.LocalDate;
import java.time.LocalDateTime;
import java.time.ZoneId;
import java.time.format.DateTimeFormatter;
import java.util.concurrent.TimeUnit;

/**
 * Author: Felix
 * Date: 2022/4/8
 * Desc: 订单宽表
 * 需要启动的进程
 *      zk、kafka、maxwell、hdfs、hbase、Redis、BaseDBApp、OrderWideApp
 * 执行流程
 *      提前：对历史维度数据进行处理  将维度历史数据通过maxwell-bootstrap同步到phoenix维度表中
 *
 *      模拟生成业务数据jar包运行
 *      生成订单以及订单明细数据  并保存到业务数据库MySQL
 *      binlog会记录业务数据库表的变化
 *      maxwell会将binlog中变化的数据采集到并发送到ods_base_db_m主题中
 *      BaseDBApp读取ods_base_db_m主题数据，并根据配置表进行动态分流
 *          -事实数据(主流)  发送到kafka的dwd层主题中
 *          -维度数据(侧输出流)  保存到phoenix对应的维度表中
 *      OrderWideApp从kafka的dwd层读取订单和订单明细数据并进行双流join
 *          订单流
 *              .intervalJoin(订单明细流)
 *              .between(下界,上界)
 *              .process(orderInfo + orderDetail==>OrderWide)
 *      用双流join之后的数据和维度进行关联
 *          基本实现
 *              PhoenixUtil
 *                  List<T> queryList(String sql,Class<T> clz)
 *              DimUtil
 *                  JSONObject getDimInfoNoCache(String tableName,Tuple2<String,String> ... params)
 *          优化1：旁路缓存
 *              思路：先从缓存中获取维度数据，如果缓存中存在，直接将缓存中的数据返回；
 *                  如果缓存中不存在要查询的维度数据，那么发送请求到phoenix表中查询，并将查询的结果放到缓存中缓存起来
 *              分析：
 *                  缓存产品
 *                      状态(性能好，操作不方便)      Redis(性能也不错，操作方便)
 *                  Redis类型
 *                      String
 *                  Redis的key
 *                      dim:维度表表名:主键值1_主键值2
 *                  Redis的失效时间
 *                      1day
 *                  如果业务数据库中维度数据发生了变化，清除Redis中缓存的数据
 *
 *          优化2：异步IO
 *              Flink程序在对流中数据进行处理的时候，可以通过设置并行度提升并行处理能力，但是更多的并行度意味着更多的硬件资源，不可能无限制的提升
 *              在有限的并行度下，在和外部系统交互的时候，我们需要考虑使用异步IO
 *              异步IO：在单个并行度上，通过开启多线程的方式，发送异步请求。
 *              AsyncDataStream.[un]OrderedWait(
 *                  流,
 *                  异步操作 implements AsyncFunction,
 *                  超时时间,
 *                  时间单位
 *              );
 *
 *              自定义DimAsyncFunction extends RichAsyncFunction{
 *                  open(){
 *                      获取线程池对象
 *                  }
 *                  asyncInvoke(){
 *                      从线程池中获取线程并执行，完成维度关联的操作
 *                       在当前父类中定义了完成维度关联这个操作的具体实现步骤，但是我们没有办法给出具体的实现，实现应该由子类完成----模板方法设计模式
 *                          1.根据流中的对象获取维度的主键
 *                          2.根据主键获取维度对象
 *                          3.将维度对象的属性值补充到流中对象属性上
 *                  }
 *              }
 *     最后将订单宽表数据写到kafka主题dwm_order_wide
 */
public class OrderWideApp {
    public static void main(String[] args) throws Exception {
        //TODO 1. Prepare the basic environment
        //1.1 Create the stream execution environment
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        //1.2 Set the parallelism
        env.setParallelism(4);

        //TODO 2. Checkpoint settings (omitted)

        //TODO 3. Read data from Kafka
        //3.1 Declare the topics to consume and the consumer group
        String orderInfoTopic = "dwd_order_info";
        String orderDetailTopic = "dwd_order_detail";
        String groupId = "order_wide_group";
        //3.2 Create the consumer objects
        FlinkKafkaConsumer<String> orderInfoKafkaSource = MyKafkaUtil.getKafkaSource(orderInfoTopic, groupId);
        FlinkKafkaConsumer<String> orderDetailKafkaSource = MyKafkaUtil.getKafkaSource(orderDetailTopic, groupId);
        //3.3 Consume the topics and wrap them as streams
        DataStreamSource<String> orderInfoStrDS = env.addSource(orderInfoKafkaSource);
        DataStreamSource<String> orderDetailStrDS = env.addSource(orderDetailKafkaSource);

        //orderInfoStrDS.print(">>>>>");
        //orderDetailStrDS.print("#####");

        //TODO 4. Convert the raw JSON strings into entity objects.
        // The legacy (mutable, non-thread-safe) SimpleDateFormat is replaced with
        // java.time: DateTimeFormatter is immutable and thread-safe, but it is not
        // Serializable, so it is created per parallel subtask in open().
        // The resulting epoch millis are identical (same pattern, default time zone).
        // Order stream
        SingleOutputStreamOperator<OrderInfo> orderInfoDS = orderInfoStrDS.map(
            new RichMapFunction<String, OrderInfo>() {
                private transient DateTimeFormatter formatter;

                @Override
                public void open(Configuration parameters) throws Exception {
                    formatter = DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss");
                }

                @Override
                public OrderInfo map(String jsonStr) throws Exception {
                    OrderInfo orderInfo = JSON.parseObject(jsonStr, OrderInfo.class);
                    // create_time is formatted as "yyyy-MM-dd HH:mm:ss"; convert it
                    // to epoch millis in the system default zone for event time.
                    long createTs = LocalDateTime
                        .parse(orderInfo.getCreate_time(), formatter)
                        .atZone(ZoneId.systemDefault())
                        .toInstant()
                        .toEpochMilli();
                    orderInfo.setCreate_ts(createTs);
                    return orderInfo;
                }
            }
        );
        // Order-detail stream
        SingleOutputStreamOperator<OrderDetail> orderDetailDS = orderDetailStrDS.map(
            new RichMapFunction<String, OrderDetail>() {
                private transient DateTimeFormatter formatter;

                @Override
                public void open(Configuration parameters) throws Exception {
                    formatter = DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss");
                }

                @Override
                public OrderDetail map(String jsonStr) throws Exception {
                    OrderDetail orderDetail = JSON.parseObject(jsonStr, OrderDetail.class);
                    long createTs = LocalDateTime
                        .parse(orderDetail.getCreate_time(), formatter)
                        .atZone(ZoneId.systemDefault())
                        .toInstant()
                        .toEpochMilli();
                    orderDetail.setCreate_ts(createTs);
                    return orderDetail;
                }
            }
        );

        //TODO 5. Assign watermarks and extract the event-time field
        // Both streams tolerate up to 3 seconds of out-of-orderness.
        // Order stream
        SingleOutputStreamOperator<OrderInfo> orderInfoWithWatermarkDS = orderInfoDS.assignTimestampsAndWatermarks(
            WatermarkStrategy
                .<OrderInfo>forBoundedOutOfOrderness(Duration.ofSeconds(3))
                .withTimestampAssigner(
                    new SerializableTimestampAssigner<OrderInfo>() {
                        @Override
                        public long extractTimestamp(OrderInfo orderInfo, long recordTimestamp) {
                            return orderInfo.getCreate_ts();
                        }
                    }
                )
        );
        // Order-detail stream
        SingleOutputStreamOperator<OrderDetail> orderDetailWithWatermarkDS = orderDetailDS.assignTimestampsAndWatermarks(
            WatermarkStrategy
                .<OrderDetail>forBoundedOutOfOrderness(Duration.ofSeconds(3))
                .withTimestampAssigner(
                    new SerializableTimestampAssigner<OrderDetail>() {
                        @Override
                        public long extractTimestamp(OrderDetail orderDetail, long recordTimestamp) {
                            return orderDetail.getCreate_ts();
                        }
                    }
                )
        );

        //TODO 6. Key both streams by the join field order_id
        // Order stream (its id IS the order id)
        KeyedStream<OrderInfo, Long> orderInfoKeyedDS = orderInfoWithWatermarkDS.keyBy(OrderInfo::getId);
        // Order-detail stream
        KeyedStream<OrderDetail, Long> orderDetailKeyedDS = orderDetailWithWatermarkDS.keyBy(OrderDetail::getOrder_id);

        //TODO 7. Join order and order-detail with an interval join
        // A detail record is matched with the order whose event time lies within
        // [detail time - 5s, detail time + 5s].
        SingleOutputStreamOperator<OrderWide> joinDS = orderInfoKeyedDS
            .intervalJoin(orderDetailKeyedDS)
            .between(Time.seconds(-5), Time.seconds(5))
            .process(
                new ProcessJoinFunction<OrderInfo, OrderDetail, OrderWide>() {
                    @Override
                    public void processElement(OrderInfo orderInfo, OrderDetail orderDetail, Context ctx, Collector<OrderWide> out) throws Exception {
                        out.collect(new OrderWide(orderInfo, orderDetail));
                    }
                }
            );
        //joinDS.print(">>>>");

        //TODO 8. Join with the user dimension
        // Async I/O is applied to the stream as a transformation: DimAsyncFunction
        // (a RichAsyncFunction) looks the dimension row up in HBase/Phoenix through a
        // thread pool, with a side cache in Redis. getKey supplies the row key and
        // join merges the dimension attributes into the wide record.
        SingleOutputStreamOperator<OrderWide> orderWideWithUserInfoDS = AsyncDataStream.unorderedWait(
            joinDS,
            new DimAsyncFunction<OrderWide>("DIM_USER_INFO") {
                @Override
                public void join(OrderWide orderWide, JSONObject userInfoJsonObj) throws Exception {
                    String gender = userInfoJsonObj.getString("GENDER");
                    String birthday = userInfoJsonObj.getString("BIRTHDAY");
                    // BIRTHDAY is "yyyy-MM-dd" (ISO local date). Age is approximated
                    // in whole 365-day years from the start of the birth day in the
                    // system default zone, matching the original computation.
                    long birthMillis = LocalDate.parse(birthday)
                        .atStartOfDay(ZoneId.systemDefault())
                        .toInstant()
                        .toEpochMilli();
                    long ageYears = (System.currentTimeMillis() - birthMillis) / 1000 / 60 / 60 / 24 / 365;

                    orderWide.setUser_gender(gender);
                    orderWide.setUser_age((int) ageYears);
                }

                @Override
                public String getKey(OrderWide orderWide) {
                    return orderWide.getUser_id().toString();
                }
            },
            60,
            TimeUnit.SECONDS
        );
        //orderWideWithUserInfoDS.print(">>>>");

        //TODO 9. Join with the province dimension
        SingleOutputStreamOperator<OrderWide> orderWideWithProvinceInfoDS = AsyncDataStream.unorderedWait(
            orderWideWithUserInfoDS,
            new DimAsyncFunction<OrderWide>("DIM_BASE_PROVINCE") {
                @Override
                public void join(OrderWide orderWide, JSONObject provinceInfoJsonObj) throws Exception {
                    // Columns: ID,NAME,REGION_ID,AREA_CODE,ISO_CODE,ISO_3166_2
                    orderWide.setProvince_name(provinceInfoJsonObj.getString("NAME"));
                    orderWide.setProvince_area_code(provinceInfoJsonObj.getString("AREA_CODE"));
                    orderWide.setProvince_iso_code(provinceInfoJsonObj.getString("ISO_CODE"));
                    orderWide.setProvince_3166_2_code(provinceInfoJsonObj.getString("ISO_3166_2"));
                }

                @Override
                public String getKey(OrderWide orderWide) {
                    return orderWide.getProvince_id().toString();
                }
            },
            60, TimeUnit.SECONDS
        );
        //orderWideWithProvinceInfoDS.print(">>>>");

        //TODO 10. Join with the SKU dimension
        SingleOutputStreamOperator<OrderWide> orderWideWithSkuInfoDS = AsyncDataStream.unorderedWait(
            orderWideWithProvinceInfoDS,
            new DimAsyncFunction<OrderWide>("DIM_SKU_INFO") {
                @Override
                public void join(OrderWide orderWide, JSONObject skuInfoJsonObj) throws Exception {
                    // Columns: ID,SPU_ID,PRICE,SKU_NAME,SKU_DESC,WEIGHT,TM_ID,CATEGORY3_ID,SKU_DEFAULT_IMG,IS_SALE,CREATE_TIME
                    orderWide.setSku_name(skuInfoJsonObj.getString("SKU_NAME"));
                    orderWide.setSpu_id(skuInfoJsonObj.getLong("SPU_ID"));
                    orderWide.setCategory3_id(skuInfoJsonObj.getLong("CATEGORY3_ID"));
                    orderWide.setTm_id(skuInfoJsonObj.getLong("TM_ID"));
                }

                @Override
                public String getKey(OrderWide orderWide) {
                    return orderWide.getSku_id().toString();
                }
            },
            60, TimeUnit.SECONDS
        );
        //orderWideWithSkuInfoDS.print(">>>>");

        //TODO 11. Join with the SPU dimension (spu_id was filled in by the SKU join)
        SingleOutputStreamOperator<OrderWide> orderWideWithSpuDS = AsyncDataStream.unorderedWait(
            orderWideWithSkuInfoDS, new DimAsyncFunction<OrderWide>("DIM_SPU_INFO") {
                @Override
                public void join(OrderWide orderWide, JSONObject jsonObject) throws Exception {
                    orderWide.setSpu_name(jsonObject.getString("SPU_NAME"));
                }

                @Override
                public String getKey(OrderWide orderWide) {
                    return String.valueOf(orderWide.getSpu_id());
                }
            }, 60, TimeUnit.SECONDS);

        //TODO 12. Join with the category-3 dimension
        SingleOutputStreamOperator<OrderWide> orderWideWithCategory3DS = AsyncDataStream.unorderedWait(
            orderWideWithSpuDS, new DimAsyncFunction<OrderWide>("DIM_BASE_CATEGORY3") {
                @Override
                public void join(OrderWide orderWide, JSONObject jsonObject) throws Exception {
                    orderWide.setCategory3_name(jsonObject.getString("NAME"));
                }

                @Override
                public String getKey(OrderWide orderWide) {
                    return String.valueOf(orderWide.getCategory3_id());
                }
            }, 60, TimeUnit.SECONDS);

        //TODO 13. Join with the trademark dimension
        SingleOutputStreamOperator<OrderWide> orderWideWithTmDS = AsyncDataStream.unorderedWait(
            orderWideWithCategory3DS, new DimAsyncFunction<OrderWide>("DIM_BASE_TRADEMARK") {
                @Override
                public void join(OrderWide orderWide, JSONObject jsonObject) throws Exception {
                    orderWide.setTm_name(jsonObject.getString("TM_NAME"));
                }

                @Override
                public String getKey(OrderWide orderWide) {
                    return String.valueOf(orderWide.getTm_id());
                }
            }, 60, TimeUnit.SECONDS);

        orderWideWithTmDS.print(">>>>");

        //TODO 14. Write the wide records to the Kafka topic dwm_order_wide
        orderWideWithTmDS
            .map(JSON::toJSONString)
            .addSink(MyKafkaUtil.getKafkaSink("dwm_order_wide"));

        env.execute("OrderWideApp");
    }
}
