package com.atguigu.gmall.realtime.app.dwm;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.atguigu.gmall.realtime.app.func.DimAsyncFunction;
import com.atguigu.gmall.realtime.beans.OrderDetail;
import com.atguigu.gmall.realtime.beans.OrderInfo;
import com.atguigu.gmall.realtime.beans.OrderWide;
import com.atguigu.gmall.realtime.utils.MyKafkaUtil;
import org.apache.flink.api.common.eventtime.SerializableTimestampAssigner;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.functions.RichMapFunction;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.datastream.AsyncDataStream;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.KeyedStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.co.ProcessJoinFunction;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;
import org.apache.flink.util.Collector;

import java.text.SimpleDateFormat;
import java.time.Duration;
import java.time.LocalDate;
import java.time.LocalDateTime;
import java.time.Period;
import java.time.ZoneId;
import java.time.format.DateTimeFormatter;
import java.util.Date;
import java.util.concurrent.TimeUnit;

/**
 * Author: Felix
 * Date: 2022/3/17
 * Desc: 订单宽表准备
 * 功能
 *      订单和订单明细双流join
 *      双流join的结果和维度进行关联
 * 需要启动的进程
 *      zk、kafka、maxwell、hdfs、hbase、Redis、BaseDBApp、OrderWideApp
 * 开发流程
 *      基本环境准备
 *      检查点相关设置
 *      从Kafka中读取订单和订单明细数据
 *      对读取的数据进行类型的转换  jsonStr->实体类对象
 *      指定Watermark以及提取事件时间字段
 *      通过keyby对两条流进行分组---指定连接字段 order_id
 *      使用intervalJoin进行连接
 *          keyedA.intervalJoin(keyedB).between(下边界,上边界).process(orderInfo + orderDetail ==>OrderWide)
 *      双流join的结果和维度进行关联
 *          基本维度关联的实现
 *              PhoenixUtil --- List<T> queryList(String sql,Class<T> clz)
 *              DimUtil     --- JSONObject getInfoNocache(String dimTableName,Tuple2<String,String>...params)
 *          优化1：旁路缓存
 *              先从缓存中查询维度数据，如果缓存中查到了，直接将维度返回；如果缓存中没有查到维度数据，那么再发送请求到Phoenix表
 *              中查询维度数据，并将查询的结果放到缓存中
 *              注意：
 *                  1.缓存产品
 *                      状态   -- 性能好，操作不方便，只能在当前进程中操作
 *                      Redis -- 性能也说得过去，操作方便
 *                  2.为什么在业务数据动态分流的时候没有直接用Redis
 *                      考虑的是维度数据常驻内存，对内存有压力
 *                  3.在维度关联的时候为什么又要将维度数据放到Redis中，不怕给内存压力了吗
 *                      设置缓存到Redis中维度数据的失效时间
 *                  4.业务数据库中，维度数据发生了变化，将Redis中缓存的数据清除
 *          优化2：异步IO
 *              在与外部系统交互（用数据库中的数据扩充流数据）的时候，需要考虑与外部系统的通信延迟对整个流处理应用的影响。
 *              简单地访问外部数据库的数据，比如使用 MapFunction，通常意味着同步交互： MapFunction 向数据库发送一个
 *              请求然后一直等待，直到收到响应。在许多情况下，等待占据了函数运行的大部分时间。
 *              注意： 仅仅提高 MapFunction 的并行度（parallelism）在有些情况下也可以提升吞吐量，但是这样做通常会导致非常高的资源消耗
 *              Flink提供了对流中的元素进行异步处理的API
 *              AsyncDataStream.[un]orderedWait(
 *                  流,
 *                  异步操作
 *                  DimAsyncFunction extends RichAsyncFunction{
 *                      open(){
 *                          获取线程池-----通过双重校验锁解决单例设计模式懒汉式线程安全的问题
 *                      }
 *                      asyncInvoke(){
 *                          发送异步请求，完成异步操作
 *                          **模板方法设计模式：在父类中定义了完成维度关联这件事执行步骤[核心算法骨架]
 *                          getKey --- 抽象
 *                          getDimInfo(key)
 *                          join(dimJsonObj,obj) ---抽象
 *                          将结果向下游传递  resultFuture.complete(Collections.singleton(obj));
 *                      }
 *                  },
 *                  超时时间,
 *                  时间单位
 *              )
 *
 */
public class OrderWideApp {

    // DateTimeFormatter is immutable and thread-safe (unlike SimpleDateFormat),
    // so a single class-level constant can be shared by every operator subtask.
    private static final DateTimeFormatter CREATE_TIME_FORMAT =
        DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss");

    /**
     * Parses a "yyyy-MM-dd HH:mm:ss" create_time string into epoch milliseconds,
     * interpreted in the JVM default time zone (same behavior as the previous
     * SimpleDateFormat-based parsing). Shared by both source streams to avoid
     * duplicating the conversion logic.
     */
    private static long parseCreateTs(String createTime) {
        return LocalDateTime.parse(createTime, CREATE_TIME_FORMAT)
            .atZone(ZoneId.systemDefault())
            .toInstant()
            .toEpochMilli();
    }

    public static void main(String[] args) throws Exception {
        //TODO 1. Basic environment setup
        //1.1 Create the stream execution environment
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        //1.2 Set parallelism
        env.setParallelism(4);
        //TODO 2. Checkpoint settings (omitted)
        //TODO 3. Read order and order-detail data from Kafka
        //3.1 Topics and consumer group
        String orderInfoTopic = "dwd_order_info";
        String orderDetailTopic = "dwd_order_detail";
        String groupId = "order_wide_groupId";
        //3.2 Build the Kafka consumers
        FlinkKafkaConsumer<String> orderInfoKafkaSource = MyKafkaUtil.getKafkaSource(orderInfoTopic, groupId);
        FlinkKafkaConsumer<String> orderDetailKafkaSource = MyKafkaUtil.getKafkaSource(orderDetailTopic, groupId);
        //3.3 Wrap the consumers as streams
        DataStreamSource<String> orderInfoStrDS = env.addSource(orderInfoKafkaSource);
        DataStreamSource<String> orderDetailStrDS = env.addSource(orderDetailKafkaSource);
        //orderInfoStrDS.print(">>>");
        //orderDetailStrDS.print("###");

        //TODO 4. Convert stream elements: jsonStr -> POJO
        // Parsing is stateless now (thread-safe formatter), so a plain
        // MapFunction suffices; no open() initialization is needed.
        //Orders
        SingleOutputStreamOperator<OrderInfo> orderInfoDS = orderInfoStrDS.map(
            new MapFunction<String, OrderInfo>() {
                @Override
                public OrderInfo map(String jsonStr) throws Exception {
                    OrderInfo orderInfo = JSON.parseObject(jsonStr, OrderInfo.class);
                    orderInfo.setCreate_ts(parseCreateTs(orderInfo.getCreate_time()));
                    return orderInfo;
                }
            }
        );
        //Order details
        SingleOutputStreamOperator<OrderDetail> orderDetailDS = orderDetailStrDS.map(
            new MapFunction<String, OrderDetail>() {
                @Override
                public OrderDetail map(String jsonStr) throws Exception {
                    OrderDetail orderDetail = JSON.parseObject(jsonStr, OrderDetail.class);
                    orderDetail.setCreate_ts(parseCreateTs(orderDetail.getCreate_time()));
                    return orderDetail;
                }
            }
        );
        //TODO 5. Assign watermarks and extract the event-time field
        //Orders
        SingleOutputStreamOperator<OrderInfo> orderInfoWithWatermarkDS = orderInfoDS.assignTimestampsAndWatermarks(
            WatermarkStrategy
                .<OrderInfo>forBoundedOutOfOrderness(Duration.ofSeconds(3))
                .withTimestampAssigner(
                    new SerializableTimestampAssigner<OrderInfo>() {
                        @Override
                        public long extractTimestamp(OrderInfo orderInfo, long recordTimestamp) {
                            return orderInfo.getCreate_ts();
                        }
                    }
                )
        );
        //Order details
        SingleOutputStreamOperator<OrderDetail> orderDetailWithWatermarkDS = orderDetailDS.assignTimestampsAndWatermarks(
            WatermarkStrategy
                .<OrderDetail>forBoundedOutOfOrderness(Duration.ofSeconds(3))
                .withTimestampAssigner(
                    new SerializableTimestampAssigner<OrderDetail>() {
                        @Override
                        public long extractTimestamp(OrderDetail orderDetail, long recordTimestamp) {
                            return orderDetail.getCreate_ts();
                        }
                    }
                )
        );
        //TODO 6. Join orders with order details via intervalJoin
        //6.1 Key both streams by the join field: the order id
        KeyedStream<OrderInfo, Long> orderInfoKeyedDS = orderInfoWithWatermarkDS.keyBy(OrderInfo::getId);
        KeyedStream<OrderDetail, Long> orderDetailKeyedDS = orderDetailWithWatermarkDS.keyBy(OrderDetail::getOrder_id);
        //6.2 Interval join: details within +/-5s (event time) of the order
        SingleOutputStreamOperator<OrderWide> orderWideDS = orderInfoKeyedDS
            .intervalJoin(orderDetailKeyedDS)
            .between(Time.seconds(-5), Time.seconds(5))
            .process(
                new ProcessJoinFunction<OrderInfo, OrderDetail, OrderWide>() {
                    @Override
                    public void processElement(OrderInfo orderInfo, OrderDetail orderDetail, Context ctx, Collector<OrderWide> out) throws Exception {
                        out.collect(new OrderWide(orderInfo, orderDetail));
                    }
                }
            );
        //orderWideDS.print(">>>>");
        //TODO 7. Enrich with the user dimension
        // Uses async I/O (DimAsyncFunction) instead of a synchronous MapFunction
        // lookup, so dimension queries do not block the stream.
        SingleOutputStreamOperator<OrderWide> orderWideWithUserInfoDS = AsyncDataStream.unorderedWait(
            orderWideDS,
            //AsyncFunction that dispatches the dimension lookup requests
            new DimAsyncFunction<OrderWide>("DIM_USER_INFO") {
                @Override
                public void join(JSONObject userInfoJsonObj, OrderWide orderWide) throws Exception {
                    String gender = userInfoJsonObj.getString("GENDER");
                    String birthday = userInfoJsonObj.getString("BIRTHDAY");
                    // BIRTHDAY is stored as ISO yyyy-MM-dd; Period gives the exact
                    // calendar age (the old elapsed-millis / 365-days division
                    // ignored leap years and could be off around birthdays).
                    LocalDate birthDate = LocalDate.parse(birthday);
                    int age = Period.between(birthDate, LocalDate.now()).getYears();

                    orderWide.setUser_gender(gender);
                    orderWide.setUser_age(age);
                }

                @Override
                public String getKey(OrderWide orderWide) {
                    return orderWide.getUser_id().toString();
                }
            },
            60, TimeUnit.SECONDS
        );
        //orderWideWithUserInfoDS.print(">>>>>>");

        //TODO 8. Enrich with the province dimension
        SingleOutputStreamOperator<OrderWide> orderWideWithProvinceInfoDS = AsyncDataStream.unorderedWait(
            orderWideWithUserInfoDS,
            new DimAsyncFunction<OrderWide>("DIM_BASE_PROVINCE") {
                @Override
                public void join(JSONObject provinceInfoJsonObj, OrderWide orderWide) throws Exception {
                    orderWide.setProvince_name(provinceInfoJsonObj.getString("NAME"));
                    orderWide.setProvince_area_code(provinceInfoJsonObj.getString("AREA_CODE"));
                    orderWide.setProvince_iso_code(provinceInfoJsonObj.getString("ISO_CODE"));
                    orderWide.setProvince_3166_2_code(provinceInfoJsonObj.getString("ISO_3166_2"));
                }

                @Override
                public String getKey(OrderWide orderWide) {
                    return orderWide.getProvince_id().toString();
                }
            },
            60, TimeUnit.SECONDS
        );
        //orderWideWithProvinceInfoDS.print(">>>>");

        //TODO 9. Enrich with the SKU dimension
        SingleOutputStreamOperator<OrderWide> orderWideWithSkuInfoDS = AsyncDataStream.unorderedWait(
            orderWideWithProvinceInfoDS,
            new DimAsyncFunction<OrderWide>("DIM_SKU_INFO") {
                @Override
                public void join(JSONObject skuInfoJsonObj, OrderWide orderWide) throws Exception {
                    orderWide.setSku_name(skuInfoJsonObj.getString("SKU_NAME"));
                    orderWide.setSpu_id(skuInfoJsonObj.getLong("SPU_ID"));
                    orderWide.setCategory3_id(skuInfoJsonObj.getLong("CATEGORY3_ID"));
                    orderWide.setTm_id(skuInfoJsonObj.getLong("TM_ID"));
                }

                @Override
                public String getKey(OrderWide orderWide) {
                    return orderWide.getSku_id().toString();
                }
            },
            60, TimeUnit.SECONDS
        );
        //orderWideWithSkuInfoDS.print(">>>>>");
        //TODO 10. Enrich with the SPU dimension
        SingleOutputStreamOperator<OrderWide> orderWideWithSpuInfoDS = AsyncDataStream.unorderedWait(
            orderWideWithSkuInfoDS,
            new DimAsyncFunction<OrderWide>("DIM_SPU_INFO") {
                @Override
                public void join(JSONObject spuInfoJsonObj, OrderWide orderWide) throws Exception {
                    orderWide.setSpu_name(spuInfoJsonObj.getString("SPU_NAME"));
                }

                @Override
                public String getKey(OrderWide orderWide) {
                    return orderWide.getSpu_id().toString();
                }
            },
            60, TimeUnit.SECONDS
        );
        //TODO 11. Enrich with the category3 dimension
        SingleOutputStreamOperator<OrderWide> orderWideWithCategory3DS = AsyncDataStream.unorderedWait(
            orderWideWithSpuInfoDS,
            new DimAsyncFunction<OrderWide>("DIM_BASE_CATEGORY3") {
                @Override
                public void join(JSONObject category3InfoJsonObj, OrderWide orderWide) throws Exception {
                    orderWide.setCategory3_name(category3InfoJsonObj.getString("NAME"));
                }

                @Override
                public String getKey(OrderWide orderWide) {
                    return orderWide.getCategory3_id().toString();
                }
            },
            60, TimeUnit.SECONDS
        );
        //TODO 12. Enrich with the trademark dimension
        SingleOutputStreamOperator<OrderWide> orderWideWithTmDS = AsyncDataStream.unorderedWait(
            orderWideWithCategory3DS,
            new DimAsyncFunction<OrderWide>("DIM_BASE_TRADEMARK") {
                @Override
                public void join(JSONObject tmInfoJsonObj, OrderWide orderWide) throws Exception {
                    orderWide.setTm_name(tmInfoJsonObj.getString("TM_NAME"));
                }

                @Override
                public String getKey(OrderWide orderWide) {
                    return orderWide.getTm_id().toString();
                }
            },
            60, TimeUnit.SECONDS
        );

        orderWideWithTmDS.print(">>>>");

        //TODO 13. Write the enriched wide records to Kafka topic dwm_order_wide
        orderWideWithTmDS
            .map(orderWide->JSON.toJSONString(orderWide))
            .addSink(MyKafkaUtil.getKafkaSink("dwm_order_wide"));
        env.execute();
    }
}
