package com.atguigu.gmall.realtime.app.dws;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.atguigu.gmall.realtime.app.func.DimAsyncFunction;
import com.atguigu.gmall.realtime.bean.TradeSkuOrderBean;
import com.atguigu.gmall.realtime.util.DateFormatUtil;
import com.atguigu.gmall.realtime.util.MyClickHouseUtil;
import com.atguigu.gmall.realtime.util.MyKafkaUtil;
import com.atguigu.gmall.realtime.util.TimestampLtz3CompareUtil;
import org.apache.commons.lang3.StringUtils;
import org.apache.flink.api.common.eventtime.SerializableTimestampAssigner;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.functions.ReduceFunction;
import org.apache.flink.api.common.state.StateTtlConfig;
import org.apache.flink.api.common.state.ValueState;
import org.apache.flink.api.common.state.ValueStateDescriptor;
import org.apache.flink.api.common.time.Time;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.datastream.*;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.KeyedProcessFunction;
import org.apache.flink.streaming.api.functions.windowing.WindowFunction;
import org.apache.flink.streaming.api.windowing.assigners.TumblingEventTimeWindows;
import org.apache.flink.streaming.api.windowing.windows.TimeWindow;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;
import org.apache.flink.util.Collector;

import java.time.Duration;
import java.util.Collections;
import java.util.HashSet;
import java.util.concurrent.TimeUnit;

/**
 * @author Felix
 * @date 2022/8/7
 * 交易域  商品维度下单聚合统计
 * 需要启动的进程
 *      zk、kafka、maxwell、hdfs、hbase、redis、Clickhouse
 *      DwdTradeOrderPreProcess、DwdTradeOrderDetail、DwsTradeSkuOrderWindow
 * 开发流程
 *      基本环境准备
 *      检查点设置
 *      从kafka的下单主题中读取下单数据
 *      对读取的数据进行类型转换    jsonStr->jsonObj
 *      按照订单明细id进行分组
 *      去重
 *      对流中数据类型进行转换      jsonObj->实体类对象
 *      按照用户id进行分组
 *      判断是否独立用户
 *      指定watermark以及提取事件时间字段
 *      按照统计维度skuid进行分组
 *      开窗
 *      聚合计算
 *      和维度表进行关联
 *          基本维度关联实现
 *              PhoenixUtil----List<T> queryList(conn,sql,clz)
 *              DimUtil--------JSONObject getDimInfoNoCache(conn,tableName,Tuple2 ... params)
 *          旁路缓存
 *              DimUtil--------JSONObject getDimInfo(conn,tableName,Tuple2 ... params)
 *              选型：redis √        flink状态
 *              TTL: 1day
 *              思路:先从缓存中获取维度数据，如果缓存中存在，直接将维度数据返回(缓存命中);
 * 			        如果缓存中不存在要查找的维度数据，那么发送请求到phoenix表中查询维度，并将
 * 			        查询的结果放到缓存中缓存起来
 * 			    注意：如果业务系统中维度数据发生了变化，需要将缓存中对应的维度删除掉
 *          异步IO
 *              为什么要使用异步：
 *                  默认情况下，在使用map相关的算子处理流中元素的时候，在同一个并行度上，使用的是同步的处理方式。
 *                  如果在flink程序中，要想提升处理能力，第一种方式是加大并行度，但是这种方式意味需要更多的硬件资源，不能无限制提升；
 *                  第二种方式，是使用异步
 *              如何使用
 *                  AsyncDataStream.[un]orderedWait(
 *                      流,
 *                      执行的异步操作 implements AsyncFunction,
 *                      超时时间,
 *                      时间单位
 *                  );
 *                  class DimAsyncFunction extends RichAsyncFunction{
 *                      asyncInvoke(){
 *                          开启多个线程，发送异步请求
 *                              根据流中对象获取要关联的维度主键
 *                              根据维度的主键获取维度对象
 *                              将维度对象的属性补充到流中的对象上
 *                      }
 *                  }
 *               模板方法设计模式
 *                  在父类中定义实现某一个功能的核心算法的骨架，具体的实现延迟到子类中去完成；
 *                  子类在不改变父类核心算法骨架前提下，每一个子类都可以有不同的实现方式。
 *
 */
public class DwsTradeSkuOrderWindow {
    public static void main(String[] args) throws Exception {
        //TODO 1. Basic environment setup
        //1.1 Create the stream execution environment
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        //1.2 Set parallelism
        env.setParallelism(4);
        //TODO 2. Checkpoint configuration (omitted)
        //TODO 3. Read from the Kafka topic
        //3.1 Declare the topic to consume and the consumer group
        String topic = "dwd_trade_order_detail";
        String groupId = "dws_trade_sku_order_group";
        //3.2 Create the consumer
        FlinkKafkaConsumer<String> kafkaConsumer = MyKafkaUtil.getKafkaConsumer(topic, groupId);
        //3.3 Consume the data and wrap it as a stream
        DataStreamSource<String> kafkaStrDS = env.addSource(kafkaConsumer);

        //TODO 4. Convert the stream type: jsonStr -> jsonObj
        SingleOutputStreamOperator<JSONObject> jsonObjDS = kafkaStrDS.map(JSON::parseObject);
        // Sample record:
        //{"create_time":"2022-07-26 16:33:08","sku_num":"1","split_original_amount":"11.0000",
        // "sku_id":"24","date_id":"2022-07-26","source_type_name":"商品推广","user_id":"34",
        // "province_id":"31","source_type_code":"2402","row_op_ts":"2022-08-07 08:33:08.973Z",
        // "sku_name":"金沙河面条 原味银丝挂面 龙须面 方便速食拉面 清汤面 900g","id":"279","source_id":"72",
        // "order_id":"136","split_total_amount":"11.0","ts":"1659861188"}
        // jsonObjDS.print(">>>>");

        //TODO 5. Key by the unique key (order-detail id) so that the duplicate
        //        records produced by the upstream outer join can be deduplicated
        KeyedStream<JSONObject, String> orderDetailIdKeyedDS = jsonObjDS.keyBy(jsonObj -> jsonObj.getString("id"));

        //TODO 6. Deduplicate with Flink state + a processing-time timer:
        //        keep only the record with the latest row_op_ts per order-detail id,
        //        and emit it 5 seconds after the first record for that id arrives.
        SingleOutputStreamOperator<JSONObject> distinctDS = orderDetailIdKeyedDS.process(
            new KeyedProcessFunction<String, JSONObject, JSONObject>() {
                // Latest record seen for the current key.
                private ValueState<JSONObject> lastJsonObjState;

                @Override
                public void open(Configuration parameters) throws Exception {
                    lastJsonObjState
                        = getRuntimeContext().getState(new ValueStateDescriptor<JSONObject>("lastJsonObjState", JSONObject.class));
                }

                @Override
                public void processElement(JSONObject jsonObj, Context ctx, Collector<JSONObject> out) throws Exception {
                    JSONObject lastJsonObj = lastJsonObjState.value();
                    if (lastJsonObj == null) {
                        // First record for this key: buffer it and register a timer
                        // that fires 5 s later to emit whichever version survived.
                        lastJsonObjState.update(jsonObj);
                        long currentProcessingTime = ctx.timerService().currentProcessingTime();
                        ctx.timerService().registerProcessingTimeTimer(currentProcessingTime + 5000L);
                    } else {
                        // Duplicate arrived: keep the record with the newer row_op_ts,
                        // e.g. "row_op_ts":"2022-08-07 08:33:08.973"
                        String lastRowOpTs = lastJsonObj.getString("row_op_ts");
                        String rowOpTs = jsonObj.getString("row_op_ts");
                        if (TimestampLtz3CompareUtil.compare(lastRowOpTs, rowOpTs) <= 0) {
                            lastJsonObjState.update(jsonObj);
                        }
                    }
                }

                @Override
                public void onTimer(long timestamp, OnTimerContext ctx, Collector<JSONObject> out) throws Exception {
                    // Emit the surviving record and clear the state.
                    JSONObject lastJsonObj = lastJsonObjState.value();
                    if (lastJsonObj != null) {
                        out.collect(lastJsonObj);
                    }
                    lastJsonObjState.clear();
                }
            }
        );

        //TODO 7. Convert again: jsonObj -> statistics bean
        SingleOutputStreamOperator<TradeSkuOrderBean> skuOrderDS = distinctDS.map(
            new MapFunction<JSONObject, TradeSkuOrderBean>() {
                @Override
                public TradeSkuOrderBean map(JSONObject jsonObj) throws Exception {
                    String orderId = jsonObj.getString("order_id");
                    String userId = jsonObj.getString("user_id");
                    String skuId = jsonObj.getString("sku_id");
                    Double splitOriginalAmount = jsonObj.getDouble("split_original_amount");
                    Double splitActivityAmount = jsonObj.getDouble("split_activity_amount");
                    Double splitCouponAmount = jsonObj.getDouble("split_coupon_amount");
                    Double splitTotalAmount = jsonObj.getDouble("split_total_amount");
                    // "ts" is in seconds (see the sample record above); convert to ms.
                    Long ts = jsonObj.getLong("ts") * 1000L;

                    TradeSkuOrderBean tradeSkuOrderBean = TradeSkuOrderBean.builder()
                        .orderIdSet(new HashSet<String>(
                            Collections.singleton(orderId)
                        ))
                        .skuId(skuId)
                        .userId(userId)
                        .orderUuCount(0L)
                        // Guard every split amount against null: the window reduce
                        // auto-unboxes these for addition, so a null here would
                        // throw an NPE at aggregation time. (Previously only
                        // activity/coupon amounts were guarded.)
                        .originalAmount(splitOriginalAmount == null ? 0.0 : splitOriginalAmount)
                        .activityAmount(splitActivityAmount == null ? 0.0 : splitActivityAmount)
                        .couponAmount(splitCouponAmount == null ? 0.0 : splitCouponAmount)
                        .orderAmount(splitTotalAmount == null ? 0.0 : splitTotalAmount)
                        .ts(ts)
                        .build();
                    return tradeSkuOrderBean;
                }
            }
        );

        //TODO 8. Key by user id — needed to detect first-order-of-the-day users
        KeyedStream<TradeSkuOrderBean, String> userIdKeyedDS = skuOrderDS.keyBy(TradeSkuOrderBean::getUserId);

        //TODO 9. Mark whether this is the user's first order of the day (unique user)
        SingleOutputStreamOperator<TradeSkuOrderBean> uuSkuOrderDS = userIdKeyedDS.process(
            new KeyedProcessFunction<String, TradeSkuOrderBean, TradeSkuOrderBean>() {
                // Date (yyyy-MM-dd) of this user's last order; TTL 1 day keeps state bounded.
                private ValueState<String> lastOrderDateState;

                @Override
                public void open(Configuration parameters) throws Exception {
                    ValueStateDescriptor<String> valueStateDescriptor
                        = new ValueStateDescriptor<>("lastOrderDateState", String.class);
                    valueStateDescriptor.enableTimeToLive(StateTtlConfig.newBuilder(Time.days(1)).build());
                    this.lastOrderDateState = getRuntimeContext().getState(valueStateDescriptor);
                }

                @Override
                public void processElement(TradeSkuOrderBean tradeSkuOrderBean, Context ctx, Collector<TradeSkuOrderBean> out) throws Exception {
                    String lastOrderDate = lastOrderDateState.value();
                    String curOrderDate = DateFormatUtil.toDate(tradeSkuOrderBean.getTs());
                    if (StringUtils.isEmpty(lastOrderDate) || !curOrderDate.equals(lastOrderDate)) {
                        // First order of this date for this user.
                        tradeSkuOrderBean.setOrderUuCount(1L);
                        lastOrderDateState.update(curOrderDate);
                    }
                    out.collect(tradeSkuOrderBean);
                }
            }
        );


        //TODO 10. Assign watermarks and extract the event-time field (3 s out-of-orderness)
        SingleOutputStreamOperator<TradeSkuOrderBean> withWatermarkDS = uuSkuOrderDS.assignTimestampsAndWatermarks(
            WatermarkStrategy
                .<TradeSkuOrderBean>forBoundedOutOfOrderness(Duration.ofSeconds(3))
                .withTimestampAssigner(
                    new SerializableTimestampAssigner<TradeSkuOrderBean>() {
                        @Override
                        public long extractTimestamp(TradeSkuOrderBean tradeSkuOrderBean, long recordTimestamp) {
                            return tradeSkuOrderBean.getTs();
                        }
                    }
                )
        );

        //TODO 11. Key by the statistics dimension (sku id) for windowed aggregation
        KeyedStream<TradeSkuOrderBean, String> skuIdKeyedDS = withWatermarkDS.keyBy(TradeSkuOrderBean::getSkuId);

        //TODO 12. Open a 10-second tumbling event-time window
        WindowedStream<TradeSkuOrderBean, String, TimeWindow> windowDS
            = skuIdKeyedDS.window(TumblingEventTimeWindows.of(org.apache.flink.streaming.api.windowing.time.Time.seconds(10)));

        //TODO 13. Aggregate: incremental reduce + window function for window metadata
        SingleOutputStreamOperator<TradeSkuOrderBean> reduceDS = windowDS.reduce(
            new ReduceFunction<TradeSkuOrderBean>() {
                @Override
                public TradeSkuOrderBean reduce(TradeSkuOrderBean value1, TradeSkuOrderBean value2) throws Exception {
                    // Merge order-id sets (distinct order count) and sum all measures.
                    value1.getOrderIdSet().addAll(value2.getOrderIdSet());
                    value1.setOrderUuCount(value1.getOrderUuCount() + value2.getOrderUuCount());
                    value1.setOriginalAmount(value1.getOriginalAmount() + value2.getOriginalAmount());
                    value1.setActivityAmount(value1.getActivityAmount() + value2.getActivityAmount());
                    value1.setCouponAmount(value1.getCouponAmount() + value2.getCouponAmount());
                    value1.setOrderAmount(value1.getOrderAmount() + value2.getOrderAmount());
                    return value1;
                }
            },
            new WindowFunction<TradeSkuOrderBean, TradeSkuOrderBean, String, TimeWindow>() {
                @Override
                public void apply(String s, TimeWindow window, Iterable<TradeSkuOrderBean> input, Collector<TradeSkuOrderBean> out) throws Exception {
                    String stt = DateFormatUtil.toYmdHms(window.getStart());
                    String edt = DateFormatUtil.toYmdHms(window.getEnd());
                    for (TradeSkuOrderBean orderBean : input) {
                        orderBean.setStt(stt);
                        orderBean.setEdt(edt);
                        orderBean.setTs(System.currentTimeMillis());
                        // Distinct orders = size of the merged order-id set.
                        orderBean.setOrderCount((long) orderBean.getOrderIdSet().size());
                        out.collect(orderBean);
                    }
                }
            }
        );

        // Sample aggregated record:
        // TradeSkuOrderBean(stt=2022-08-09 10:24:20, edt=2022-08-09 10:24:30, trademarkId=null, trademarkName=null, category1Id=null,
        // category1Name=null, category2Id=null, category2Name=null, category3Id=null, category3Name=null, orderIdSet=[144], userId=195,
        // skuId=27, skuName=null, spuId=null, spuName=null, orderUuCount=0, orderCount=1, originalAmount=387.0, activityAmount=39.0,
        // couponAmount=0.0, orderAmount=348.0, ts=1660011996795)
        // reduceDS.print(">>>>>");

        //TODO 14. Join with the sku dimension
        /*
        // Basic (synchronous) dimension-join implementation, kept for reference:
        SingleOutputStreamOperator<TradeSkuOrderBean> withSkuInfoDS = reduceDS.map(
            new RichMapFunction<TradeSkuOrderBean, TradeSkuOrderBean>() {
                private DruidDataSource dataSource;
                @Override
                public void open(Configuration parameters) throws Exception {
                    dataSource = DruidDSUtil.createDataSource();
                }

                @Override
                public TradeSkuOrderBean map(TradeSkuOrderBean orderBean) throws Exception {
                    // Get the dimension key from the stream element
                    String skuId = orderBean.getSkuId();
                    Connection conn = null;
                    try {
                        // Look up the dimension row by primary key
                        conn = dataSource.getConnection();
                        //ID,SPU_ID,PRICE,SKU_NAME,SKU_DESC,WEIGHT,TM_ID,CATEGORY3_ID,SKU_DEFAULT_IMG,IS_SALE,CREATE_TIME
                        JSONObject dimInfoJsonObj = DimUtil.getDimInfo(conn, "DIM_SKU_INFO", Tuple2.of("id", skuId));
                        // Copy dimension attributes onto the stream element
                        orderBean.setSkuName(dimInfoJsonObj.getString("SKU_NAME"));
                        orderBean.setTrademarkId(dimInfoJsonObj.getString("TM_ID"));
                        orderBean.setCategory3Id(dimInfoJsonObj.getString("CATEGORY3_ID"));
                        orderBean.setSpuId(dimInfoJsonObj.getString("SPU_ID"));
                    } catch (SQLException e) {
                        e.printStackTrace();
                    } finally {
                        if(conn != null){
                            conn.close();
                        }
                    }
                    return orderBean;
                }
            }
        );

        withSkuInfoDS.print(">>>>>");*/
        // Apply async I/O as a transformation on the DataStream,
        // using DimAsyncFunction as the request-dispatching AsyncFunction.
        SingleOutputStreamOperator<TradeSkuOrderBean> withSkuInfoDS = AsyncDataStream.unorderedWait(
            reduceDS,
            new DimAsyncFunction<TradeSkuOrderBean>("DIM_SKU_INFO") {
                @Override
                public void join(TradeSkuOrderBean orderBean, JSONObject dimInfoJsonObj) {
                    orderBean.setSkuName(dimInfoJsonObj.getString("SKU_NAME"));
                    orderBean.setTrademarkId(dimInfoJsonObj.getString("TM_ID"));
                    orderBean.setCategory3Id(dimInfoJsonObj.getString("CATEGORY3_ID"));
                    orderBean.setSpuId(dimInfoJsonObj.getString("SPU_ID"));
                }

                @Override
                public String getKey(TradeSkuOrderBean orderBean) {
                    return orderBean.getSkuId();
                }
            },
            60, TimeUnit.SECONDS
        );

        // withSkuInfoDS.print(">>>");


        //TODO 15. Join with the spu dimension (spuId was filled in by the sku join above)
        SingleOutputStreamOperator<TradeSkuOrderBean> withSpuInfoDS = AsyncDataStream.unorderedWait(
            withSkuInfoDS,
            new DimAsyncFunction<TradeSkuOrderBean>("DIM_SPU_INFO") {
                @Override
                public void join(TradeSkuOrderBean orderBean, JSONObject dimInfoJsonObj) {
                    orderBean.setSpuName(dimInfoJsonObj.getString("SPU_NAME"));
                }

                @Override
                public String getKey(TradeSkuOrderBean orderBean) {
                    return orderBean.getSpuId();
                }
            },
            60, TimeUnit.SECONDS
        );
        //TODO 16. Join with the trademark dimension
        SingleOutputStreamOperator<TradeSkuOrderBean> withTmDS = AsyncDataStream.unorderedWait(
            withSpuInfoDS,
            new DimAsyncFunction<TradeSkuOrderBean>("DIM_BASE_TRADEMARK") {
                @Override
                public void join(TradeSkuOrderBean orderBean, JSONObject dimInfoJsonObj) {
                    orderBean.setTrademarkName(dimInfoJsonObj.getString("TM_NAME"));
                }

                @Override
                public String getKey(TradeSkuOrderBean orderBean) {
                    return orderBean.getTrademarkId();
                }
            },
            60, TimeUnit.SECONDS
        );
        //TODO 17. Join with the category3 dimension
        SingleOutputStreamOperator<TradeSkuOrderBean> withCategory3Stream = AsyncDataStream.unorderedWait(
            withTmDS,
            new DimAsyncFunction<TradeSkuOrderBean>("dim_base_category3".toUpperCase()) {
                @Override
                public void join(TradeSkuOrderBean javaBean, JSONObject jsonObj) {
                    javaBean.setCategory3Name(jsonObj.getString("name".toUpperCase()));
                    javaBean.setCategory2Id(jsonObj.getString("category2_id".toUpperCase()));
                }

                @Override
                public String getKey(TradeSkuOrderBean javaBean) {
                    return javaBean.getCategory3Id();
                }
            },
            5 * 60, TimeUnit.SECONDS
        );

        //TODO 18. Join with the category2 dimension
        SingleOutputStreamOperator<TradeSkuOrderBean> withCategory2Stream = AsyncDataStream.unorderedWait(
            withCategory3Stream,
            new DimAsyncFunction<TradeSkuOrderBean>("dim_base_category2".toUpperCase()) {
                @Override
                public void join(TradeSkuOrderBean javaBean, JSONObject jsonObj){
                    javaBean.setCategory2Name(jsonObj.getString("name".toUpperCase()));
                    javaBean.setCategory1Id(jsonObj.getString("category1_id".toUpperCase()));
                }

                @Override
                public String getKey(TradeSkuOrderBean javaBean) {
                    return javaBean.getCategory2Id();
                }
            },
            5 * 60, TimeUnit.SECONDS
        );

        //TODO 19. Join with the category1 dimension
        SingleOutputStreamOperator<TradeSkuOrderBean> withCategory1Stream = AsyncDataStream.unorderedWait(
            withCategory2Stream,
            new DimAsyncFunction<TradeSkuOrderBean>("dim_base_category1".toUpperCase()) {
                @Override
                public void join(TradeSkuOrderBean javaBean, JSONObject jsonObj) {
                    javaBean.setCategory1Name(jsonObj.getString("name".toUpperCase()));
                }

                @Override
                public String getKey(TradeSkuOrderBean javaBean) {
                    return javaBean.getCategory1Id();
                }
            },
            5 * 60, TimeUnit.SECONDS
        );

        //TODO 20. Write the joined result to the ClickHouse table
        withCategory1Stream.print(">>>");
        withCategory1Stream.addSink(
            MyClickHouseUtil.getSinkFunction("insert into dws_trade_sku_order_window values(?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?)")
        );

        env.execute();
    }
}
