package com.atguigu.realtime.app.dws;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.atguigu.realtime.app.BaseApp;
import com.atguigu.realtime.bean.TradeSkuOrderBean;
import com.atguigu.realtime.common.Constant;
import com.atguigu.realtime.function.DimMapFunction;
import com.atguigu.realtime.util.AtguiguUtil;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.functions.ReduceFunction;
import org.apache.flink.api.common.state.StateTtlConfig;
import org.apache.flink.api.common.state.ValueState;
import org.apache.flink.api.common.state.ValueStateDescriptor;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.KeyedProcessFunction;
import org.apache.flink.streaming.api.functions.windowing.ProcessWindowFunction;
import org.apache.flink.streaming.api.windowing.assigners.TumblingEventTimeWindows;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.streaming.api.windowing.windows.TimeWindow;
import org.apache.flink.util.Collector;

import java.math.BigDecimal;
import java.time.Duration;

/**
 * @Author lzc
 * @Date 2023/3/20 09:44
 */
public class Dws_09_DwsTradeSkuOrderWindow extends BaseApp {
    public static void main(String[] args) {
        // Bootstrap the job: REST port 4009, parallelism 2, the job id doubles
        // as the consumer group / checkpoint name, reading the DWD order-detail topic.
        Dws_09_DwsTradeSkuOrderWindow app = new Dws_09_DwsTradeSkuOrderWindow();
        app.init(4009, 2, "Dws_09_DwsTradeSkuOrderWindow", Constant.TOPIC_DWD_TRADE_ORDER_DETAIL);
    }
    
    @Override
    public void handle(StreamExecutionEnvironment env,
                       DataStreamSource<String> stream) {
        // 1. Parse each JSON record into a TradeSkuOrderBean POJO
        SingleOutputStreamOperator<TradeSkuOrderBean> beanStream = parseToPojo(stream);

        // 2. De-duplicate by order_detail_id (the upstream left joins emit repeats)
        SingleOutputStreamOperator<TradeSkuOrderBean> distinctStream = distinctByOrderDetailId(beanStream);

        // 3. Key by sku_id, open event-time windows and aggregate the amounts
        SingleOutputStreamOperator<TradeSkuOrderBean> aggStream = windowAndAgg(distinctStream);

        // 4. Enrich the aggregated beans with dimension attributes
        SingleOutputStreamOperator<TradeSkuOrderBean> resultStream = joinDims(aggStream);
        resultStream.print();

        // 5. TODO: sink the result to ClickHouse
    }
    
    private SingleOutputStreamOperator<TradeSkuOrderBean> joinDims(SingleOutputStreamOperator<TradeSkuOrderBean> beanStreamWithoutDims) {
        /*
         * Dimension enrichment.
         *   In Flink SQL this would be a lookup join; on the DataStream API we
         *   attach each dimension by hand — one map operator per dim table.
         *   Each DimMapFunction fetches one dim row by id and copies the fields
         *   we need onto the bean; later lookups use foreign keys set by earlier ones
         *   (sku -> spu/tm/category3 -> category2 -> category1).
         */
        return beanStreamWithoutDims
            // dim_sku_info: sku name plus the foreign keys driving the later lookups
            .map(new DimMapFunction<TradeSkuOrderBean>() {
                @Override
                public String getTable() {
                    return "dim_sku_info";
                }

                @Override
                public String getId(TradeSkuOrderBean bean) {
                    return bean.getSkuId();
                }

                @Override
                public void addDim(TradeSkuOrderBean bean, JSONObject dim) {
                    bean.setSkuName(dim.getString("SKU_NAME"));
                    bean.setSpuId(dim.getString("SPU_ID"));
                    bean.setTrademarkId(dim.getString("TM_ID"));
                    bean.setCategory3Id(dim.getString("CATEGORY3_ID"));
                }
            })
            // dim_spu_info: spu name
            .map(new DimMapFunction<TradeSkuOrderBean>() {
                @Override
                public String getTable() {
                    return "dim_spu_info";
                }

                @Override
                public String getId(TradeSkuOrderBean bean) {
                    return bean.getSpuId();
                }

                @Override
                public void addDim(TradeSkuOrderBean bean, JSONObject dim) {
                    bean.setSpuName(dim.getString("SPU_NAME"));
                }
            })
            // dim_base_trademark: trademark name
            .map(new DimMapFunction<TradeSkuOrderBean>() {
                @Override
                public String getTable() {
                    return "dim_base_trademark";
                }

                @Override
                public String getId(TradeSkuOrderBean bean) {
                    return bean.getTrademarkId();
                }

                @Override
                public void addDim(TradeSkuOrderBean bean, JSONObject dim) {
                    bean.setTrademarkName(dim.getString("TM_NAME"));
                }
            })
            // dim_base_category3: level-3 category name + level-2 foreign key
            .map(new DimMapFunction<TradeSkuOrderBean>() {
                @Override
                public String getTable() {
                    return "dim_base_category3";
                }

                @Override
                public String getId(TradeSkuOrderBean bean) {
                    return bean.getCategory3Id();
                }

                @Override
                public void addDim(TradeSkuOrderBean bean, JSONObject dim) {
                    bean.setCategory3Name(dim.getString("NAME"));
                    bean.setCategory2Id(dim.getString("CATEGORY2_ID"));
                }
            })
            // dim_base_category2: level-2 category name + level-1 foreign key
            .map(new DimMapFunction<TradeSkuOrderBean>() {
                @Override
                public String getTable() {
                    return "dim_base_category2";
                }

                @Override
                public String getId(TradeSkuOrderBean bean) {
                    return bean.getCategory2Id();
                }

                @Override
                public void addDim(TradeSkuOrderBean bean, JSONObject dim) {
                    bean.setCategory2Name(dim.getString("NAME"));
                    bean.setCategory1Id(dim.getString("CATEGORY1_ID"));
                }
            })
            // dim_base_category1: level-1 category name
            .map(new DimMapFunction<TradeSkuOrderBean>() {
                @Override
                public String getTable() {
                    return "dim_base_category1";
                }

                @Override
                public String getId(TradeSkuOrderBean bean) {
                    return bean.getCategory1Id();
                }

                @Override
                public void addDim(TradeSkuOrderBean bean, JSONObject dim) {
                    bean.setCategory1Name(dim.getString("NAME"));
                }
            });
    }
    
    private SingleOutputStreamOperator<TradeSkuOrderBean> windowAndAgg(SingleOutputStreamOperator<TradeSkuOrderBean> beanStream) {
        // Event-time watermarks: tolerate 3s of out-of-orderness; mark a source
        // idle after 60s so quiet partitions do not hold back the watermark.
        WatermarkStrategy<TradeSkuOrderBean> watermarks = WatermarkStrategy
            .<TradeSkuOrderBean>forBoundedOutOfOrderness(Duration.ofSeconds(3))
            .withTimestampAssigner((bean, recordTs) -> bean.getTs())
            .withIdleness(Duration.ofSeconds(60));

        return beanStream
            .assignTimestampsAndWatermarks(watermarks)
            .keyBy(TradeSkuOrderBean::getSkuId)
            .window(TumblingEventTimeWindows.of(Time.seconds(5)))
            .reduce(
                // Incremental aggregation: fold the four amounts into the first bean
                // so the window keeps a single element per key.
                (agg, in) -> {
                    agg.setOrderAmount(agg.getOrderAmount().add(in.getOrderAmount()));
                    agg.setOriginalAmount(agg.getOriginalAmount().add(in.getOriginalAmount()));
                    agg.setActivityAmount(agg.getActivityAmount().add(in.getActivityAmount()));
                    agg.setCouponAmount(agg.getCouponAmount().add(in.getCouponAmount()));
                    return agg;
                },
                // On window fire: stamp the window bounds and a fresh processing-time ts.
                new ProcessWindowFunction<TradeSkuOrderBean, TradeSkuOrderBean, String, TimeWindow>() {
                    @Override
                    public void process(String skuId,
                                        Context ctx,
                                        Iterable<TradeSkuOrderBean> elements,
                                        Collector<TradeSkuOrderBean> out) throws Exception {
                        // The reduce step guarantees exactly one element here.
                        TradeSkuOrderBean result = elements.iterator().next();

                        result.setStt(AtguiguUtil.tsToDateTime(ctx.window().getStart()));
                        result.setEdt(AtguiguUtil.tsToDateTime(ctx.window().getEnd()));
                        result.setTs(System.currentTimeMillis());

                        out.collect(result);
                    }
                }
            );
    }
    
    /**
     * De-duplicates order-detail records using the "compensation" approach:
     * the first record for an order_detail_id is emitted as-is; every later
     * record is emitted as a delta (new amounts minus the previously stored
     * amounts), so downstream sums converge to the values of the最后/latest
     * record without waiting for a window to close.
     */
    private SingleOutputStreamOperator<TradeSkuOrderBean> distinctByOrderDetailId(SingleOutputStreamOperator<TradeSkuOrderBean> beanStream) {
       return beanStream
            .keyBy(TradeSkuOrderBean::getOrderDetailId)
            .process(new KeyedProcessFunction<String, TradeSkuOrderBean, TradeSkuOrderBean>() {

                // Last record seen for the current order_detail_id
                private ValueState<TradeSkuOrderBean> lastBeanState;

                @Override
                public void open(Configuration parameters) throws Exception {
                    ValueStateDescriptor<TradeSkuOrderBean> desc =
                        new ValueStateDescriptor<>("beanState", TradeSkuOrderBean.class);
                    // Duplicates of one order_detail_id arrive within seconds of each
                    // other, so the state may expire 10s after the last write.
                    StateTtlConfig ttlConfig = StateTtlConfig
                        .newBuilder(org.apache.flink.api.common.time.Time.seconds(10))
                        .updateTtlOnCreateAndWrite()
                        .build();
                    desc.enableTimeToLive(ttlConfig);
                    lastBeanState = getRuntimeContext().getState(desc);
                }

                @Override
                public void processElement(TradeSkuOrderBean currentBean,
                                           Context ctx,
                                           Collector<TradeSkuOrderBean> out) throws Exception {
                    TradeSkuOrderBean lastBean = lastBeanState.value();
                    if (lastBean == null) {
                        // First record for this detail id: emit unchanged.
                        out.collect(currentBean);
                    } else {
                        // Repeat: emit the difference between the new record and the
                        // one already emitted, so amounts are not double counted.
                        lastBean.setOriginalAmount(currentBean.getOriginalAmount().subtract(lastBean.getOriginalAmount()));
                        lastBean.setActivityAmount(currentBean.getActivityAmount().subtract(lastBean.getActivityAmount()));
                        lastBean.setCouponAmount(currentBean.getCouponAmount().subtract(lastBean.getCouponAmount()));
                        lastBean.setOrderAmount(currentBean.getOrderAmount().subtract(lastBean.getOrderAmount()));

                        out.collect(lastBean);
                    }
                    // Either way, remember the newest record for the next comparison.
                    lastBeanState.update(currentBean);
                }
            });
    }
    
    /**
     * Parses each DWD order-detail JSON string into a {@link TradeSkuOrderBean}.
     * Activity/coupon split amounts are null when the detail has no matching
     * activity or coupon (upstream left joins) and default to zero.
     */
    private SingleOutputStreamOperator<TradeSkuOrderBean> parseToPojo(DataStreamSource<String> stream) {
      return  stream.map(new MapFunction<String, TradeSkuOrderBean>() {
            @Override
            public TradeSkuOrderBean map(String json) throws Exception {
                JSONObject obj = JSON.parseObject(json);
                // Read the nullable fields once instead of parsing them twice.
                BigDecimal activityAmount = obj.getBigDecimal("split_activity_amount");
                BigDecimal couponAmount = obj.getBigDecimal("split_coupon_amount");
                return TradeSkuOrderBean.builder()
                    .skuId(obj.getString("sku_id"))
                    .orderDetailId(obj.getString("id"))
                    .originalAmount(obj.getBigDecimal("split_original_amount"))
                    .activityAmount(activityAmount == null ? BigDecimal.ZERO : activityAmount)
                    .couponAmount(couponAmount == null ? BigDecimal.ZERO : couponAmount)
                    .orderAmount(obj.getBigDecimal("split_total_amount"))
                    .ts(obj.getLong("ts") * 1000) // upstream ts is in seconds; Flink wants millis
                    .build();
            }
        });
    }
}
/*
交易域SKU粒度下单各窗口汇总表
    source: dwd 层的下单明细
    sku_id 是聚合的维度   keyBy: sku_id
    指标:  下单原始总金额
           下单分摊总金额
           优惠券减免
           活动减免
-------------
1. 读取dwd 层的下单明细

2. 按照详情去重

    什么时候重复:
        order_detail join order_info left join detail_act left join detail_cou lookup join dic
        由于详情表有 left join, 所以我们消费的时候会有重复数据
        
        order_detail_id   sku_id    分摊总金额   原始总金额     活动指标   优惠券指标
            1                2       100         200         null     null    -> 详情表先来(左表)
            null
            1               2        100         200          10       null   -> 对应详情的活动来了
            null
            1               2        100         200          10       20      -> 对应详情的优惠券来了

        用普通的 kafka 消费的时候, 会读到 3 条数据, 这三条数据他们会进入到同一个窗口中(他们的 ts 是一样的)
        
        将来按照 sku_id 聚合的时候, 会发现数据被重复计算了
        
        所以需要去重!
     如何去重?
        同一个order_detail_id的最后一条数据是最完整的, 拥有所有的数据.应该保留这条
        
        1. 使用窗口
            给每条数据添加一个时间, 表示数据生成的时间
            order_detail_id   sku_id    分摊总金额   原始总金额     活动指标   优惠券指标    时间戳
                1                2       100         200         null     null          0-> 详情表先来(左表)
                null
                1               2        100         200          10       null         1-> 对应详情的活动来了
                null
                1               2        100         200          10       20           2-> 对应详情的优惠券来了
            
            按照order_detail_id 分组, 然后开 5s 的事件时间窗口. 等到窗口关闭的时候, 所有数据全部到达.
            按照时间戳排序, 最大时间戳就是我们想要的数据
            
            也可以用处理时间窗口: 用 session 窗口
                gap 设置 5s
                
                当窗口关闭的时候, 所有的数据都到了.
                
                最后一条来了之后, 5s 后 session 窗口才会关闭
                
                时效性比较低: 最后一条到了 5s 之后, 才会输出结果
                
                最后一条+5s 输出结果
        
        2. 使用定时器
            当第一条数据来了之后, 注册一个 5s 的定时器.
                第一条来了, 把数据存储到状态中, 以后每来一条, 都比较下, 把时间戳最大的保留下来
                
             5s 之后, 所有的数据来齐了, 这个时候状态中存储的一定是时间戳最大的,就是我们想要的
                时效性: 第一条数据的时间+5s后输出结果
                
        3.  补偿法(选择这个)
            order_detail_id   sku_id    分摊总金额   原始总金额     活动指标   优惠券指标
                1                2       100         200         null     null
                1               2        100         200          10       null
                1               2        100         200          10       20
                
              --------
               1                2        100         200            0     0     -> 存储到状态中   . 然后直接输出
               1                2        100-100     200-200      10-0    0-0   -> 输出聚合结果 把第二条的原始数据存入到状态中
               1                2        100-100     200-200      10-10   20-0   -> 同时
               
               
               
            
              优点: 时效性高
              ----
              曾经:(写放大)
              1                2       100         200         0     0 输出
              1                2       -100         -200         0     0 输出上一条的相反数
              1               2        100         200          10       0 第二条
              1               2        -100         -200       -10       0 第二条相反
              1               2        100         200          10       20 第三条
              
         4. 如果需要的指标都在左表, 则直接输出第一条即可(或者右表的数据根本就没有任何用处)

3. 把数据封装到一个 pojo 中

4. 分组(keyBy: sku_id), 开窗, 聚合

5. 根据 sku_id 补充一些需要的维度
    复杂: 涉及到优化
        旁路缓存
        异步

6. 写出到 clickhouse


 */