package com.atguigu.gmall.realtime.app.dws;

import com.alibaba.druid.pool.DruidPooledConnection;
import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.atguigu.gmall.realtime.app.BaseAppV1;
import com.atguigu.gmall.realtime.bean.TradeSkuOrderBean;
import com.atguigu.gmall.realtime.common.Constant;
import com.atguigu.gmall.realtime.util.AtguiguUtil;
import com.atguigu.gmall.realtime.util.DimUtil;
import com.atguigu.gmall.realtime.util.DruidDSUtil;
import com.atguigu.gmall.realtime.util.RedisUtil;
import org.apache.commons.beanutils.BeanUtils;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.functions.ReduceFunction;
import org.apache.flink.api.common.functions.RichMapFunction;
import org.apache.flink.api.common.state.ValueState;
import org.apache.flink.api.common.state.ValueStateDescriptor;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.KeyedProcessFunction;
import org.apache.flink.streaming.api.functions.windowing.ProcessWindowFunction;
import org.apache.flink.streaming.api.windowing.assigners.TumblingEventTimeWindows;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.streaming.api.windowing.windows.TimeWindow;
import org.apache.flink.util.Collector;
import redis.clients.jedis.Jedis;

import java.math.BigDecimal;
import java.time.Duration;

/**
 * @Author lzc
 * @Date 2022/12/13 08:30
 */
public class Dws_09_DwsTradeSkuOrderWindow extends BaseAppV1 {
    public static void main(String[] args) {
        new Dws_09_DwsTradeSkuOrderWindow().init(
            4009,
            2,
            "Dws_09_DwsTradeSkuOrderWindow",
            Constant.TOPIC_DWD_TRADE_ORDER_DETAIL
        );
    }
    
    @Override
    protected void handle(StreamExecutionEnvironment env,
                          DataStreamSource<String> stream) {
        //        2. 解析成 pojo 类型
        SingleOutputStreamOperator<TradeSkuOrderBean> beanStream = parseToPojo(stream);
        //        3. 按照详情 id, 去重
        beanStream = distinctByOrderDetailId(beanStream);
        //        4. 分组,开窗, 聚和
        //        key: sku_id
        SingleOutputStreamOperator<TradeSkuOrderBean> beanStreamWithoutDims = windowAndAgg(beanStream);
        //        5. 补充维度信息
        //        sku spu  tm c3 c2 c1
        joinDims(beanStreamWithoutDims);
        
        //        优化: 缓存  异步
        //
        //        6. 写处到 clickhouse 中
    }
    
    private void joinDims(SingleOutputStreamOperator<TradeSkuOrderBean> stream) {
        stream
            .map(new RichMapFunction<TradeSkuOrderBean, TradeSkuOrderBean>() {
                
                private Jedis redisClient;
                private DruidPooledConnection conn;
                
                @Override
                public void open(Configuration parameters) throws Exception {
                    // 建立到 phoenix 的连接
                    conn = DruidDSUtil.getPhoenixConn();
                    
                    redisClient = RedisUtil.getRedisClient();
                }
                
                @Override
                public void close() throws Exception {
                    // 关闭到 phoenix 的连接
                    if (conn != null) {
                        conn.close();
                    }
                    if (redisClient != null) {
                        
                        redisClient.close();
                    }
                }
                
                @Override
                public TradeSkuOrderBean map(TradeSkuOrderBean bean) throws Exception {
                    // 补充维度信息
                    // 1. 补充 dim_sku_info     select * from dim_sku_info where id=sku_id
                    // {"SKU_ID": "a", "SKU_NAME":...}
                    JSONObject skuInfo = DimUtil.readDim(redisClient,conn, "dim_sku_info", bean.getSkuId());
                    bean.setSkuName(skuInfo.getString("SKU_NAME"));
                    
                    bean.setSpuId(skuInfo.getString("SPU_ID"));
                    bean.setTrademarkId(skuInfo.getString("TM_ID"));
                    bean.setCategory3Id(skuInfo.getString("CATEGORY3_ID"));
                    
                    // 2. spu_info
                    JSONObject spuInfo = DimUtil.readDim(redisClient,conn, "dim_spu_info", bean.getSpuId());
                    bean.setSpuName(spuInfo.getString("SPU_NAME"));
                    
                    // 3. base_trademark
                    JSONObject tm = DimUtil.readDim(redisClient,conn, "dim_base_trademark", bean.getTrademarkId());
                    bean.setTrademarkName(tm.getString("TM_NAME"));
                    
                    // 4. c3
                    JSONObject c3 = DimUtil.readDim(redisClient,conn, "dim_base_category3", bean.getCategory3Id());
                    bean.setCategory3Name(c3.getString("NAME"));
                    bean.setCategory2Id(c3.getString("CATEGORY2_ID"));
                    
                    // 5. c2
                    JSONObject c2 = DimUtil.readDim(redisClient,conn, "dim_base_category2", bean.getCategory2Id());
                    bean.setCategory2Name(c2.getString("NAME"));
                    bean.setCategory1Id(c2.getString("CATEGORY1_ID"));
                    
                    
                    // 6. c1
                    JSONObject c1 = DimUtil.readDim(redisClient,conn, "dim_base_category1", bean.getCategory1Id());
                    bean.setCategory1Name(c1.getString("NAME"));
                    
                    
                    return bean;
                }
            })
            .print();
    }
    
    private SingleOutputStreamOperator<TradeSkuOrderBean> windowAndAgg(
        SingleOutputStreamOperator<TradeSkuOrderBean> beanStream) {
        return beanStream
            .assignTimestampsAndWatermarks(
                WatermarkStrategy
                    .<TradeSkuOrderBean>forBoundedOutOfOrderness(Duration.ofSeconds(3))
                    .withTimestampAssigner((bean, ts) -> bean.getTs())
            )
            .keyBy(TradeSkuOrderBean::getSkuId)
            .window(TumblingEventTimeWindows.of(Time.seconds(5)))
            .reduce(
                new ReduceFunction<TradeSkuOrderBean>() {
                    @Override
                    public TradeSkuOrderBean reduce(TradeSkuOrderBean value1,
                                                    TradeSkuOrderBean value2) throws Exception {
                        value1.setOriginalAmount(value1.getOriginalAmount().add(value2.getOriginalAmount()));
                        value1.setOrderAmount(value1.getOrderAmount().add(value2.getOrderAmount()));
                        value1.setActivityAmount(value1.getActivityAmount().add(value2.getActivityAmount()));
                        value1.setCouponAmount(value1.getCouponAmount().add(value2.getCouponAmount()));
                        return value1;
                    }
                },
                new ProcessWindowFunction<TradeSkuOrderBean, TradeSkuOrderBean, String, TimeWindow>() {
                    @Override
                    public void process(String key,
                                        Context ctx,
                                        Iterable<TradeSkuOrderBean> elements,
                                        Collector<TradeSkuOrderBean> out) throws Exception {
                        TradeSkuOrderBean bean = elements.iterator().next();
                        bean.setStt(AtguiguUtil.toDatTime(ctx.window().getStart()));
                        bean.setEdt(AtguiguUtil.toDatTime(ctx.window().getEnd()));
                        
                        bean.setTs(System.currentTimeMillis());
                        
                        out.collect(bean);
                        
                    }
                }
            );
        
    }
    
    private SingleOutputStreamOperator<TradeSkuOrderBean> distinctByOrderDetailId(
        SingleOutputStreamOperator<TradeSkuOrderBean> beanStream) {
        return beanStream
            .keyBy(TradeSkuOrderBean::getOrderDetailId)
            .process(new KeyedProcessFunction<String, TradeSkuOrderBean, TradeSkuOrderBean>() {
                
                private ValueState<TradeSkuOrderBean> lastBeanState;
                
                @Override
                public void open(Configuration parameters) throws Exception {
                    lastBeanState = getRuntimeContext().getState(new ValueStateDescriptor<TradeSkuOrderBean>("lastBeanState", TradeSkuOrderBean.class));
                }
                
                @Override
                public void processElement(TradeSkuOrderBean bean,
                                           Context ctx,
                                           Collector<TradeSkuOrderBean> out) throws Exception {
                    TradeSkuOrderBean lastBean = lastBeanState.value();
                    
                    if (lastBean == null) {  // 同一个详情id 的第一条数据过来, 直接输出
                        out.collect(bean);
                        // 存入到状态中
                        lastBeanState.update(bean);
                    } else {
                        
                        TradeSkuOrderBean newBean = new TradeSkuOrderBean();
                        BeanUtils.copyProperties(newBean, bean);  // 把 bean 中所有属性的值复制到 newBean 中
                        lastBeanState.update(newBean);  // 更新修改前的数据
                        
                        // 第二条数据进来. bean如果直接修改,
                        bean.setOriginalAmount(bean.getOriginalAmount().subtract(lastBean.getOriginalAmount()));
                        bean.setActivityAmount(bean.getActivityAmount().subtract(lastBean.getActivityAmount()));
                        bean.setCouponAmount(bean.getCouponAmount().subtract(lastBean.getCouponAmount()));
                        bean.setOrderAmount(bean.getOrderAmount().subtract(lastBean.getOrderAmount()));
                        out.collect(bean);
                    }
                }
            });
    }
    
    private SingleOutputStreamOperator<TradeSkuOrderBean> parseToPojo(DataStreamSource<String> stream) {
        return stream.map(new MapFunction<String, TradeSkuOrderBean>() {
            @Override
            public TradeSkuOrderBean map(String value) throws Exception {
                JSONObject obj = JSON.parseObject(value);
                
                BigDecimal splitActivityAmount = obj.getBigDecimal("split_activity_amount");
                BigDecimal splitCouponAmount = obj.getBigDecimal("split_coupon_amount");
                
                return TradeSkuOrderBean.builder()
                    .skuId(obj.getString("sku_id"))
                    .originalAmount(obj.getBigDecimal("split_original_amount"))
                    .activityAmount(splitActivityAmount == null ? new BigDecimal(0) : splitActivityAmount)
                    .couponAmount(splitCouponAmount == null ? new BigDecimal(0) : splitCouponAmount)
                    .orderAmount(obj.getBigDecimal("split_total_amount"))
                    .ts(obj.getLong("ts") * 1000)
                    .orderDetailId(obj.getString("id"))
                    .build();
            }
        });
    }
}
/*
-------------
每次都需要查询数据库.
第一次查, 以后不要查询, 查缓存
1. 加缓存. 防止每次都去查询数据库.
    flink 内存(状态)
        优点: 本地内存, 读取快.
        
        缺点:
            1. 占用 flink 的内存. 导致内存不足.
                    增加内存解决
                    
            2. 当维度发生变化的时候, 缓存中对应的维度没有办法更新
                不能选择 flink 的做缓存
    
    
    外置缓存(redis)(旁路缓存)
        缺点:
            通过网络访问, 效率没有本地内存快
            
        优点:
            当维度发生变化的时候, 缓存可以及时更新
            
-----------------
redis 中数据结构的选择

string
key                     value
dim_sku_info:10         json 格式的字符串
优点:
    1. 读写方便
    2. 方便给每条维度设置 ttl
缺点:
    一个 id 一个 key, key 比较多.
        单独选一个库

list
key                     value
dim_sku_info            {json 格式字符串, json 格式字符串,...}
优点:
    key少
    写方便 lpush rpush
缺点:
    读不方便:需要遍历 list 集合
    没有办法给每个维度单独设置 ttl


set


hash(map)
key                 field           value
dim_sku_info          10            json 格式字符串
                    ....
                    
优点:
    key 少
    读写方便 hset hget
    
缺点:
    没有办法给单个维度单独设置 ttl


zset






-------------
SKU粒度下单各窗口汇总表
1. 读数据源
    dwd 层下单事务事实表
    
2. 解析成 pojo 类型

3. 按照详情 id, 去重

4. 分组,开窗, 聚合
       key: sku_id
       
5. 补充维度信息
       sku spu  tm c3 c2 c1
       
       优化: 缓存  异步
       
6. 写出到 clickhouse 中


------------
详情id    订单id      sku_id       原始金额   分摊金额     右表                       spu_name sku_name c3_name
10          1         2             200     100        null       数据生成时间1
null(已经被处理过, 忽略)
10          1         2              200      100        有值      数据生成时间2

去重:
   按照详情 id 分组, 找到最后一个
   
   思路:
     1. 假设我们需要的那些度量值,都在左表. 右表其实没有用上,只保留第一条就可以了.
     
     2. 第一条来的时候, 注册一个定时器, 定时器 5s 后触发. 等到定时器触发的时候, 所有的数据已经到齐了
        找到时间最大的那个
            数据要等到第一个到了之后 5s 才能计算出来
            
     3. session 窗口. 重复会进入同一个 session 窗口. 找到时间最大的那个
            数据要等到最后一个来了之后 5s 才能计算出来
            
     4. 抵消法
     
     详情id    订单id      sku_id       原始金额   分摊金额     右表
     10          1         2             200     100        null
     
     第一个来了后, 直接输出. 这条数据存入到状态中  第一: 200 100  / 第二: 200 100 300
        详情id    订单id      sku_id       原始金额   分摊金额           右表
        10          1         2             200     100               0
        10          1         2             200-200      100-100     300-0
        10          1         2              200-200      100-100     300-300
   
 -------
 流中 join
    来一条使用 jdbc(其他客户端)去读
 
 sql 中 join
    lookup join
 */