package com.atguigu.gmall.realtime.app.dws;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.atguigu.gmall.realtime.app.BaseApp;
import com.atguigu.gmall.realtime.bean.TradeSkuOrderBean;
import com.atguigu.gmall.realtime.common.Constant;
import com.atguigu.gmall.realtime.util.AtguiguUtil;
import com.atguigu.gmall.realtime.util.DimUtil;
import com.atguigu.gmall.realtime.util.JdbcUtil;
import com.atguigu.gmall.realtime.util.RedisUtil;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.functions.ReduceFunction;
import org.apache.flink.api.common.functions.RichMapFunction;
import org.apache.flink.api.common.state.ValueState;
import org.apache.flink.api.common.state.ValueStateDescriptor;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.KeyedProcessFunction;
import org.apache.flink.streaming.api.functions.windowing.ProcessWindowFunction;
import org.apache.flink.streaming.api.windowing.assigners.TumblingEventTimeWindows;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.streaming.api.windowing.windows.TimeWindow;
import org.apache.flink.util.Collector;
import redis.clients.jedis.Jedis;

import java.math.BigDecimal;
import java.sql.Connection;
import java.time.Duration;

/**
 * @Author lzc
 * @Date 2023/1/8 08:26
 */
public class Dws_09_TradeSkuOrderBean_Cache extends BaseApp {
    public static void main(String[] args) {
        new Dws_09_TradeSkuOrderBean_Cache().init(
            4009,
            2,
            "Dws_09_TradeSkuOrderBean_Cache",
            Constant.TOPIC_DWD_TRADE_ORDER_DETAIL
        );
    }
    
    @Override
    public void handle(StreamExecutionEnvironment env,
                       DataStreamSource<String> stream) {
        // 1. 先把数据封装到 pojo 中
        SingleOutputStreamOperator<TradeSkuOrderBean> beanStream = parseToPojo(stream);
        
        // 2. 按照详情 id 去重
        SingleOutputStreamOperator<TradeSkuOrderBean> distinctedStream = distinctByOrderDetailId(beanStream);
        // 3. 开窗聚合
        SingleOutputStreamOperator<TradeSkuOrderBean> streamWithoutDims = windowAndAgg(distinctedStream);
        // 4. 补充维度信息
        joinDims(streamWithoutDims);
        
        // 5. 写出到 ClickHouse 中
        
    }
    
    private void joinDims(SingleOutputStreamOperator<TradeSkuOrderBean> streamWithoutDims) {
        /*
        补充维度信息:
            sql 中:
                lookup join
            流:
                每来一条数据, 然后去使用相应的客户端, 去查找维度表
                
        维度表需要:
            sku_info:  sku_id ->  sku_name  spu_id tm_id c3_id
            spu_info:   spu_id->sku_name
            base_trademark: tm_id -> tm_name
            base_category3: c3_id -> c3_name c2_id
            base_category2: c2_id -> c2_name c1_id
            base_category1: c1_id -> c1_name
       
         */
        streamWithoutDims
            .map(new RichMapFunction<TradeSkuOrderBean, TradeSkuOrderBean>() {
    
                private Jedis jedis;
                private Connection conn;
                
                @Override
                public void open(Configuration parameters) throws Exception {
                    conn = JdbcUtil.getPhoenixConnection();
    
                    jedis = RedisUtil.getRedisClient();
                }
    
                @Override
                public void close() throws Exception {
                    JdbcUtil.closeConnection(conn);
                    
                    // 从连接池获取的客户端, close 是归还
                    // 如果是手动 new 出来, 则才是关闭
                    jedis.close();
                }
    
                @Override
                public TradeSkuOrderBean map(TradeSkuOrderBean bean) throws Exception {
                    // 1. sku_info
                    JSONObject skuInfo = DimUtil.readDim(jedis, conn, "dim_sku_info", bean.getSkuId());
                    bean.setSkuName(skuInfo.getString("SKU_NAME"));
                    bean.setSpuId(skuInfo.getString("SPU_ID"));
                    bean.setTrademarkId(skuInfo.getString("TM_ID"));
                    bean.setCategory3Id(skuInfo.getString("CATEGORY3_ID"));
                    
                    // 2. spu_info
                    JSONObject spuInfo = DimUtil.readDim(jedis, conn, "dim_spu_info", bean.getSpuId());
                    bean.setSpuName(spuInfo.getString("SPU_NAME"));
                    
                    // 3. tm
                    JSONObject tm = DimUtil.readDim(jedis, conn, "dim_base_trademark", bean.getTrademarkId());
                    bean.setTrademarkName(tm.getString("TM_NAME"));
                    
                    // 4. c3
                    JSONObject c3 = DimUtil.readDim(jedis, conn, "dim_base_category3", bean.getCategory3Id());
                    bean.setCategory3Name(c3.getString("NAME"));
                    bean.setCategory2Id(c3.getString("CATEGORY2_ID"));
                    
                    // 5. c2
                    JSONObject c2 = DimUtil.readDim(jedis, conn, "dim_base_category2", bean.getCategory2Id());
                    bean.setCategory2Name(c2.getString("NAME"));
                    bean.setCategory1Id(c2.getString("CATEGORY1_ID"));
                    
                    // 6. c1
                    JSONObject c1 = DimUtil.readDim(jedis, conn, "dim_base_category1", bean.getCategory1Id());
                    bean.setCategory1Name(c1.getString("NAME"));
                    
                    
                    return bean;
                }
            })
            .print();
    }
    
    private SingleOutputStreamOperator<TradeSkuOrderBean> windowAndAgg(
        SingleOutputStreamOperator<TradeSkuOrderBean> distinctedStream) {
        return distinctedStream
            .assignTimestampsAndWatermarks(
                WatermarkStrategy
                    .<TradeSkuOrderBean>forBoundedOutOfOrderness(Duration.ofSeconds(3))
                    .withTimestampAssigner((bean, ts) -> bean.getTs())
                    .withIdleness(Duration.ofSeconds(60))
            
            )
            .keyBy(TradeSkuOrderBean::getSkuId)
            .window(TumblingEventTimeWindows.of(Time.seconds(5)))
            .reduce(
                new ReduceFunction<TradeSkuOrderBean>() {
                    @Override
                    public TradeSkuOrderBean reduce(TradeSkuOrderBean value1,
                                                    TradeSkuOrderBean value2) throws Exception {
                        value1.setOriginalAmount(value1.getOriginalAmount().add(value2.getOriginalAmount()));
                        value1.setActivityAmount(value1.getActivityAmount().add(value2.getActivityAmount()));
                        value1.setCouponAmount(value1.getCouponAmount().add(value2.getCouponAmount()));
                        value1.setOrderAmount(value1.getOrderAmount().add(value2.getOrderAmount()));
                        return value1;
                    }
                },
                new ProcessWindowFunction<TradeSkuOrderBean, TradeSkuOrderBean, String, TimeWindow>() {
                    @Override
                    public void process(String key,
                                        Context ctx,
                                        Iterable<TradeSkuOrderBean> elements,
                                        Collector<TradeSkuOrderBean> out) throws Exception {
                        TradeSkuOrderBean bean = elements.iterator().next();
                        
                        bean.setStt(AtguiguUtil.tsToDateTime(ctx.window().getStart()));
                        bean.setEdt(AtguiguUtil.tsToDateTime(ctx.window().getEnd()));
                        
                        bean.setTs(System.currentTimeMillis());
                        
                        out.collect(bean);
                    }
                }
            );
    }
    
    private SingleOutputStreamOperator<TradeSkuOrderBean> distinctByOrderDetailId(
        SingleOutputStreamOperator<TradeSkuOrderBean> beanStream) {
        return beanStream
            .keyBy(TradeSkuOrderBean::getOrderDetailId)
            .process(new KeyedProcessFunction<String, TradeSkuOrderBean, TradeSkuOrderBean>() {
                
                private ValueState<TradeSkuOrderBean> beanState;
                
                @Override
                public void open(Configuration parameters) throws Exception {
                    beanState = getRuntimeContext().getState(new ValueStateDescriptor<TradeSkuOrderBean>("bean", TradeSkuOrderBean.class));
                }
                
                @Override
                public void processElement(TradeSkuOrderBean currentBean,
                                           Context ctx,
                                           Collector<TradeSkuOrderBean> out) throws Exception {
                    
                    TradeSkuOrderBean lastBean = beanState.value();
                    // 1. 把这个数据发送到下游
                    // 1.1 如果是第一条, 直接发
                    // 1.2 如果不是第一条则需要用新数据,减去状态中的数据
                    if (lastBean == null) {
                        out.collect(currentBean);
                    } else { // 不是第一条
                        // 新值减旧值
                        lastBean.setOriginalAmount(currentBean.getOriginalAmount().subtract(lastBean.getOriginalAmount()));
                        lastBean.setActivityAmount(currentBean.getActivityAmount().subtract(lastBean.getActivityAmount()));
                        lastBean.setCouponAmount(currentBean.getCouponAmount().subtract(lastBean.getCouponAmount()));
                        lastBean.setOrderAmount(currentBean.getOrderAmount().subtract(lastBean.getOrderAmount()));
                        
                        out.collect(lastBean);
                    }
                    
                    // 2. 先把数据存入到状态中
                    beanState.update(currentBean);
                    
                }
            });
    }
    
    private SingleOutputStreamOperator<TradeSkuOrderBean> parseToPojo(DataStreamSource<String> stream) {
        return stream
            .map(new MapFunction<String, TradeSkuOrderBean>() {
                @Override
                public TradeSkuOrderBean map(String value) throws Exception {
                    JSONObject obj = JSON.parseObject(value);
                    return TradeSkuOrderBean.builder()
                        .orderDetailId(obj.getString("id"))
                        .skuId(obj.getString("sku_id"))
                        .originalAmount(obj.getBigDecimal("split_original_amount"))
                        .orderAmount(obj.getBigDecimal("split_total_amount"))
                        .activityAmount(obj.getBigDecimal("split_activity_amount") == null ? new BigDecimal(0) : obj.getBigDecimal("split_activity_amount"))
                        .couponAmount(obj.getBigDecimal("split_coupon_amount") == null ? new BigDecimal(0) : obj.getBigDecimal("split_coupon_amount"))
                        .ts(obj.getLong("ts") * 1000)
                        .build();
                }
            });
    }
}
/*
-----------------------
把读到的维度数据存入到内存中,等到下次使用同一个维度的时候,可以提高查询的速度

flink 的状态
    优点:
        1. 是本地内存, 读写速度极快
        2. 数据结构也比较丰富
        
    缺点:
        1. 数据会占据 flink 的内存, 影响 flink 的计算.  解决: 加内存
        2. 当维度发生更新的时候, 缓存中的维度不能及时更新. 解决: 无法解决.

redis(旁路缓存)
    优点:
        1. 数据结构也比较丰富  string list set map zset
        2. 当维度发生更新的时候, 缓存中的维度可以及时更新(DimApp)
    
    缺点:
        1. redis 是外置的缓存, 每次访问需要通过网络, 效率比本地内存要差
        2. 如果状态过大, 也会占用过多的内存 解决: 加内存
-----------
redis中数据结构的选择:
string
    优点:
        key                     value
        表名+id                  json 格式的字符串
        
        dim_sku_info:1          {"id": ...,"name": ..., "":"",...}
        dim_sku_info:2          {"id": ...,"name": ..., "":"",...}
        
      1.读写方便
      2.非常方便的给每条数据设置 ttl
    
    缺点:
        key 会非常的多, 不方便关联.
        
        key 越多, 与其他的 key 产生冲突的概率就变大
        
        解决:可以把维度数据的缓存单独存储到某个库


list
    key                 value
    表名                 [json格式化字符串, json 格式字符串,...]
    dim_sku_info        [{..}, {...}, {...},...]
    
    好处:
        key 比较少, 方便管理.
    坏处:
        读不方便(每次需要读取这个张表所有数据, 然后遍历), 写方便
        
        没有办法单独给每条数据设置 ttl

set
    去重的功能

map(hash)

    key             field           value
    表名              id              json 格式字符串
    dim_sku_info      1            {...}
                      2            {...}
    
     好处:
        读写方便
        一张表一个 key,方便管理
        
    坏处:
        没有办法单独给每条数据设置 ttl

zset
    ...



------------------------

rm -rf /opt/module/maxwell-1.27.1/lib/mysql-connector-java-8.0.17.jar
cp /opt/module/hive-3.1.2/lib/mysql-connector-java-5.1.27-bin.jar  /opt/module/maxwell-1.27.1/lib/

 */