package com.atguigu.realtime.app.dws;

import com.alibaba.druid.pool.DruidDataSource;
import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.atguigu.realtime.app.BaseAppV1;
import com.atguigu.realtime.bean.TradeSkuOrderBean;
import com.atguigu.realtime.common.Constant;
import com.atguigu.realtime.util.AtguiguUtil;
import com.atguigu.realtime.util.DimUtil;
import com.atguigu.realtime.util.DruidDSUtil;
import com.atguigu.realtime.util.RedisUtil;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.functions.ReduceFunction;
import org.apache.flink.api.common.functions.RichMapFunction;
import org.apache.flink.api.common.state.ValueState;
import org.apache.flink.api.common.state.ValueStateDescriptor;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.KeyedProcessFunction;
import org.apache.flink.streaming.api.functions.windowing.ProcessWindowFunction;
import org.apache.flink.streaming.api.windowing.assigners.TumblingEventTimeWindows;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.streaming.api.windowing.windows.TimeWindow;
import org.apache.flink.util.Collector;
import redis.clients.jedis.Jedis;

import java.sql.Connection;
import java.time.Duration;

/**
 * @Author lzc
 * @Date 2022/7/26 15:14
 */
public class Dws_08_DwsTradeSkuOrderWindow_Cache extends BaseAppV1 {
    public static void main(String[] args) {
        new Dws_08_DwsTradeSkuOrderWindow_Cache().init(
            3008,
            2,
            "Dws_08_DwsTradeSkuOrderWindow_Cache",
            Constant.TOPIC_DWD_TRADE_ORDER_DETAIL
        );
    }
    
    @Override
    protected void handle(StreamExecutionEnvironment env,
                          DataStreamSource<String> stream) {
        // 1. 按照order_detail_id进程去重   新的
        SingleOutputStreamOperator<JSONObject> distinctedStream = distinctByOrderDetailId(stream);
        // 2. 把数据封装到pojo中
        SingleOutputStreamOperator<TradeSkuOrderBean> beanStream = parstToPojo(distinctedStream);
        // 3. 按照sku_id分组, 开窗聚合
        SingleOutputStreamOperator<TradeSkuOrderBean> beanStreamWithoutDim = windowAndAgg(beanStream);
        // 4. 补充维度信息   新的
        addDim(beanStreamWithoutDim).print();
        
        // 5. 数据写出到clickhouse中
        
    }
    
    private SingleOutputStreamOperator<TradeSkuOrderBean> addDim(SingleOutputStreamOperator<TradeSkuOrderBean> beanStreamWithoutDim) {
        /*
          维度在哪里?
            hbase+Phoenix
          怎么查:
             补充的产品相关的维度
             有sku_id, 根据sku_id补充其他维度
             
             1. 先补:去sku_info补充 sku_name   spu_id  tm_id  category3_id
                select * from sku_info where id=?
                
             2. 补tm_name
                   select * from base_trademark where id=?
                   
             3. 查询 base_category3, 补 category3_name category2_id
                select * from base_category3 where id=?
             
             4. 查base_category2, 补category2_name category1_id
             
                select * from base_category2 where id=?
             5. 查询base_category1, 补category1_name
                select * from base_category1 where id=?
             
         */
      return  beanStreamWithoutDim.map(new RichMapFunction<TradeSkuOrderBean, TradeSkuOrderBean>() {
    
          private Jedis redisClient;
          private Connection conn;
            
            @Override
            public void open(Configuration parameters) throws Exception {
                // 1. 获取Phoenix连接
                DruidDataSource dataSource = DruidDSUtil.createDataSource();
                conn = dataSource.getConnection();
    
                redisClient = RedisUtil.getRedisClient();
            }
            
            @Override
            public void close() throws Exception {
                // 关闭Phoenix连接
                if (conn != null) {
                    conn.close();
                }
    
                if (redisClient != null) {
                    redisClient.close();  // 如果是手动创建的客户端是关闭.
                    // 如果是从连接池获取的客户端, 这个是归还
                }
            }
            
            @Override
            public TradeSkuOrderBean map(TradeSkuOrderBean bean) throws Exception {
                // 1. 查询sku_info
                JSONObject skuInfo = DimUtil.readDim(redisClient, conn, "dim_sku_info", bean.getSkuId());
                // {"ID":"1", "SPU_ID:"10",....}
                bean.setSkuName(skuInfo.getString("SKU_NAME"));
                bean.setSpuId(skuInfo.getString("SPU_ID"));
                bean.setTrademarkId(skuInfo.getString("TM_ID"));
                bean.setCategory3Id(skuInfo.getString("CATEGORY3_ID"));
                
                // 2. 查询spu_info
                JSONObject spuInfo = DimUtil.readDim(redisClient,conn, "dim_spu_info", bean.getSpuId());
                bean.setSpuName(spuInfo.getString("SPU_NAME"));
                
                // 3. base_trademark
                JSONObject baseTrademark = DimUtil.readDim(redisClient,conn, "dim_base_trademark", bean.getTrademarkId());
                bean.setTrademarkName(baseTrademark.getString("TM_NAME"));
                
                // 4. c3
                JSONObject c3 = DimUtil.readDim(redisClient,conn, "dim_base_category3", bean.getCategory3Id());
                bean.setCategory3Name(c3.getString("NAME"));
                // 查询c2_id
                bean.setCategory2Id(c3.getString("CATEGORY2_ID"));
                
                // 5. c2
                JSONObject c2 = DimUtil.readDim(redisClient,conn, "dim_base_category2", bean.getCategory2Id());
                bean.setCategory2Name(c2.getString("NAME"));
                // 查询c1_id
                bean.setCategory1Id(c2.getString("CATEGORY1_ID"));
                // 6. c1
                JSONObject c1 = DimUtil.readDim(redisClient,conn, "dim_base_category1", bean.getCategory1Id());
                bean.setCategory1Name(c1.getString("NAME"));
    
    
                return bean;
            }
        });
    }
    
    private SingleOutputStreamOperator<TradeSkuOrderBean> windowAndAgg(
        SingleOutputStreamOperator<TradeSkuOrderBean> beanStream) {
        return beanStream
            .assignTimestampsAndWatermarks(
                WatermarkStrategy
                    .<TradeSkuOrderBean>forBoundedOutOfOrderness(Duration.ofSeconds(3))
                    .withTimestampAssigner((bean, ts) -> bean.getTs())
            )
            .keyBy(TradeSkuOrderBean::getSkuId)
            .window(TumblingEventTimeWindows.of(Time.seconds(5)))
            .reduce(
                new ReduceFunction<TradeSkuOrderBean>() {
                    @Override
                    public TradeSkuOrderBean reduce(TradeSkuOrderBean value1,
                                                    TradeSkuOrderBean value2) throws Exception {
                        value1.setOrderAmount(value1.getOrderAmount() + value2.getOrderAmount());
                        value1.setOrderOriginTotalAmount(value1.getOrderOriginTotalAmount() + value2.getOrderOriginTotalAmount());
                        value1.setOrderActivityReduceAmount(value1.getOrderActivityReduceAmount() + value2.getOrderActivityReduceAmount());
                        value1.setOrderCouponReduceAmount(value1.getOrderCouponReduceAmount() + value2.getOrderCouponReduceAmount());
                        return value1;
                    }
                },
                new ProcessWindowFunction<TradeSkuOrderBean, TradeSkuOrderBean, String, TimeWindow>() {
                    @Override
                    public void process(String skuId,
                                        Context ctx,
                                        Iterable<TradeSkuOrderBean> elements,
                                        Collector<TradeSkuOrderBean> out) throws Exception {
                        TradeSkuOrderBean bean = elements.iterator().next();
                        bean.setStt(AtguiguUtil.toDateTime(ctx.window().getStart()));
                        bean.setEdt(AtguiguUtil.toDateTime(ctx.window().getEnd()));
                        
                        bean.setTs(ctx.currentProcessingTime()); // 结果是什么时候统计出来的
                        
                        out.collect(bean);
                    }
                }
            );
    }
    
    private SingleOutputStreamOperator<TradeSkuOrderBean> parstToPojo(SingleOutputStreamOperator<JSONObject> stream) {
        return stream
            .map(obj -> {
                /*TradeSkuOrderBean bean = new TradeSkuOrderBean();
                bean.setSkuId(obj.getString("sku_id"));
                return bean;*/
                return TradeSkuOrderBean.builder()
                    .skuId(obj.getString("sku_id"))
                    .ts(obj.getLong("ts") * 1000)
                    .orderOriginTotalAmount(obj.getDoubleValue("split_original_amount"))  // 如果字段不存在, 则返回0D
                    .orderAmount(obj.getDoubleValue("split_total_amount"))
                    .orderCouponReduceAmount(obj.getDoubleValue("split_coupon_amount"))
                    .orderActivityReduceAmount(obj.getDoubleValue("split_activity_amount"))
                    .build();
            });
    }
    
    private SingleOutputStreamOperator<JSONObject> distinctByOrderDetailId(DataStreamSource<String> stream) {
        return stream
            .map(JSON::parseObject)
            .keyBy(obj -> obj.getString("id"))
            .process(new KeyedProcessFunction<String, JSONObject, JSONObject>() {
                
                private ValueState<JSONObject> maxDateDataState;
                
                @Override
                public void onTimer(long timestamp, OnTimerContext ctx, Collector<JSONObject> out) throws Exception {
                    //当定时器触发的时候, 状态中存储的就是时间最大的那个, 就是数据最新的
                    out.collect(maxDateDataState.value());
                }
                
                @Override
                public void open(Configuration parameters) throws Exception {
                    maxDateDataState = getRuntimeContext().getState(new ValueStateDescriptor<JSONObject>("maxDateDataState", JSONObject.class));
                }
                
                @Override
                public void processElement(JSONObject value,
                                           Context ctx,
                                           Collector<JSONObject> out) throws Exception {
                    
                    /*
                    第一条数据进来的时候, 状态应该是空, 注册定时器, 更新状态
                    
                    后面的来的时候状态不为空, 比较时间, 如果来的时候打, 更新状态
                    
                    等到定时器触发的时候,. 状态中的一定存储的时候时间最大的那个, 数据最全的那个
                     */
                    
                    if (maxDateDataState.value() == null) {
                        // 1. 更新状态
                        maxDateDataState.update(value);
                        
                        // 2. 注册处理时间定时器: 5s后触发
                        ctx.timerService().registerProcessingTimeTimer(ctx.timerService().currentProcessingTime() + 5000);
                    } else {
                        // 不是第一条: 比较时间
                        //"row_op_ts": "2022-07-20 06:09:31.236Z"
                        //"row_op_ts": "2022-07-20 06:09:31.4Z"  // 400
                        //"row_op_ts": "2022-07-20 06:09:31.17Z"  // 170
                        
                        //"row_op_ts": "2022-07-20 06:09:31.12Z"  // 120
                        //"row_op_ts": "2022-07-20 06:09:31.1Z"  // 100
                        
                        
                        String current = value.getString("row_op_ts");
                        String last = maxDateDataState.value().getString("row_op_ts");
                        // 如果current > last 返回true , 否则返回false
                        boolean isGreater = AtguiguUtil.compareLTZ(current, last);
                        
                        // 表示新数据的时间大于状态中数据的时间, 更新状态
                        if (isGreater) {
                            maxDateDataState.update(value);
                        }
                    }
                }
            });
    }
}
/*
缓存选择redis:
选择哪种数据结构?

string
key            value
表名+id        json格式字符串

好处:
    1. 方便读写
    2. 每个维度都有一个key, 可以单独的给维度设置ttl
    
坏处:
    key过多, 不方便管理, 越容易与其他的key产生冲突
    
    可以单独把我们的维度放入一个库中, 不会和别人冲突

list
key         value
表名          {json格式字符串, json格式字符串, ....}

好处:  key比较少, 方便管理
坏处:
    读不方便.
    ttl设置也不方便, 没有办法单独给每个维度设置ttl

set
 ...

hash
key        field     value
表名         id1       json格式字符串
             id2       json格式字符串
             
好处:
    读写方便
    key也不多
    
坏处:
    没有办法单独给每个维度设置ttl


zset
  .....




把读过的维度数据, 存入到缓存, 如果下次使用, 先从缓存读, 缓存没有, 再去查询数据库

缓存优化

内置外部
    flink的状态
    
    优点
        存取的速度极快, 不需要经过网络. 因为存储的是本地内存
    
    缺点
        1. 维度数据存储到状态中, 要占用flink的内存
        2. 维度数据有可能发生变化, 一旦发生变化, 状态中的维度没有办法及时的变化

外部缓存  旁路缓存
    redis
    
    优点
        1. 方便读写
        2. 如果维度发生变化, redis缓存中的维度也可以及时的变化
    
    缺点
       所有维度存储到redis的内存中, 占用redis内存, 影响到其他使用




 */