package com.atguigu.realtime.app.dws;

import com.alibaba.druid.pool.DruidDataSource;
import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.atguigu.realtime.app.BaseAppV1;
import com.atguigu.realtime.bean.TradeSkuOrderBean;
import com.atguigu.realtime.common.Constant;
import com.atguigu.realtime.util.AtguiguUtil;
import com.atguigu.realtime.util.DimUtil;
import com.atguigu.realtime.util.DruidDSUtil;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.functions.ReduceFunction;
import org.apache.flink.api.common.functions.RichMapFunction;
import org.apache.flink.api.common.state.ValueState;
import org.apache.flink.api.common.state.ValueStateDescriptor;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.KeyedProcessFunction;
import org.apache.flink.streaming.api.functions.windowing.ProcessWindowFunction;
import org.apache.flink.streaming.api.windowing.assigners.TumblingEventTimeWindows;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.streaming.api.windowing.windows.TimeWindow;
import org.apache.flink.util.Collector;

import java.sql.Connection;
import java.time.Duration;

/**
 * @Author lzc
 * @Date 2022/7/26 15:14
 */
/**
 * DWS layer: trade-domain SKU-granularity order window summary.
 *
 * <p>Pipeline: read the DWD order-detail topic -> de-duplicate records that
 * share one order_detail_id (the upstream left joins on activity/coupon can
 * emit several versions of the same detail) -> sum the amount fields per
 * sku_id in 5-second event-time tumbling windows -> enrich each window
 * result with product dimensions stored in HBase (queried through Phoenix).
 * Writing the result to ClickHouse is still TODO.
 */
public class Dws_08_DwsTradeSkuOrderWindow extends BaseAppV1 {
    public static void main(String[] args) {
        new Dws_08_DwsTradeSkuOrderWindow().init(
            3008,
            2,
            "Dws_08_DwsTradeSkuOrderWindow",
            Constant.TOPIC_DWD_TRADE_ORDER_DETAIL
        );
    }
    
    @Override
    protected void handle(StreamExecutionEnvironment env,
                          DataStreamSource<String> stream) {
        // 1. keep only the most complete record per order_detail_id
        SingleOutputStreamOperator<JSONObject> distinctedStream = distinctByOrderDetailId(stream);
        // 2. map the raw JSON into the summary POJO
        SingleOutputStreamOperator<TradeSkuOrderBean> beanStream = parseToPojo(distinctedStream);
        // 3. key by sku_id, aggregate in event-time tumbling windows
        SingleOutputStreamOperator<TradeSkuOrderBean> beanStreamWithoutDim = windowAndAgg(beanStream);
        // 4. enrich each window result with product dimensions
        addDim(beanStreamWithoutDim).print();
        
        // 5. TODO: sink the enriched stream to ClickHouse
    }
    
    /**
     * Supplements each aggregated bean with product dimensions read from
     * HBase via Phoenix, chained from sku_id:
     * <pre>
     *   dim_sku_info       -> sku_name, spu_id, tm_id, category3_id
     *   dim_spu_info       -> spu_name
     *   dim_base_trademark -> trademark name
     *   dim_base_category3 -> category3 name, category2_id
     *   dim_base_category2 -> category2 name, category1_id
     *   dim_base_category1 -> category1 name
     * </pre>
     * Each lookup is effectively "select * from &lt;table&gt; where id = ?".
     *
     * <p>NOTE(review): these are synchronous per-record point lookups with no
     * cache; consider Flink async I/O plus a cache if throughput matters.
     */
    private SingleOutputStreamOperator<TradeSkuOrderBean> addDim(SingleOutputStreamOperator<TradeSkuOrderBean> beanStreamWithoutDim) {
        return beanStreamWithoutDim.map(new RichMapFunction<TradeSkuOrderBean, TradeSkuOrderBean>() {
            
            // One Phoenix connection per subtask, held for the operator's lifetime.
            private Connection conn;
            
            @Override
            public void open(Configuration parameters) throws Exception {
                // Borrow a Phoenix connection from the Druid pool.
                DruidDataSource dataSource = DruidDSUtil.createDataSource();
                conn = dataSource.getConnection();
            }
            
            @Override
            public void close() throws Exception {
                // Guard against open() having failed before conn was assigned.
                if (conn != null) {
                    conn.close();
                }
            }
            
            @Override
            public TradeSkuOrderBean map(TradeSkuOrderBean bean) throws Exception {
                // NOTE(review): if a dimension row is missing, readDimFromPhoenix
                // presumably returns null and the getString calls below would NPE —
                // confirm the dim tables are fully loaded before this job runs.
                
                // 1. sku_info: sku_name plus the foreign keys that drive the chain.
                //    Phoenix returns upper-case columns: {"ID":"1","SPU_ID":"10",...}
                JSONObject skuInfo = DimUtil.readDimFromPhoenix(conn, "dim_sku_info", bean.getSkuId());
                bean.setSkuName(skuInfo.getString("SKU_NAME"));
                bean.setSpuId(skuInfo.getString("SPU_ID"));
                bean.setTrademarkId(skuInfo.getString("TM_ID"));
                bean.setCategory3Id(skuInfo.getString("CATEGORY3_ID"));
                
                // 2. spu_info -> spu_name
                JSONObject spuInfo = DimUtil.readDimFromPhoenix(conn, "dim_spu_info", bean.getSpuId());
                bean.setSpuName(spuInfo.getString("SPU_NAME"));
                
                // 3. base_trademark -> trademark name
                JSONObject baseTrademark = DimUtil.readDimFromPhoenix(conn, "dim_base_trademark", bean.getTrademarkId());
                bean.setTrademarkName(baseTrademark.getString("TM_NAME"));
                
                // 4. base_category3 -> name + category2_id for the next hop
                JSONObject c3 = DimUtil.readDimFromPhoenix(conn, "dim_base_category3", bean.getCategory3Id());
                bean.setCategory3Name(c3.getString("NAME"));
                bean.setCategory2Id(c3.getString("CATEGORY2_ID"));
                
                // 5. base_category2 -> name + category1_id for the next hop
                JSONObject c2 = DimUtil.readDimFromPhoenix(conn, "dim_base_category2", bean.getCategory2Id());
                bean.setCategory2Name(c2.getString("NAME"));
                bean.setCategory1Id(c2.getString("CATEGORY1_ID"));
                
                // 6. base_category1 -> name
                JSONObject c1 = DimUtil.readDimFromPhoenix(conn, "dim_base_category1", bean.getCategory1Id());
                bean.setCategory1Name(c1.getString("NAME"));
                
                return bean;
            }
        });
    }
    
    /**
     * Keys the stream by sku_id and sums the four amount fields inside
     * 5-second event-time tumbling windows. Watermarks allow 3 seconds of
     * out-of-orderness; the bean's ts field supplies the event time.
     * The window function stamps window start/end (stt/edt) and overwrites
     * ts with the processing time at which the result was produced.
     */
    private SingleOutputStreamOperator<TradeSkuOrderBean> windowAndAgg(
        SingleOutputStreamOperator<TradeSkuOrderBean> beanStream) {
        return beanStream
            .assignTimestampsAndWatermarks(
                WatermarkStrategy
                    .<TradeSkuOrderBean>forBoundedOutOfOrderness(Duration.ofSeconds(3))
                    .withTimestampAssigner((bean, ts) -> bean.getTs())
            )
            .keyBy(TradeSkuOrderBean::getSkuId)
            .window(TumblingEventTimeWindows.of(Time.seconds(5)))
            .reduce(
                new ReduceFunction<TradeSkuOrderBean>() {
                    @Override
                    public TradeSkuOrderBean reduce(TradeSkuOrderBean value1,
                                                    TradeSkuOrderBean value2) throws Exception {
                        // Incremental pre-aggregation: fold value2 into value1 and
                        // reuse value1 as the accumulator.
                        value1.setOrderAmount(value1.getOrderAmount() + value2.getOrderAmount());
                        value1.setOrderOriginTotalAmount(value1.getOrderOriginTotalAmount() + value2.getOrderOriginTotalAmount());
                        value1.setOrderActivityReduceAmount(value1.getOrderActivityReduceAmount() + value2.getOrderActivityReduceAmount());
                        value1.setOrderCouponReduceAmount(value1.getOrderCouponReduceAmount() + value2.getOrderCouponReduceAmount());
                        return value1;
                    }
                },
                new ProcessWindowFunction<TradeSkuOrderBean, TradeSkuOrderBean, String, TimeWindow>() {
                    @Override
                    public void process(String skuId,
                                        Context ctx,
                                        Iterable<TradeSkuOrderBean> elements,
                                        Collector<TradeSkuOrderBean> out) throws Exception {
                        // The reducer leaves exactly one pre-aggregated element.
                        TradeSkuOrderBean bean = elements.iterator().next();
                        bean.setStt(AtguiguUtil.toDateTime(ctx.window().getStart()));
                        bean.setEdt(AtguiguUtil.toDateTime(ctx.window().getEnd()));
                        // Record when this result was computed (processing time).
                        bean.setTs(ctx.currentProcessingTime());
                        
                        out.collect(bean);
                    }
                }
            );
    }
    
    /**
     * Maps each de-duplicated order-detail JSON object into a
     * {@link TradeSkuOrderBean}. The split_* amount fields fall back to 0.0
     * when absent (fastjson getDoubleValue); ts is converted from seconds
     * to milliseconds for the event-time assigner.
     */
    private SingleOutputStreamOperator<TradeSkuOrderBean> parseToPojo(SingleOutputStreamOperator<JSONObject> stream) {
        return stream
            .map(obj -> TradeSkuOrderBean.builder()
                .skuId(obj.getString("sku_id"))
                .ts(obj.getLong("ts") * 1000)
                .orderOriginTotalAmount(obj.getDoubleValue("split_original_amount"))  // 0D when the field is absent
                .orderAmount(obj.getDoubleValue("split_total_amount"))
                .orderCouponReduceAmount(obj.getDoubleValue("split_coupon_amount"))
                .orderActivityReduceAmount(obj.getDoubleValue("split_activity_amount"))
                .build());
    }
    
    /**
     * De-duplicates records sharing one order-detail id, keeping the version
     * with the largest row_op_ts (later rows carry the most complete data
     * because of the upstream left joins).
     *
     * <p>Per key: the first record stores itself in state and registers a
     * processing-time timer 5 seconds ahead; every later record replaces the
     * state only if its row_op_ts is greater. When the timer fires, the
     * stored (latest) record is emitted.
     *
     * <p>NOTE(review): the state is never cleared after the timer fires, so
     * it grows with the number of distinct detail ids — consider state TTL.
     */
    private SingleOutputStreamOperator<JSONObject> distinctByOrderDetailId(DataStreamSource<String> stream) {
        return stream
            .map(JSON::parseObject)
            .keyBy(obj -> obj.getString("id"))
            .process(new KeyedProcessFunction<String, JSONObject, JSONObject>() {
                
                // Per detail id: the record with the largest row_op_ts seen so far.
                private ValueState<JSONObject> maxDateDataState;
                
                @Override
                public void onTimer(long timestamp, OnTimerContext ctx, Collector<JSONObject> out) throws Exception {
                    // When the timer fires, the state holds the latest (most
                    // complete) record for this detail id — emit it.
                    out.collect(maxDateDataState.value());
                }
                
                @Override
                public void open(Configuration parameters) throws Exception {
                    maxDateDataState = getRuntimeContext().getState(new ValueStateDescriptor<JSONObject>("maxDateDataState", JSONObject.class));
                }
                
                @Override
                public void processElement(JSONObject value,
                                           Context ctx,
                                           Collector<JSONObject> out) throws Exception {
                    
                    if (maxDateDataState.value() == null) {
                        // First record for this detail id:
                        // 1. store it
                        maxDateDataState.update(value);
                        // 2. start a 5-second processing-time countdown
                        ctx.timerService().registerProcessingTimeTimer(ctx.timerService().currentProcessingTime() + 5000);
                    } else {
                        // Later record: keep whichever has the larger row_op_ts.
                        // row_op_ts looks like "2022-07-20 06:09:31.236Z" with a
                        // variable-width fraction ("...31.4Z" means 400 ms,
                        // "...31.17Z" means 170 ms), hence the dedicated comparator.
                        String current = value.getString("row_op_ts");
                        String last = maxDateDataState.value().getString("row_op_ts");
                        // true when current > last, false otherwise
                        boolean isGreater = AtguiguUtil.compareLTZ(current, last);
                        
                        if (isGreater) {
                            maxDateDataState.update(value);
                        }
                    }
                }
            });
    }
}
/*

Trade-domain SKU-granularity order window summary.

Source:
    order-detail fact table dwd_trade_order_detail

    one row per order detail, SKU granularity

    This table is derived from the pre-processing table by filtering for the
    rows where order_info is an insert (a new order).

    Pre-processing table:
        4 fact tables + one dimension (the dictionary table), joined:

        join

        left join

        Joining details with coupons and with activities uses left join, so
        the written data can be duplicated:

        rows with the same detail id may repeat, because of the left joins
        against activities and coupons.

        Rows sharing one detail id must be de-duplicated!!!

        De-duplication idea:
            among the duplicates of one detail id, keep the most complete one;
            the later a row was produced, the more complete it is.

            "row_op_ts": "2022-07-20 06:09:31.236Z"

            Once all records for a detail id have arrived, sort and take the
            largest one.
                    How do we know they have all arrived?

           1. Use a session window
               gap: 5s
               when the window fires, pick the record with the largest time

               Drawback: lower timeliness —
                    the window only fires 5s after the last record arrives

           2. Use a timer
               we can recognize the first record for a key, so
               register a timer that fires 5s later;
                also store the record itself in state, and for every arriving
                record update the state if its timestamp is larger.

                When the timer fires, the state necessarily holds the record
                with the largest timestamp.

           3. If the metrics we compute need no columns from the right-side
                tables, simply keeping the first record is enough.

  Then supplement the dimensions...
 */