package com.atguigu.bigdata.gmall.realtime.app.dws;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.atguigu.bigdata.gmall.realtime.app.BaseAppV1;
import com.atguigu.bigdata.gmall.realtime.bean.TradeSkuOrderBean;
import com.atguigu.bigdata.gmall.realtime.common.Constant;
import com.atguigu.bigdata.gmall.realtime.function.DimAsyncFunction;
import com.atguigu.bigdata.gmall.realtime.util.AtguiguUtil;
import com.atguigu.bigdata.gmall.realtime.util.FlinkSinkUtil;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.functions.ReduceFunction;
import org.apache.flink.api.common.state.ValueState;
import org.apache.flink.api.common.state.ValueStateDescriptor;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.datastream.AsyncDataStream;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.KeyedProcessFunction;
import org.apache.flink.streaming.api.functions.windowing.ProcessWindowFunction;
import org.apache.flink.streaming.api.windowing.assigners.TumblingEventTimeWindows;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.streaming.api.windowing.windows.TimeWindow;
import org.apache.flink.util.Collector;

import java.math.BigDecimal;
import java.time.Duration;
import java.util.concurrent.TimeUnit;

/**
 * @Author lzc
 * @Date 2022/10/18 10:36
 */
public class Dws_09_DwsTradeSkuOrderWindow_Cache_Async extends BaseAppV1 {
    public static void main(String[] args) {
        // Args: web-ui port, parallelism, ckAndGroupId, source Kafka topic.
        // NOTE(review): the app id still reads "..._Cache" (no "_Async" suffix)
        // even though the class was renamed — confirm whether the consumer-group /
        // checkpoint id should be updated to match the class name.
        new Dws_09_DwsTradeSkuOrderWindow_Cache_Async().init(
            4009,
            2,
            "Dws_09_DwsTradeSkuOrderWindow_Cache",
            Constant.TOPIC_DWD_TRADE_ORDER_DETAIL
        );
        
    }
    
    /**
     * Pipeline: dedup by order-detail id -> parse to POJO -> event-time window
     * aggregation per sku -> async dimension enrichment -> ClickHouse sink.
     */
    @Override
    protected void handle(StreamExecutionEnvironment env,
                          DataStreamSource<String> stream) {
        // 1. Deduplicate by order-detail id
        SingleOutputStreamOperator<JSONObject> distinctedStream = distinct(stream);
        // 2. Wrap the raw JSON into a POJO
        SingleOutputStreamOperator<TradeSkuOrderBean> beanStream = parseToPOJO(distinctedStream);
        // 3. Open windows and aggregate
        SingleOutputStreamOperator<TradeSkuOrderBean> aggregatedStreamWithoutDims = windowAndAgg(beanStream);
        
        // 4. Enrich the aggregates with dimension attributes
        SingleOutputStreamOperator<TradeSkuOrderBean> resultStream = addDims(aggregatedStreamWithoutDims);
    
        // 5. Write the result to ClickHouse
        writeToClickHouse(resultStream);
    }
    
    // Sink the enriched aggregates into ClickHouse table dws_trade_sku_order_window.
    private void writeToClickHouse(SingleOutputStreamOperator<TradeSkuOrderBean> resultStream) {
        resultStream.addSink(FlinkSinkUtil.getClickHouseSink("dws_trade_sku_order_window", TradeSkuOrderBean.class));
    }
    
    /**
     * Enriches each aggregated bean by chaining six asynchronous dimension
     * lookups: sku_info -> spu_info -> base_trademark -> base_category3 ->
     * base_category2 -> base_category1. Each stage uses
     * AsyncDataStream.unorderedWait with a 120 s timeout; record order is not
     * preserved across a stage, which is acceptable because each windowed bean
     * is independent. Earlier stages also populate the foreign keys (spuId,
     * trademarkId, category ids) that the later stages look up.
     *
     * NOTE(review): dimension columns are read with upper-case keys
     * ("SKU_NAME", "TM_ID", ...) — presumably the Phoenix/HBase column naming
     * convention; confirm against the dim-table schema.
     */
    private SingleOutputStreamOperator<TradeSkuOrderBean> addDims(SingleOutputStreamOperator<TradeSkuOrderBean> stream) {
        // Stage 1: sku dimension; also fills spu/trademark/category3 foreign keys.
        SingleOutputStreamOperator<TradeSkuOrderBean> skuInfoStream = AsyncDataStream.unorderedWait(
            stream,
            new DimAsyncFunction<TradeSkuOrderBean>(){
                @Override
                public String table() {
                    return "dim_sku_info";
                }
    
                @Override
                public String id(TradeSkuOrderBean input) {
                    return input.getSkuId();
                }
    
                @Override
                public void addDim(JSONObject dim, TradeSkuOrderBean input) {
                    input.setSkuName(dim.getString("SKU_NAME"));
    
                    input.setSpuId(dim.getString("SPU_ID"));
                    input.setTrademarkId(dim.getString("TM_ID"));
                    input.setCategory3Id(dim.getString("CATEGORY3_ID"));
                }
            },
            120,
            TimeUnit.SECONDS
        );
    
        // Stage 2: spu dimension (keyed by the spuId filled in stage 1).
        SingleOutputStreamOperator<TradeSkuOrderBean> spuInfoStream = AsyncDataStream.unorderedWait(
            skuInfoStream,
            new DimAsyncFunction<TradeSkuOrderBean>(){
                @Override
                public String table() {
                    return "dim_spu_info";
                }
            
                @Override
                public String id(TradeSkuOrderBean input) {
                    return input.getSpuId();
                }
            
                @Override
                public void addDim(JSONObject dim, TradeSkuOrderBean input) {
                    input.setSpuName(dim.getString("SPU_NAME"));
                }
            },
            120,
            TimeUnit.SECONDS
        );
    
        // Stage 3: trademark dimension.
        SingleOutputStreamOperator<TradeSkuOrderBean> tmStream = AsyncDataStream.unorderedWait(
            spuInfoStream,
            new DimAsyncFunction<TradeSkuOrderBean>(){
                @Override
                public String table() {
                    return "dim_base_trademark";
                }
            
                @Override
                public String id(TradeSkuOrderBean input) {
                    return input.getTrademarkId();
                }
            
                @Override
                public void addDim(JSONObject dim, TradeSkuOrderBean input) {
                    input.setTrademarkName(dim.getString("TM_NAME"));
                }
            },
            120,
            TimeUnit.SECONDS
        );
    
        // Stage 4: category3 dimension; also fills the category2 foreign key.
        SingleOutputStreamOperator<TradeSkuOrderBean> c3Stream = AsyncDataStream.unorderedWait(
            tmStream,
            new DimAsyncFunction<TradeSkuOrderBean>(){
                @Override
                public String table() {
                    return "dim_base_category3";
                }
            
                @Override
                public String id(TradeSkuOrderBean input) {
                    return input.getCategory3Id();
                }
            
                @Override
                public void addDim(JSONObject dim, TradeSkuOrderBean input) {
                    input.setCategory3Name(dim.getString("NAME"));
                    input.setCategory2Id(dim.getString("CATEGORY2_ID"));
                }
            },
            120,
            TimeUnit.SECONDS
        );
    
        // Stage 5: category2 dimension; also fills the category1 foreign key.
        SingleOutputStreamOperator<TradeSkuOrderBean> c2Stream = AsyncDataStream.unorderedWait(
            c3Stream,
            new DimAsyncFunction<TradeSkuOrderBean>(){
                @Override
                public String table() {
                    return "dim_base_category2";
                }
            
                @Override
                public String id(TradeSkuOrderBean input) {
                    return input.getCategory2Id();
                }
            
                @Override
                public void addDim(JSONObject dim, TradeSkuOrderBean input) {
                    input.setCategory2Name(dim.getString("NAME"));
                    input.setCategory1Id(dim.getString("CATEGORY1_ID"));
                }
            },
            120,
            TimeUnit.SECONDS
        );
        
        // Stage 6: category1 dimension — the fully enriched stream is returned.
        return AsyncDataStream.unorderedWait(
            c2Stream,
            new DimAsyncFunction<TradeSkuOrderBean>(){
                @Override
                public String table() {
                    return "dim_base_category1";
                }
        
                @Override
                public String id(TradeSkuOrderBean input) {
                    return input.getCategory1Id();
                }
        
                @Override
                public void addDim(JSONObject dim, TradeSkuOrderBean input) {
                    input.setCategory1Name(dim.getString("NAME"));
                }
            },
            120,
            TimeUnit.SECONDS
        );
    
    
    }
    
    /**
     * Event-time windowed aggregation: 3 s bounded out-of-orderness watermarks,
     * keyed by skuId, 5 s tumbling windows. The reduce pre-aggregates the four
     * amount fields incrementally; the window function attaches the window
     * start/end (stt/edt) and stamps ts with the current processing time.
     */
    private SingleOutputStreamOperator<TradeSkuOrderBean> windowAndAgg(
        SingleOutputStreamOperator<TradeSkuOrderBean> beanStream) {
        return beanStream
            .assignTimestampsAndWatermarks(
                WatermarkStrategy
                    .<TradeSkuOrderBean>forBoundedOutOfOrderness(Duration.ofSeconds(3))
                    .withTimestampAssigner((bean, ts) -> bean.getTs())
            )
            .keyBy(TradeSkuOrderBean::getSkuId)
            .window(TumblingEventTimeWindows.of(Time.seconds(5)))
            .reduce(
                new ReduceFunction<TradeSkuOrderBean>() {
                    @Override
                    public TradeSkuOrderBean reduce(TradeSkuOrderBean b1,
                                                    TradeSkuOrderBean b2) throws Exception {
                        // Incremental aggregation: fold b2's amounts into b1 and reuse b1.
                        b1.setOrderOriginTotalAmount(b1.getOrderOriginTotalAmount().add(b2.getOrderOriginTotalAmount()));
                        b1.setOrderAmount(b1.getOrderAmount().add(b2.getOrderAmount()));
                        b1.setOrderActivityReduceAmount(b1.getOrderActivityReduceAmount().add(b2.getOrderActivityReduceAmount()));
                        b1.setOrderCouponReduceAmount(b1.getOrderCouponReduceAmount().add(b2.getOrderCouponReduceAmount()));
                        return b1;
                    }
                },
                new ProcessWindowFunction<TradeSkuOrderBean, TradeSkuOrderBean, String, TimeWindow>() {
                    @Override
                    public void process(String skuId,
                                        Context ctx,
                                        Iterable<TradeSkuOrderBean> elements,
                                        Collector<TradeSkuOrderBean> out) throws Exception {
                        // With a reduce pre-aggregator the iterable holds exactly one element.
                        TradeSkuOrderBean bean = elements.iterator().next();
                        bean.setStt(AtguiguUtil.toDatTime(ctx.window().getStart()));
                        bean.setEdt(AtguiguUtil.toDatTime(ctx.window().getEnd()));
                        
                        // Overwrite ts with the emit-time (processing time) — presumably
                        // used as a version column downstream; confirm against the sink schema.
                        bean.setTs(ctx.currentProcessingTime());
                        
                        out.collect(bean);
                        
                    }
                }
            
            );
    }
    
    /**
     * Maps the deduplicated order-detail JSON into TradeSkuOrderBean.
     * Missing activity/coupon split amounts default to 0; ts is converted
     * from seconds to milliseconds for the watermark assigner.
     *
     * NOTE(review): split_original_amount / split_total_amount are NOT
     * null-guarded like the other two amounts — if they can be absent upstream,
     * the reduce() in windowAndAgg will NPE. Confirm upstream guarantees.
     */
    private SingleOutputStreamOperator<TradeSkuOrderBean> parseToPOJO(
        SingleOutputStreamOperator<JSONObject> distinctedStream) {
        return distinctedStream
            .map(obj -> TradeSkuOrderBean.builder()
                .skuId(obj.getString("sku_id"))
                .orderOriginTotalAmount(obj.getBigDecimal("split_original_amount"))
                .orderAmount(obj.getBigDecimal("split_total_amount"))
                .orderActivityReduceAmount(obj.getBigDecimal("split_activity_amount") == null ? new BigDecimal("0") : obj.getBigDecimal("split_activity_amount"))
                .orderCouponReduceAmount(obj.getBigDecimal("split_coupon_amount") == null ? new BigDecimal("0") : obj.getBigDecimal("split_coupon_amount"))
                .ts(obj.getLong("ts") * 1000) // convert seconds to milliseconds
                .build());
    }
    
    /**
     * Deduplicates order-detail records by their "id" field, keeping the record
     * with the largest row_op_ts (the most complete version).
     */
    private SingleOutputStreamOperator<JSONObject> distinct(DataStreamSource<String> stream) {
        /*
        Dedup design notes:
            Goal: keep the record with the largest row_op_ts — the most complete
            version, which is also necessarily the last one to be produced.

        Approach 1 (the one implemented below):
            A plain count cannot tell us which record is the last one.

            The duplicates are produced at (almost) the same time; duplication is
            caused by network jitter, so however many duplicates a detail has,
            they will all have arrived within ~5 s of the first.

            When the first record for a detail id arrives, register a
            processing-time timer that fires 5 s later.
                When the timer fires, all duplicates of this detail have arrived;
                emit the one with the largest row_op_ts.
                For efficiency, compare on every arrival and keep the max so far
                instead of buffering everything and sorting at the end.

            Net effect: the result is emitted 5 s after the FIRST record arrives.

        Approach 2 (not used):
            Windows: route all duplicates of one detail into the same window and,
            when the window fires, sort and take the max.

            Time window: tumbling event-time window of 1 s, or
                session window with a 5 s gap:
                    emits the result 5 s after the LAST duplicate arrives.

        Approach 3 (not used):
            If the metrics being aggregated come only from the left table, the
            right-table columns are irrelevant — no need to wait for the most
            complete record.

            In other words, the first record, complete or not, already satisfies
            the requirement, so one could simply keep the first one.
         */
        return stream
            .map(JSON::parseObject)
            .keyBy(obj -> obj.getString("id"))
            // timers require a keyed stream, hence keyBy first
            .process(new KeyedProcessFunction<String, JSONObject, JSONObject>() {
                
                // Latest (max row_op_ts) record seen so far for this detail id.
                private ValueState<JSONObject> dataState;
                // Non-null once the first record for this detail id has been seen.
                private ValueState<Boolean> isFirstState;
                
                @Override
                public void open(Configuration parameters) throws Exception {
                    isFirstState = getRuntimeContext().getState(new ValueStateDescriptor<Boolean>("isFirstState", Boolean.class));
                    dataState = getRuntimeContext().getState(new ValueStateDescriptor<JSONObject>("dataState", JSONObject.class));
                }
                
                @Override
                public void onTimer(long timestamp,
                                    KeyedProcessFunction<String, JSONObject, JSONObject>.OnTimerContext ctx,
                                    Collector<JSONObject> out) throws Exception {
                    // By the time the timer fires, the state holds the record
                    // with the largest row_op_ts — emit it.
                    out.collect(dataState.value());
                }
                
                @Override
                public void processElement(JSONObject value,
                                           Context ctx,
                                           Collector<JSONObject> out) throws Exception {
                    
                    // First record for this detail id: register a 5 s
                    // processing-time timer and remember the record.
                    if (isFirstState.value() == null) {
                        // mark the key as seen
                        isFirstState.update(false);
                        ctx.timerService().registerProcessingTimeTimer(ctx.timerService().currentProcessingTime() + 5000);
                        // remember this record as the current max
                        dataState.update(value);
                    } else {
                        // Not the first: compare with the stored record and keep
                        // the one with the larger row_op_ts.
                        /*
                        row_op_ts has a variable-length fractional second, e.g.:
                        2022-10-11 07:42:48.491Z
                        2022-10-11 07:42:48.49Z  // 490 ms
                        2022-10-11 07:42:48.5Z   // 500 ms
                        2022-10-11 07:42:48.005Z   // 5 ms
                        
                        2022-10-11 07:42:48.51
                        2022-10-11 07:42:48.5
                        After stripping the trailing 'Z', a lexicographic compare
                        orders these correctly digit-by-digit (ties like ".5" vs
                        ".50" only affect which equal record is kept).
                         */
                        String last = dataState.value().getString("row_op_ts").replaceAll("Z", "");
                        String current = value.getString("row_op_ts").replaceAll("Z", "");
                        // newer record wins: overwrite the state
                        if (current.compareTo(last) > 0) {
                            dataState.update(value);
                        }
                    }
                }
            });
    }
}
/*
异步这块可能出现的问题:
    报一个异步超时. 是由于其他问题导致异步处理超时.
    1. 检测集群是否都正常开启
        hadoop 集群(hdfs), hbase , kafka, redis
            hdfs: 一般会报错 datanode 不可用. 空间不够
                删除 hdfs 上 /user/atguigu/.flink
                去虚拟机  删除 /opt/software/
        hbase 去 webui 确认
            如果有问题, 恢复状态
                删除 1:  hdfs   /hbase
                删除 2: zk deleteall /hbase
                
                需要重新同步维度数据
        kafka
            删   删除三个节点下 log 目录下所有数据
             xcall rm -rf /opt/module/kafka-3.0.0/logs/*
             
       redis 连不上
            启动服务器  redis-server /etc/redis.conf
            
            修改配置 bind 0.0.0.0
            
     2. 检测 6 张维度表是否都全, 并且都有数据
        dim_sku_info  dim_spu_info dim_base_trademark dim_base_category3 dim_base_category2 dim_base_category1
        
     3. 检测 redis 中缓存是否问题
         缓存清除 flushall
         
     4. 找我



-------
缓存优化: 为了避免频繁的查询数据库, 把热点数据存储到缓存中

先读缓存, 缓存没有再去数据库读取

默认情况下, flink 的所有算子都是一种同步方式处理数据

提供并行处理的能力:
1. 增加并行度. 对资源占用要求比较高, 成本比较高

2. 使用异步处理的方式.
    处理数据的时候, 非阻塞的. 数据来了之后直接发送链接, 不用等待返回, 直接发后面的
    谁先回来, 就先处理谁.
    
    一般好用在需要通过网络与外界通讯的场景.
    
通过异步的方式去访问 redis 和 phoenix. 数据库需要提供异步客户端.

如果没有异步客户端, 需要:
    使用多线程+多客户端
    
   每个线程内创建一个 redis 和 phoenix 的客户端, 只负责这一条数据的处理.
   
   多线程应该使用: 线程池
   
   客户端: 使用连接池. redis 和 phoenix


new Thread().start()


*/