package com.atguigu.realtime.app.dws;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.atguigu.realtime.app.BaseApp;
import com.atguigu.realtime.bean.TradeSkuOrderBean;
import com.atguigu.realtime.common.Constant;
import com.atguigu.realtime.function.DimAsyncFunction;
import com.atguigu.realtime.util.AtguiguUtil;
import com.atguigu.realtime.util.FlinkSinkUtil;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.functions.ReduceFunction;
import org.apache.flink.api.common.state.StateTtlConfig;
import org.apache.flink.api.common.state.ValueState;
import org.apache.flink.api.common.state.ValueStateDescriptor;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.datastream.AsyncDataStream;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.KeyedProcessFunction;
import org.apache.flink.streaming.api.functions.windowing.ProcessWindowFunction;
import org.apache.flink.streaming.api.windowing.assigners.TumblingEventTimeWindows;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.streaming.api.windowing.windows.TimeWindow;
import org.apache.flink.util.Collector;

import java.math.BigDecimal;
import java.time.Duration;
import java.util.concurrent.TimeUnit;

/**
 * @Author lzc
 * @Date 2023/3/20 09:44
 */
public class Dws_09_DwsTradeSkuOrderWindow_Cache_Async extends BaseApp {
    
    /** Timeout for a single async dimension lookup before the element is treated as failed. */
    private static final int DIM_JOIN_TIMEOUT_SECONDS = 60;
    
    public static void main(String[] args) {
        new Dws_09_DwsTradeSkuOrderWindow_Cache_Async().init(
            4009,
            2,
            // FIX: was "Dws_09_DwsTradeSkuOrderWindow_Cache", which collides with the
            // non-async variant's job id / Kafka consumer group and would make the two
            // jobs share partitions if run together. Now matches the class name.
            "Dws_09_DwsTradeSkuOrderWindow_Cache_Async",
            Constant.TOPIC_DWD_TRADE_ORDER_DETAIL
        );
    }
    
    /**
     * Pipeline:
     *  1. parse order-detail JSON into {@link TradeSkuOrderBean}
     *  2. de-duplicate by order_detail_id (left-join retractions from DWD)
     *  3. key by sku_id, 5s tumbling event-time windows, sum amounts
     *  4. asynchronously join the 6 dimension tables
     *  5. sink to ClickHouse
     */
    @Override
    public void handle(StreamExecutionEnvironment env,
                       DataStreamSource<String> stream) {
        // 1. Parse raw JSON records into POJOs
        SingleOutputStreamOperator<TradeSkuOrderBean> beanStream = parseToPojo(stream);
        // 2. De-duplicate by order_detail_id
        beanStream = distinctByOrderDetailId(beanStream);
        // 3. Key, window and aggregate
        SingleOutputStreamOperator<TradeSkuOrderBean> beanStreamWithoutDims = windowAndAgg(beanStream);
        
        // 4. Enrich with dimension attributes
        SingleOutputStreamOperator<TradeSkuOrderBean> beanStreamWithDims = joinDims(beanStreamWithoutDims);
        
        // 5. Write results to ClickHouse
        writeToClickHouse(beanStreamWithDims);
    }
    
    /** Sinks the fully-enriched beans into the ClickHouse table dws_trade_sku_order_window. */
    private void writeToClickHouse(SingleOutputStreamOperator<TradeSkuOrderBean> beanStreamWithDims) {
        beanStreamWithDims.addSink(FlinkSinkUtil.getClickHouseSink("dws_trade_sku_order_window", TradeSkuOrderBean.class));
    }
    
    /**
     * Chains six async dimension lookups (Flink async I/O, unordered) against Phoenix/HBase:
     * sku -> spu -> trademark -> category3 -> category2 -> category1.
     * Each stage reads the id written by an earlier stage, so the order of the chain matters.
     * Dimension column names are the upper-case Phoenix column names, e.g. {"ID": "", "SPU_ID": "", ...}.
     */
    private SingleOutputStreamOperator<TradeSkuOrderBean> joinDims(
        SingleOutputStreamOperator<TradeSkuOrderBean> beanStreamWithoutDims) {
        
        // dim_sku_info: also supplies the spu/trademark/category3 ids needed downstream
        SingleOutputStreamOperator<TradeSkuOrderBean> skuInfoStream = AsyncDataStream.unorderedWait(
            beanStreamWithoutDims,
            new DimAsyncFunction<TradeSkuOrderBean>() {
                
                @Override
                public String getTable() {
                    return "dim_sku_info";
                }
                
                @Override
                public String getId(TradeSkuOrderBean bean) {
                    return bean.getSkuId();
                }
                
                @Override
                public void addDim(TradeSkuOrderBean bean, JSONObject dim) {
                    bean.setSkuName(dim.getString("SKU_NAME"));
                    bean.setSpuId(dim.getString("SPU_ID"));
                    bean.setTrademarkId(dim.getString("TM_ID"));
                    bean.setCategory3Id(dim.getString("CATEGORY3_ID"));
                }
            },
            DIM_JOIN_TIMEOUT_SECONDS,
            TimeUnit.SECONDS
        );
        
        // dim_spu_info: keyed by the spu id obtained from dim_sku_info
        SingleOutputStreamOperator<TradeSkuOrderBean> spuInfoStream = AsyncDataStream.unorderedWait(
            skuInfoStream,
            new DimAsyncFunction<TradeSkuOrderBean>() {
                @Override
                public String getTable() {
                    return "dim_spu_info";
                }
                
                @Override
                public String getId(TradeSkuOrderBean bean) {
                    return bean.getSpuId();
                }
                
                @Override
                public void addDim(TradeSkuOrderBean bean, JSONObject dim) {
                    bean.setSpuName(dim.getString("SPU_NAME"));
                }
            },
            DIM_JOIN_TIMEOUT_SECONDS,
            TimeUnit.SECONDS
        );
        
        // dim_base_trademark: keyed by the trademark id obtained from dim_sku_info
        SingleOutputStreamOperator<TradeSkuOrderBean> tmStream = AsyncDataStream.unorderedWait(
            spuInfoStream,
            new DimAsyncFunction<TradeSkuOrderBean>() {
                @Override
                public String getTable() {
                    return "dim_base_trademark";
                }
                
                @Override
                public String getId(TradeSkuOrderBean bean) {
                    return bean.getTrademarkId();
                }
                
                @Override
                public void addDim(TradeSkuOrderBean bean, JSONObject dim) {
                    bean.setTrademarkName(dim.getString("TM_NAME"));
                }
            },
            DIM_JOIN_TIMEOUT_SECONDS,
            TimeUnit.SECONDS
        );
        
        // dim_base_category3: also supplies the category2 id for the next stage
        SingleOutputStreamOperator<TradeSkuOrderBean> c3Stream = AsyncDataStream.unorderedWait(
            tmStream,
            new DimAsyncFunction<TradeSkuOrderBean>() {
                @Override
                public String getTable() {
                    return "dim_base_category3";
                }
                
                @Override
                public String getId(TradeSkuOrderBean bean) {
                    return bean.getCategory3Id();
                }
                
                @Override
                public void addDim(TradeSkuOrderBean bean, JSONObject dim) {
                    bean.setCategory3Name(dim.getString("NAME"));
                    bean.setCategory2Id(dim.getString("CATEGORY2_ID"));
                }
            },
            DIM_JOIN_TIMEOUT_SECONDS,
            TimeUnit.SECONDS
        );
        
        // dim_base_category2: also supplies the category1 id for the last stage
        SingleOutputStreamOperator<TradeSkuOrderBean> c2Stream = AsyncDataStream.unorderedWait(
            c3Stream,
            new DimAsyncFunction<TradeSkuOrderBean>() {
                @Override
                public String getTable() {
                    return "dim_base_category2";
                }
                
                @Override
                public String getId(TradeSkuOrderBean bean) {
                    return bean.getCategory2Id();
                }
                
                @Override
                public void addDim(TradeSkuOrderBean bean, JSONObject dim) {
                    bean.setCategory2Name(dim.getString("NAME"));
                    bean.setCategory1Id(dim.getString("CATEGORY1_ID"));
                }
            },
            DIM_JOIN_TIMEOUT_SECONDS,
            TimeUnit.SECONDS
        );
        
        // dim_base_category1: final stage, fully-enriched stream is returned
        return AsyncDataStream.unorderedWait(
            c2Stream,
            new DimAsyncFunction<TradeSkuOrderBean>() {
                @Override
                public String getTable() {
                    return "dim_base_category1";
                }
                
                @Override
                public String getId(TradeSkuOrderBean bean) {
                    return bean.getCategory1Id();
                }
                
                @Override
                public void addDim(TradeSkuOrderBean bean, JSONObject dim) {
                    bean.setCategory1Name(dim.getString("NAME"));
                }
            },
            DIM_JOIN_TIMEOUT_SECONDS,
            TimeUnit.SECONDS
        );
    }
    
    /**
     * Keys by sku_id and aggregates the four amounts in 5-second tumbling event-time
     * windows (3s bounded out-of-orderness, 60s idleness so idle partitions do not
     * hold back the watermark). The window function stamps the window start/end
     * (stt/edt) and replaces ts with the processing time, used as the ClickHouse version.
     */
    private SingleOutputStreamOperator<TradeSkuOrderBean> windowAndAgg(
        SingleOutputStreamOperator<TradeSkuOrderBean> beanStream) {
        return beanStream
            .assignTimestampsAndWatermarks(
                WatermarkStrategy
                    .<TradeSkuOrderBean>forBoundedOutOfOrderness(Duration.ofSeconds(3))
                    .withTimestampAssigner((bean, ts) -> bean.getTs())
                    .withIdleness(Duration.ofSeconds(60))
            )
            .keyBy(TradeSkuOrderBean::getSkuId)
            .window(TumblingEventTimeWindows.of(Time.seconds(5)))
            .reduce(
                new ReduceFunction<TradeSkuOrderBean>() {
                    @Override
                    public TradeSkuOrderBean reduce(TradeSkuOrderBean value1,
                                                    TradeSkuOrderBean value2) throws Exception {
                        // Incrementally fold value2 into value1; value1 carries the running sums
                        value1.setOrderAmount(value1.getOrderAmount().add(value2.getOrderAmount()));
                        value1.setOriginalAmount(value1.getOriginalAmount().add(value2.getOriginalAmount()));
                        value1.setActivityAmount(value1.getActivityAmount().add(value2.getActivityAmount()));
                        value1.setCouponAmount(value1.getCouponAmount().add(value2.getCouponAmount()));
                        return value1;
                    }
                },
                new ProcessWindowFunction<TradeSkuOrderBean, TradeSkuOrderBean, String, TimeWindow>() {
                    @Override
                    public void process(String skuId,
                                        Context ctx,
                                        Iterable<TradeSkuOrderBean> elements,
                                        Collector<TradeSkuOrderBean> out) throws Exception {
                        // Pre-aggregated by the reducer: exactly one element per window
                        TradeSkuOrderBean bean = elements.iterator().next();
                        
                        bean.setStt(AtguiguUtil.tsToDateTime(ctx.window().getStart()));
                        bean.setEdt(AtguiguUtil.tsToDateTime(ctx.window().getEnd()));
                        
                        bean.setTs(System.currentTimeMillis());
                        
                        out.collect(bean);
                    }
                }
            );
    }
    
    /**
     * De-duplicates the DWD left-join output per order_detail_id.
     * The first record for an id is emitted as-is; each subsequent record emits the
     * delta (new amounts minus the previously-seen amounts), so downstream sums stay
     * correct. State carries a 10s TTL (refreshed on create/write) because retractions
     * for the same detail id arrive close together.
     */
    private SingleOutputStreamOperator<TradeSkuOrderBean> distinctByOrderDetailId(
        SingleOutputStreamOperator<TradeSkuOrderBean> beanStream) {
        return beanStream
            .keyBy(TradeSkuOrderBean::getOrderDetailId)
            .process(new KeyedProcessFunction<String, TradeSkuOrderBean, TradeSkuOrderBean>() {
                
                // Last bean seen for this order_detail_id
                private ValueState<TradeSkuOrderBean> state;
                
                @Override
                public void open(Configuration parameters) throws Exception {
                    ValueStateDescriptor<TradeSkuOrderBean> desc =
                        new ValueStateDescriptor<>("beanState", TradeSkuOrderBean.class);
                    StateTtlConfig ttlConfig = StateTtlConfig.newBuilder(org.apache.flink.api.common.time.Time.seconds(10))
                        .updateTtlOnCreateAndWrite()
                        .build();
                    
                    desc.enableTimeToLive(ttlConfig);
                    state = getRuntimeContext().getState(desc);
                }
                
                @Override
                public void processElement(TradeSkuOrderBean currentBean,
                                           Context ctx,
                                           Collector<TradeSkuOrderBean> out) throws Exception {
                    TradeSkuOrderBean lastBean = state.value();
                    if (lastBean == null) { // first record for this detail id
                        out.collect(currentBean);
                    } else { // repeat: emit (current - last) as a correction record
                        // lastBean is reused as the output container; note it keeps the
                        // OLD event ts, so the delta lands in the same window as the original.
                        lastBean.setOriginalAmount(currentBean.getOriginalAmount().subtract(lastBean.getOriginalAmount()));
                        lastBean.setActivityAmount(currentBean.getActivityAmount().subtract(lastBean.getActivityAmount()));
                        lastBean.setCouponAmount(currentBean.getCouponAmount().subtract(lastBean.getCouponAmount()));
                        lastBean.setOrderAmount(currentBean.getOrderAmount().subtract(lastBean.getOrderAmount()));
                        
                        out.collect(lastBean);
                    }
                    // Remember the newest record for the next potential retraction
                    state.update(currentBean);
                }
            });
    }
    
    /**
     * Parses one DWD order-detail JSON record into a {@link TradeSkuOrderBean}.
     * FIX: all four amount fields are now null-guarded (previously only the activity
     * and coupon amounts were), preventing an NPE in the window reduce when the
     * source record omits split_original_amount or split_total_amount.
     */
    private SingleOutputStreamOperator<TradeSkuOrderBean> parseToPojo(DataStreamSource<String> stream) {
        return stream.map(new MapFunction<String, TradeSkuOrderBean>() {
            @Override
            public TradeSkuOrderBean map(String json) throws Exception {
                JSONObject obj = JSON.parseObject(json);
                return TradeSkuOrderBean.builder()
                    .skuId(obj.getString("sku_id"))
                    .originalAmount(zeroIfNull(obj.getBigDecimal("split_original_amount")))
                    .activityAmount(zeroIfNull(obj.getBigDecimal("split_activity_amount")))
                    .couponAmount(zeroIfNull(obj.getBigDecimal("split_coupon_amount")))
                    .orderAmount(zeroIfNull(obj.getBigDecimal("split_total_amount")))
                    .ts(obj.getLong("ts") * 1000)  // source ts is in seconds; Flink needs millis
                    .orderDetailId(obj.getString("id"))
                    .build();
            }
        });
    }
    
    /** Returns {@link BigDecimal#ZERO} when the parsed amount is absent, else the amount itself. */
    private static BigDecimal zeroIfNull(BigDecimal value) {
        return value == null ? BigDecimal.ZERO : value;
    }
}
/*
如果不想异步出问题: 先在同步的时候,全部跑通

----
出现: 异步超时 Async timeout ....

1. 检查所有集群是否正常工作
    hdfs hbase redis kafka
2. 检查 phoenix 中的维度表是否都在
    6 张表
3. 检查每张维度表是否都有数据, 并且条数和 mysql 的一致

4. 找我

    
    hbase(如果 phoenix 进不去) 和 kafka() 容易出问题
    
    hbase复原:
        
        1. 去 hdfs 删除一个目录 /hbase
        2. 去 zookeeper 上删除节点: /hbase
            zkCli.sh
                deleteall /hbase
     
     kafka复原:
        1. 去 zookeeper 上删除节点: /kafka
            zkCli.sh
                deleteall /kafka
        
        2. 删除 kafka 中已经存储的数据:
            xcall 'rm -rf /opt/module/kafka-3.0.0/logs/*'
 
    
    
    redis 集群容易忘记启动

    


----------------
 同步读法:
    在一个并行度内, 同步读写
    
 解决:
   1. 增加并行度
        成本高, 需要占用更多的资源.
        有上限
        
   2. 异步的方式
        发送请求之后, 不会等待响应, 直接发送后面的请求.
        谁先返回就处理谁
        
使用 flink 异步 io, 需要满足条件:
    客户端要支持异步读写
        (内部进行读写的时候不会互相阻塞)
        
  
  redis 和 phoenix: 目前没有提供异步客户端
  
  退而求其次:
    使用
        多线程+多客户端
        
    如果有 3 个元素: 创建 3 个子线程, 每个子线程中创建 1 个客户端
    
        每处理一个元素, 就创建一个新的线程, 在线程内再创建新的客户端, 进行读写
        
        
    
  
  
  

 
 
 */