package com.atguigu.gmall.realtime.app.dws;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.atguigu.gmall.realtime.app.BaseApp;
import com.atguigu.gmall.realtime.bean.TradeSkuOrderBean;
import com.atguigu.gmall.realtime.common.Constant;
import com.atguigu.gmall.realtime.function.AsyncDimFunction;
import com.atguigu.gmall.realtime.util.AtguiguUtil;
import com.atguigu.gmall.realtime.util.FlinkSinUtil;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.functions.ReduceFunction;
import org.apache.flink.api.common.functions.RichMapFunction;
import org.apache.flink.api.common.state.StateTtlConfig;
import org.apache.flink.api.common.state.ValueState;
import org.apache.flink.api.common.state.ValueStateDescriptor;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.datastream.AsyncDataStream;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.windowing.ProcessWindowFunction;
import org.apache.flink.streaming.api.windowing.assigners.TumblingEventTimeWindows;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.streaming.api.windowing.windows.TimeWindow;
import org.apache.flink.util.Collector;

import java.math.BigDecimal;
import java.time.Duration;
import java.util.concurrent.TimeUnit;

/**
 * DWS job: sku-granularity trade order aggregation.
 *
 * <p>Pipeline: parse DWD order-detail JSON -&gt; de-duplicate by
 * order_detail_id -&gt; 5s event-time tumbling window aggregation per sku -&gt;
 * asynchronous dimension enrichment (sku / spu / trademark / category 1-3
 * via {@link AsyncDimFunction}) -&gt; sink to ClickHouse table
 * {@code dws_trade_sku_order_window}.
 *
 * @Author lzc
 * @Date 2023/2/20 09:57
 */
public class Dws_09_DwsTradeSkuOrderWindow_Async extends BaseApp {
    
    /** Timeout applied to every asynchronous dimension lookup. */
    private static final long DIM_TIMEOUT_SECONDS = 60L;
    
    public static void main(String[] args) {
        new Dws_09_DwsTradeSkuOrderWindow_Async().init(
            4009,
            2,
            // NOTE(review): job id lacks the "_Async" suffix of the class name —
            // presumably kept so the synchronous variant's consumer-group
            // offsets are reused; confirm before renaming this string.
            "Dws_09_DwsTradeSkuOrderWindow",
            Constant.TOPIC_DWD_TRADE_ORDER_DETAIL
        );
    }
    
    @Override
    protected void handle(StreamExecutionEnvironment env,
                          DataStreamSource<String> stream) {
        // 1. Parse the raw JSON records into POJOs
        SingleOutputStreamOperator<TradeSkuOrderBean> beanStream = parseToPojo(stream);
        // 2. De-duplicate by order_detail_id
        beanStream = distinctByOrderDetailId(beanStream);
        // 3. Window and aggregate per sku
        SingleOutputStreamOperator<TradeSkuOrderBean> resultStreamWithoutDimsStream = windowAndAgg(beanStream);
        // 4. Enrich with dimension attributes
        SingleOutputStreamOperator<TradeSkuOrderBean> resultStream = joinDims(resultStreamWithoutDimsStream);
        // 5. Write to ClickHouse
        writeToClickHouse(resultStream);
    }
    
    /** Sinks the final aggregated beans into ClickHouse. */
    private void writeToClickHouse(SingleOutputStreamOperator<TradeSkuOrderBean> resultStream) {
        resultStream.addSink(FlinkSinUtil.getClickHouseSink("dws_trade_sku_order_window", TradeSkuOrderBean.class));
    }
    
    /**
     * Enriches each aggregated bean with six dimension tables.
     *
     * <p>The joins are chained because later lookups depend on ids filled in
     * by earlier ones: dim_sku_info provides the spu/trademark/category3 ids,
     * dim_base_category3 provides the category2 id, and dim_base_category2
     * provides the category1 id.
     */
    private SingleOutputStreamOperator<TradeSkuOrderBean> joinDims(
        SingleOutputStreamOperator<TradeSkuOrderBean> stream) {
        
        SingleOutputStreamOperator<TradeSkuOrderBean> skuInfoStream = asyncJoinDim(
            stream,
            new AsyncDimFunction<TradeSkuOrderBean>() {
                @Override
                public String getTable() {
                    return "dim_sku_info";
                }
                
                @Override
                public Object getId(TradeSkuOrderBean value) {
                    return value.getSkuId();
                }
                
                @Override
                public void addDim(TradeSkuOrderBean value, JSONObject dim) {
                    value.setSkuName(dim.getString("SKU_NAME"));
                    value.setSpuId(dim.getString("SPU_ID"));
                    value.setCategory3Id(dim.getString("CATEGORY3_ID"));
                    value.setTrademarkId(dim.getString("TM_ID"));
                }
            }
        );
        
        SingleOutputStreamOperator<TradeSkuOrderBean> spuInfoStream = asyncJoinDim(
            skuInfoStream,
            new AsyncDimFunction<TradeSkuOrderBean>() {
                @Override
                public String getTable() {
                    return "dim_spu_info";
                }
                
                @Override
                public Object getId(TradeSkuOrderBean value) {
                    return value.getSpuId();
                }
                
                @Override
                public void addDim(TradeSkuOrderBean value, JSONObject dim) {
                    value.setSpuName(dim.getString("SPU_NAME"));
                }
            }
        );
        
        SingleOutputStreamOperator<TradeSkuOrderBean> tmStream = asyncJoinDim(
            spuInfoStream,
            new AsyncDimFunction<TradeSkuOrderBean>() {
                @Override
                public String getTable() {
                    return "dim_base_trademark";
                }
                
                @Override
                public Object getId(TradeSkuOrderBean value) {
                    return value.getTrademarkId();
                }
                
                @Override
                public void addDim(TradeSkuOrderBean value, JSONObject dim) {
                    value.setTrademarkName(dim.getString("TM_NAME"));
                }
            }
        );
        
        SingleOutputStreamOperator<TradeSkuOrderBean> c3Stream = asyncJoinDim(
            tmStream,
            new AsyncDimFunction<TradeSkuOrderBean>() {
                @Override
                public String getTable() {
                    return "dim_base_category3";
                }
                
                @Override
                public Object getId(TradeSkuOrderBean value) {
                    return value.getCategory3Id();
                }
                
                @Override
                public void addDim(TradeSkuOrderBean value, JSONObject dim) {
                    value.setCategory3Name(dim.getString("NAME"));
                    value.setCategory2Id(dim.getString("CATEGORY2_ID"));
                }
            }
        );
        
        SingleOutputStreamOperator<TradeSkuOrderBean> c2Stream = asyncJoinDim(
            c3Stream,
            new AsyncDimFunction<TradeSkuOrderBean>() {
                @Override
                public String getTable() {
                    return "dim_base_category2";
                }
                
                @Override
                public Object getId(TradeSkuOrderBean value) {
                    return value.getCategory2Id();
                }
                
                @Override
                public void addDim(TradeSkuOrderBean value, JSONObject dim) {
                    value.setCategory2Name(dim.getString("NAME"));
                    value.setCategory1Id(dim.getString("CATEGORY1_ID"));
                }
            }
        );
        
        return asyncJoinDim(
            c2Stream,
            new AsyncDimFunction<TradeSkuOrderBean>() {
                @Override
                public String getTable() {
                    return "dim_base_category1";
                }
                
                @Override
                public Object getId(TradeSkuOrderBean value) {
                    return value.getCategory1Id();
                }
                
                @Override
                public void addDim(TradeSkuOrderBean value, JSONObject dim) {
                    value.setCategory1Name(dim.getString("NAME"));
                }
            }
        );
    }
    
    /**
     * Wraps one dimension lookup in an unordered async operator with the
     * shared {@link #DIM_TIMEOUT_SECONDS} timeout. Factored out because the
     * six joins above differ only in the lookup function.
     */
    private SingleOutputStreamOperator<TradeSkuOrderBean> asyncJoinDim(
        SingleOutputStreamOperator<TradeSkuOrderBean> stream,
        AsyncDimFunction<TradeSkuOrderBean> dimFunction) {
        return AsyncDataStream.unorderedWait(stream, dimFunction, DIM_TIMEOUT_SECONDS, TimeUnit.SECONDS);
    }
    
    /**
     * 5-second event-time tumbling windows keyed by sku id: sums the four
     * amount fields, then stamps window start/end and the emission time.
     */
    private SingleOutputStreamOperator<TradeSkuOrderBean> windowAndAgg(
        SingleOutputStreamOperator<TradeSkuOrderBean> beanStream) {
        return beanStream
            .assignTimestampsAndWatermarks(
                WatermarkStrategy
                    .<TradeSkuOrderBean>forBoundedOutOfOrderness(Duration.ofSeconds(3))
                    .withTimestampAssigner((bean, ts) -> bean.getTs())
                    // keep the watermark advancing when a partition goes quiet
                    .withIdleness(Duration.ofSeconds(60))
            )
            .keyBy(TradeSkuOrderBean::getSkuId)
            .window(TumblingEventTimeWindows.of(Time.seconds(5)))
            .reduce(
                new ReduceFunction<TradeSkuOrderBean>() {
                    @Override
                    public TradeSkuOrderBean reduce(TradeSkuOrderBean value1,
                                                    TradeSkuOrderBean value2) throws Exception {
                        // Incrementally accumulate the four metrics into value1
                        value1.setOrderAmount(value1.getOrderAmount().add(value2.getOrderAmount()));
                        value1.setOriginalAmount(value1.getOriginalAmount().add(value2.getOriginalAmount()));
                        value1.setActivityAmount(value1.getActivityAmount().add(value2.getActivityAmount()));
                        value1.setCouponAmount(value1.getCouponAmount().add(value2.getCouponAmount()));
                        
                        return value1;
                    }
                },
                new ProcessWindowFunction<TradeSkuOrderBean, TradeSkuOrderBean, String, TimeWindow>() {
                    @Override
                    public void process(String skuId,
                                        Context ctx,
                                        Iterable<TradeSkuOrderBean> elements,
                                        Collector<TradeSkuOrderBean> out) throws Exception {
                        // reduce() leaves exactly one pre-aggregated element per window
                        TradeSkuOrderBean bean = elements.iterator().next();
                        bean.setStt(AtguiguUtil.tsToDateTime(ctx.window().getStart()));
                        bean.setEdt(AtguiguUtil.tsToDateTime(ctx.window().getEnd()));
                        
                        // NOTE(review): the event ts is overwritten with the emission
                        // time — presumably consumed as a version column by the
                        // ClickHouse sink; confirm against FlinkSinUtil.
                        bean.setTs(System.currentTimeMillis());
                        
                        out.collect(bean);
                    }
                }
            );
    }
    
    /**
     * De-duplicates records sharing an order_detail_id: the first record
     * passes through unchanged; each later record is emitted as the delta
     * (new - previous) of the four amount fields, so downstream sums remain
     * correct without retracting the earlier emission.
     *
     * <p>Per-id state carries a 10s TTL, assuming duplicates arrive close
     * together.
     */
    private SingleOutputStreamOperator<TradeSkuOrderBean> distinctByOrderDetailId(
        SingleOutputStreamOperator<TradeSkuOrderBean> beanStream) {
        return beanStream
            .keyBy(TradeSkuOrderBean::getOrderDetailId)
            .map(new RichMapFunction<TradeSkuOrderBean, TradeSkuOrderBean>() {
                
                // Last bean seen for the current order_detail_id
                private ValueState<TradeSkuOrderBean> lastBeanState;
                
                @Override
                public void open(Configuration parameters) throws Exception {
                    // TTL so per-detail-id state does not accumulate forever
                    StateTtlConfig config = new StateTtlConfig
                        .Builder(org.apache.flink.api.common.time.Time.seconds(10)) // TTL duration
                        .setUpdateType(StateTtlConfig.UpdateType.OnReadAndWrite) // refresh TTL on both read and write
                        .setStateVisibility(StateTtlConfig.StateVisibility.NeverReturnExpired) // expired state reads as null
                        .build();
                    
                    ValueStateDescriptor<TradeSkuOrderBean> desc =
                        new ValueStateDescriptor<>("lastBean", TradeSkuOrderBean.class);
                    desc.enableTimeToLive(config);
                    
                    lastBeanState = getRuntimeContext().getState(desc);
                }
                
                @Override
                public TradeSkuOrderBean map(TradeSkuOrderBean bean) throws Exception {
                    // First record for this id: store it in state and emit it as-is.
                    // Subsequent records: emit new - old for the four metrics,
                    // then store the new record.
                    
                    TradeSkuOrderBean lastBean = lastBeanState.value();
                    if (lastBean != null) {
                        // Write the deltas into lastBean and emit it
                        lastBean.setOrderAmount(bean.getOrderAmount().subtract(lastBean.getOrderAmount()));  // new - old
                        lastBean.setOriginalAmount(bean.getOriginalAmount().subtract(lastBean.getOriginalAmount()));  // new - old
                        lastBean.setActivityAmount(bean.getActivityAmount().subtract(lastBean.getActivityAmount()));  // new - old
                        lastBean.setCouponAmount(bean.getCouponAmount().subtract(lastBean.getCouponAmount()));  // new - old
                        // NOTE(review): the delta bean keeps the *previous* record's
                        // ts, so a duplicate is assumed to land in the same window
                        // as the original — confirm this is acceptable.
                    }
                    lastBeanState.update(bean);  // remember the newest record
                    return lastBean == null ? bean : lastBean;
                }
            });
    }
    
    /**
     * Maps DWD order-detail JSON strings to {@link TradeSkuOrderBean}.
     * Missing activity/coupon split amounts default to zero; ts arrives in
     * seconds and is scaled to milliseconds.
     */
    private SingleOutputStreamOperator<TradeSkuOrderBean> parseToPojo(DataStreamSource<String> stream) {
        return stream.map(new MapFunction<String, TradeSkuOrderBean>() {
            @Override
            public TradeSkuOrderBean map(String value) throws Exception {
                JSONObject obj = JSON.parseObject(value);
                
                BigDecimal activityAmount = obj.getBigDecimal("split_activity_amount");
                BigDecimal couponAmount = obj.getBigDecimal("split_coupon_amount");
                return TradeSkuOrderBean.builder()
                    .orderDetailId(obj.getString("id"))
                    .skuId(obj.getString("sku_id"))
                    .orderAmount(obj.getBigDecimal("split_total_amount"))
                    .originalAmount(obj.getBigDecimal("split_original_amount"))
                    .activityAmount(activityAmount == null ? BigDecimal.ZERO : activityAmount)
                    .couponAmount(couponAmount == null ? BigDecimal.ZERO : couponAmount)
                    .ts(obj.getLong("ts") * 1000)  // seconds -> milliseconds
                    .build();
            }
        });
    }
}
/*
不管是读取数据库还是 redis, 都是通过网络读取的
同步算子:
   
    1. 增加并行度
    
    2. 异步方式
    
        你的数据库的客户端必须支持异步访问方式, 得有异步客户端
            redis 和 phoenix 都没有提供异步客户端
            
        多线程+多同步客户端
            线程与线程之间不会互相阻塞, 把不同的请求放入到不同的线程,每个线程提供一个单独的客户端
            
            使用线程池
            
-----------
异步超时
1. 检查集群是否都是完整启动正常
    zk kafka hdfs hbase redis
    hdfs:
        安全模式  datanode
    
    kafka
        重置:
            1. 去 zk 客户端删除 /kafka:
            deleteall /kafka
            
            2. 删除 kafka 三台节点的数据
                $kafka_home/logs  或者 $kafka_home/data
    
    hbase
        先检测 hdfs 是否 ok:  安全模式和 datanode
        
        重置 hbase:
            1. 去 zk 客户端删除 /hbase:
            deleteall /hbase
            
            2. 去 hdfs 上删除目录: /hbase
            
2. 检测 6 张维度表是否都在, 并且都有数据, 如果不确定, 可以使用 bootstrap 功能把维度数据重新跑一下

3. redis 能否连接

4. phoenix 地址...

5. 建议在测试异步之前, 保证同步的时候能够正常

6. 找我


 
        
 */