package com.atguigu.gmall.realtime.app.dws;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.atguigu.gmall.realtime.app.BaseApp;
import com.atguigu.gmall.realtime.bean.TradeSkuOrderBean;
import com.atguigu.gmall.realtime.common.Constant;
import com.atguigu.gmall.realtime.function.AsyncDimFunction;
import com.atguigu.gmall.realtime.util.AtguiguUtil;
import com.atguigu.gmall.realtime.util.FlinkSinkUtil;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.functions.ReduceFunction;
import org.apache.flink.api.common.state.ValueState;
import org.apache.flink.api.common.state.ValueStateDescriptor;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.datastream.AsyncDataStream;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.KeyedProcessFunction;
import org.apache.flink.streaming.api.functions.windowing.ProcessWindowFunction;
import org.apache.flink.streaming.api.windowing.assigners.TumblingEventTimeWindows;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.streaming.api.windowing.windows.TimeWindow;
import org.apache.flink.util.Collector;

import java.math.BigDecimal;
import java.time.Duration;
import java.util.concurrent.TimeUnit;

/**
 * @Author lzc
 * @Date 2023/1/8 08:26
 */
public class Dws_09_TradeSkuOrderBean_Cache_Async extends BaseApp {
    
    /**
     * Shared timeout (seconds) for every async dimension lookup.
     * NOTE(review): the original code used 120s for five lookups but 30s for the
     * last one (dim_base_category1); unified to 120s for consistency.
     */
    private static final int DIM_ASYNC_TIMEOUT_SECONDS = 120;
    
    /**
     * How long (processing time, ms) the dedup state for one order-detail id is
     * retained before being cleared. Retraction/update records produced by the
     * DWD left join arrive within seconds of the original record, so one minute
     * is a safe upper bound; without cleanup the keyed state grows forever.
     */
    private static final long DEDUP_STATE_TTL_MS = 60 * 1000L;
    
    public static void main(String[] args) {
        new Dws_09_TradeSkuOrderBean_Cache_Async().init(
            4009,
            2,
            "Dws_09_TradeSkuOrderBean_Cache_Async",
            Constant.TOPIC_DWD_TRADE_ORDER_DETAIL
        );
    }
    
    /**
     * Job pipeline:
     * parse JSON to POJO -> de-duplicate by order-detail id -> window + aggregate
     * by sku id -> asynchronously join 6 dimension tables -> sink to ClickHouse.
     */
    @Override
    public void handle(StreamExecutionEnvironment env,
                       DataStreamSource<String> stream) {
        // 1. Wrap the raw JSON records into POJOs
        SingleOutputStreamOperator<TradeSkuOrderBean> beanStream = parseToPojo(stream);
        
        // 2. De-duplicate by order-detail id (left join upstream may emit the
        //    same detail several times with progressively richer amounts)
        SingleOutputStreamOperator<TradeSkuOrderBean> distinctedStream = distinctByOrderDetailId(beanStream);
        // 3. Window and aggregate the amounts per sku
        SingleOutputStreamOperator<TradeSkuOrderBean> streamWithoutDims = windowAndAgg(distinctedStream);
        // 4. Enrich with dimension attributes (sku/spu/trademark/category 1-3)
        SingleOutputStreamOperator<TradeSkuOrderBean> streamWithDims = joinDims(streamWithoutDims);
        // 5. Write the enriched aggregates to ClickHouse
        writeToClickHouse(streamWithDims);
        
    }
    
    /** Sinks the final aggregates into the ClickHouse table dws_trade_sku_order_window. */
    private void writeToClickHouse(SingleOutputStreamOperator<TradeSkuOrderBean> streamWithDims) {
        streamWithDims.addSink(FlinkSinkUtil.getClickHouseSink("dws_trade_sku_order_window", TradeSkuOrderBean.class));
    }
    
    /**
     * One async dimension lookup with the shared timeout. Extracted to remove
     * the six identical {@code AsyncDataStream.unorderedWait} call sites.
     *
     * @param stream      input stream to enrich
     * @param dimFunction table/id/setter triple describing the lookup
     * @return stream with the dimension attributes filled in
     */
    private SingleOutputStreamOperator<TradeSkuOrderBean> asyncJoinDim(
        SingleOutputStreamOperator<TradeSkuOrderBean> stream,
        AsyncDimFunction<TradeSkuOrderBean> dimFunction) {
        return AsyncDataStream.unorderedWait(stream, dimFunction, DIM_ASYNC_TIMEOUT_SECONDS, TimeUnit.SECONDS);
    }
    
    /**
     * Chains six async dimension joins. Order matters: each lookup reads an id
     * written by the previous one (sku -> spu/tm/c3, c3 -> c2, c2 -> c1).
     * Column names are upper-case because Phoenix stores identifiers that way.
     */
    private SingleOutputStreamOperator<TradeSkuOrderBean> joinDims(SingleOutputStreamOperator<TradeSkuOrderBean> streamWithoutDims) {
        SingleOutputStreamOperator<TradeSkuOrderBean> skuStream = asyncJoinDim(
            streamWithoutDims,
            new AsyncDimFunction<TradeSkuOrderBean>() {
                @Override
                public String getTable() {
                    return "dim_sku_info";
                }
            
                @Override
                public String getId(TradeSkuOrderBean input) {
                    return input.getSkuId();
                }
            
                @Override
                public void addDim(TradeSkuOrderBean bean,
                                   JSONObject dim) {
                    // dim_sku_info also carries the foreign keys used by the
                    // spu / trademark / category3 lookups below
                    bean.setSkuName(dim.getString("SKU_NAME"));
                    bean.setSpuId(dim.getString("SPU_ID"));
                    bean.setTrademarkId(dim.getString("TM_ID"));
                    bean.setCategory3Id(dim.getString("CATEGORY3_ID"));
                }
            }
        );
    
        SingleOutputStreamOperator<TradeSkuOrderBean> spuStream = asyncJoinDim(
            skuStream,
            new AsyncDimFunction<TradeSkuOrderBean>() {
                @Override
                public String getTable() {
                    return "dim_spu_info";
                }
            
                @Override
                public String getId(TradeSkuOrderBean input) {
                    return input.getSpuId();
                }
            
                @Override
                public void addDim(TradeSkuOrderBean bean,
                                   JSONObject dim) {
                    bean.setSpuName(dim.getString("SPU_NAME"));
                }
            }
        );
    
        SingleOutputStreamOperator<TradeSkuOrderBean> tmStream = asyncJoinDim(
            spuStream,
            new AsyncDimFunction<TradeSkuOrderBean>() {
                @Override
                public String getTable() {
                    return "dim_base_trademark";
                }
            
                @Override
                public String getId(TradeSkuOrderBean input) {
                    return input.getTrademarkId();
                }
            
                @Override
                public void addDim(TradeSkuOrderBean bean,
                                   JSONObject dim) {
                    bean.setTrademarkName(dim.getString("TM_NAME"));
                }
            }
        );
    
        SingleOutputStreamOperator<TradeSkuOrderBean> c3Stream = asyncJoinDim(
            tmStream,
            new AsyncDimFunction<TradeSkuOrderBean>() {
                @Override
                public String getTable() {
                    return "dim_base_category3";
                }
            
                @Override
                public String getId(TradeSkuOrderBean input) {
                    return input.getCategory3Id();
                }
            
                @Override
                public void addDim(TradeSkuOrderBean bean,
                                   JSONObject dim) {
                    bean.setCategory3Name(dim.getString("NAME"));
                    bean.setCategory2Id(dim.getString("CATEGORY2_ID"));
                }
            }
        );
    
        SingleOutputStreamOperator<TradeSkuOrderBean> c2Stream = asyncJoinDim(
            c3Stream,
            new AsyncDimFunction<TradeSkuOrderBean>() {
                @Override
                public String getTable() {
                    return "dim_base_category2";
                }
            
                @Override
                public String getId(TradeSkuOrderBean input) {
                    return input.getCategory2Id();
                }
            
                @Override
                public void addDim(TradeSkuOrderBean bean,
                                   JSONObject dim) {
                    bean.setCategory2Name(dim.getString("NAME"));
                    bean.setCategory1Id(dim.getString("CATEGORY1_ID"));
                }
            }
        );
    
        return asyncJoinDim(
            c2Stream,
            new AsyncDimFunction<TradeSkuOrderBean>() {
                @Override
                public String getTable() {
                    return "dim_base_category1";
                }
            
                @Override
                public String getId(TradeSkuOrderBean input) {
                    return input.getCategory1Id();
                }
            
                @Override
                public void addDim(TradeSkuOrderBean bean,
                                   JSONObject dim) {
                    bean.setCategory1Name(dim.getString("NAME"));
                }
            }
        );
    
    }
    
    /**
     * 5-second event-time tumbling windows keyed by sku id, summing the four
     * amount fields. The window function stamps the window start/end and
     * replaces ts with the emit time (used downstream as the write timestamp).
     */
    private SingleOutputStreamOperator<TradeSkuOrderBean> windowAndAgg(
        SingleOutputStreamOperator<TradeSkuOrderBean> distinctedStream) {
        return distinctedStream
            .assignTimestampsAndWatermarks(
                WatermarkStrategy
                    .<TradeSkuOrderBean>forBoundedOutOfOrderness(Duration.ofSeconds(3))
                    .withTimestampAssigner((bean, ts) -> bean.getTs())
                    // keep watermarks advancing when a partition goes quiet
                    .withIdleness(Duration.ofSeconds(60))
            
            )
            .keyBy(TradeSkuOrderBean::getSkuId)
            .window(TumblingEventTimeWindows.of(Time.seconds(5)))
            .reduce(
                new ReduceFunction<TradeSkuOrderBean>() {
                    @Override
                    public TradeSkuOrderBean reduce(TradeSkuOrderBean value1,
                                                    TradeSkuOrderBean value2) throws Exception {
                        // accumulate in-place into value1 (safe: beans are
                        // per-window, not shared between keys)
                        value1.setOriginalAmount(value1.getOriginalAmount().add(value2.getOriginalAmount()));
                        value1.setActivityAmount(value1.getActivityAmount().add(value2.getActivityAmount()));
                        value1.setCouponAmount(value1.getCouponAmount().add(value2.getCouponAmount()));
                        value1.setOrderAmount(value1.getOrderAmount().add(value2.getOrderAmount()));
                        return value1;
                    }
                },
                new ProcessWindowFunction<TradeSkuOrderBean, TradeSkuOrderBean, String, TimeWindow>() {
                    @Override
                    public void process(String key,
                                        Context ctx,
                                        Iterable<TradeSkuOrderBean> elements,
                                        Collector<TradeSkuOrderBean> out) throws Exception {
                        // exactly one pre-aggregated element per window
                        TradeSkuOrderBean bean = elements.iterator().next();
                        
                        bean.setStt(AtguiguUtil.tsToDateTime(ctx.window().getStart()));
                        bean.setEdt(AtguiguUtil.tsToDateTime(ctx.window().getEnd()));
                        
                        bean.setTs(System.currentTimeMillis());
                        
                        out.collect(bean);
                    }
                }
            );
    }
    
    /**
     * De-duplicates by order-detail id using the "emit first, then emit the
     * delta" technique: the first record goes out as-is; every later record for
     * the same id emits (new - previous) so downstream sums stay correct.
     * A processing-time timer clears the per-key state after
     * {@link #DEDUP_STATE_TTL_MS} so the keyed state does not grow without bound.
     */
    private SingleOutputStreamOperator<TradeSkuOrderBean> distinctByOrderDetailId(
        SingleOutputStreamOperator<TradeSkuOrderBean> beanStream) {
        return beanStream
            .keyBy(TradeSkuOrderBean::getOrderDetailId)
            .process(new KeyedProcessFunction<String, TradeSkuOrderBean, TradeSkuOrderBean>() {
                
                // last bean seen for the current order-detail id
                private ValueState<TradeSkuOrderBean> beanState;
                
                @Override
                public void open(Configuration parameters) throws Exception {
                    beanState = getRuntimeContext().getState(new ValueStateDescriptor<TradeSkuOrderBean>("bean", TradeSkuOrderBean.class));
                }
                
                @Override
                public void processElement(TradeSkuOrderBean currentBean,
                                           Context ctx,
                                           Collector<TradeSkuOrderBean> out) throws Exception {
                    
                    TradeSkuOrderBean lastBean = beanState.value();
                    if (lastBean == null) {
                        // first record for this detail id: emit as-is and
                        // schedule state cleanup
                        ctx.timerService().registerProcessingTimeTimer(
                            ctx.timerService().currentProcessingTime() + DEDUP_STATE_TTL_MS);
                        out.collect(currentBean);
                    } else {
                        // repeat record: emit only the delta (new minus old)
                        lastBean.setOriginalAmount(currentBean.getOriginalAmount().subtract(lastBean.getOriginalAmount()));
                        lastBean.setActivityAmount(currentBean.getActivityAmount().subtract(lastBean.getActivityAmount()));
                        lastBean.setCouponAmount(currentBean.getCouponAmount().subtract(lastBean.getCouponAmount()));
                        lastBean.setOrderAmount(currentBean.getOrderAmount().subtract(lastBean.getOrderAmount()));
                        
                        out.collect(lastBean);
                    }
                    
                    // remember the latest version for the next delta
                    beanState.update(currentBean);
                    
                }
                
                @Override
                public void onTimer(long timestamp,
                                    OnTimerContext ctx,
                                    Collector<TradeSkuOrderBean> out) throws Exception {
                    // retractions for this detail id have long since arrived;
                    // drop the state to keep total state size bounded
                    beanState.clear();
                }
            });
    }
    
    /**
     * Parses one DWD order-detail JSON record into a {@link TradeSkuOrderBean}.
     * Activity/coupon splits may be absent and default to zero; ts is expected
     * in seconds and converted to milliseconds.
     */
    private SingleOutputStreamOperator<TradeSkuOrderBean> parseToPojo(DataStreamSource<String> stream) {
        return stream
            .map(new MapFunction<String, TradeSkuOrderBean>() {
                @Override
                public TradeSkuOrderBean map(String value) throws Exception {
                    JSONObject obj = JSON.parseObject(value);
                    return TradeSkuOrderBean.builder()
                        .orderDetailId(obj.getString("id"))
                        .skuId(obj.getString("sku_id"))
                        .originalAmount(obj.getBigDecimal("split_original_amount"))
                        .orderAmount(obj.getBigDecimal("split_total_amount"))
                        .activityAmount(obj.getBigDecimal("split_activity_amount") == null ? BigDecimal.ZERO : obj.getBigDecimal("split_activity_amount"))
                        .couponAmount(obj.getBigDecimal("split_coupon_amount") == null ? BigDecimal.ZERO : obj.getBigDecimal("split_coupon_amount"))
                        // NOTE(review): NPEs if "ts" is missing — assumed always
                        // present in DWD records; confirm against the producer
                        .ts(obj.getLong("ts") * 1000)
                        .build();
                }
            });
    }
}
/*
异步超时
    一般都是其他问题导致的异步超时
    
    1. 先检测各个集群是否正常
        hdfs  hbase redis kafka zk
        
        kafka 集群:
            停止 kafka 集群
            1. 删除 kafka 的数据
                xcall "rm -rf /opt/module/kafka-3.0.0/logs/*"
            2. zk 中删除 /kafka 节点 zkClient.sh
                deleteall /kafka
                
        hbase集群的问题:
            一般是因为 hdfs 出问题, 由于进入了安全模式(由于数据丢失, 我们这里一般是 ck 数据的问题)
            1. 第一步退出安全模式
                hdfs dfsadmin -safemode leave
            2. 删除 ck 目录
                /gmall
            如果 hbase 还不好: 需要复原 hbase
            
            1. 删除 hdfs 的目录: /hbase
            2. zk 中删除 /hbase 节点 zkClient.sh
                deleteall /hbase
                
      2. 检测 6 张维度表是否都在, 并且每张表都应该有完整的数据
            需要分别对 6 张表用 bootstrap 同步一遍
      
      3. 代码问题
            最好逐步验证
            现在没有异步的能够正常
            
      4. 找我
            
            

-----------------------
都需要通过网络读
第一个数据
 发送请求->网络传输->redis(phoenix)->网络传输回来->继续处理
第二个数据
    ....
第...
 
 每个并行度处理数据是一种同步方式处理
 
1. 增加并行度
    增加了同时处理数据的能力.
    有上限, 并且成本高

2. 对单个并行度来说, 可以使用异步的方式来处理数据
    来了数据, 就发送网络请求,不等回复, 就继续发送后面的请求...
    
    谁先返回, 就先处理谁
    
    使用异步 io: 要有支持异步查询的客户端
    
        redis 和 phoenix 的客户端目前都不支持异步操作
        
    没异步客户端: 使用 多线程
        每个线程中, 创建一个同步客户端
        
        多线程+多客户端
            一个线程一个客户端
            
            线程: 线程池
            客户端: 连接池
            
     
     总结:
        多线程+多客户端: 一个线程一个客户端
        
        使用线程池和连接池
        

------------------------

rm -rf /opt/module/maxwell-1.27.1/lib/mysql-connector-java-8.0.17.jar
cp /opt/module/hive-3.1.2/lib/mysql-connector-java-5.1.27-bin.jar  /opt/module/maxwell-1.27.1/lib/

 */