package com.atguigu.gmall.realtime.function;

import com.alibaba.fastjson.JSONObject;
import com.atguigu.gmall.realtime.util.HbaseUtil;
import com.atguigu.gmall.realtime.util.RedisUtil;
import io.lettuce.core.api.StatefulRedisConnection;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.functions.async.ResultFuture;
import org.apache.flink.streaming.api.functions.async.RichAsyncFunction;
import org.apache.hadoop.hbase.client.AsyncConnection;

import java.util.Collections;
import java.util.concurrent.CompletableFuture;
import java.util.function.Consumer;
import java.util.function.Function;
import java.util.function.Supplier;

/**
 * @Author lzc
 * @Date 2023/5/5 11:19
 */
/**
 * Async Flink function that enriches a stream element with dimension data.
 *
 * <p>Lookup strategy: try the Redis cache first; on a miss, read the dimension
 * row from HBase and write it back into Redis. The concrete subclass supplies
 * the table name, the row key, and how to attach the dimension to the element
 * (via {@link DimFunction}).
 *
 * @param <T> element type flowing through the operator (enriched in place)
 */
public abstract class AsyncDimFunctionHBase<T> extends RichAsyncFunction<T, T> implements DimFunction<T> {
    
    private AsyncConnection hbaseAsyncConn;
    private StatefulRedisConnection<String, String> asyncRedisConn;
    
    @Override
    public void open(Configuration parameters) throws Exception {
        // One async client per parallel subtask.
        hbaseAsyncConn = HbaseUtil.getHbaseAsyncConnection();
        asyncRedisConn = RedisUtil.getAsyncRedisConnection();
    }
    
    @Override
    public void close() throws Exception {
        // Guard against a partially-failed open(): either field may still be null.
        if (hbaseAsyncConn != null) {
            hbaseAsyncConn.close();
        }
        if (asyncRedisConn != null) {
            asyncRedisConn.close();
        }
    }
    
    /**
     * Asynchronously resolves the dimension for {@code bean} and emits the
     * enriched element through {@code resultFuture}.
     *
     * @param bean         the element needing dimension enrichment
     * @param resultFuture sink for the enriched element (or the failure)
     */
    @Override
    public void asyncInvoke(T bean,
                            ResultFuture<T> resultFuture) throws Exception {
        // Build the cache key once instead of re-concatenating it in every stage.
        final String redisKey = getTable() + ":" + getId(bean);
        CompletableFuture
            .supplyAsync(() ->
                // Stage 1: try the Redis cache.
                RedisUtil.asyncReadDim(asyncRedisConn, redisKey)
            )
            .thenApplyAsync(dim -> {
                // Stage 2: null means cache miss -> fall back to HBase, then warm the cache.
                if (dim == null) {
                    System.out.println(getTable() + "  " + getId(bean) + "  从 hbase 读取");
                    dim = HbaseUtil.asyncReadDim(hbaseAsyncConn, "gmall", getTable(), getId(bean));
                    RedisUtil.asyncWriteDim(asyncRedisConn, redisKey, dim);
                } else {
                    System.out.println(getTable() + "  " + getId(bean) + "  从 redis 读取");
                }
                return dim;
            })
            .thenAccept(dim -> {
                // Stage 3: attach the dimension (subclass hook) and emit the result.
                addDim(bean, dim);
                resultFuture.complete(Collections.singletonList(bean));
            })
            .exceptionally(ex -> {
                // Without this, a failed Redis/HBase read would leave resultFuture
                // incomplete and the record would stall until the async timeout,
                // hiding the real cause. Propagate the failure to Flink instead.
                resultFuture.completeExceptionally(ex);
                return null;
            });
    }
    
    /**
     * Invoked by Flink when the async lookup exceeds the configured timeout.
     * Fails fast with troubleshooting hints rather than silently dropping data.
     */
    @Override
    public void timeout(T input, ResultFuture<T> resultFuture) throws Exception {
        throw new RuntimeException("异步超时:一般是其他原因导致的异步超时, 请检查: \n" +
                                       "1.检查集群是否都 ok: hdfs redis hbase kafka. \n" +
                                       "2.检查下 redis 的配置 bind 0.0.0.0 \n" +
                                       "3.检查用到的 6 张维度表是否都在,并且每张表都数据. 最好通过 maxwell-bootstrap 同步一下 \n" +
                                       "4. 找我" +
                                       "");
    }
}
