package com.atguigu.gmall.realtime.app.func;

import com.alibaba.fastjson.JSONObject;
import com.atguigu.gmall.realtime.beans.DimJoinFunction;
import com.atguigu.gmall.realtime.common.GmallConfig;
import com.atguigu.gmall.realtime.utils.HbaseUtil;
import com.atguigu.gmall.realtime.utils.RedisUtil;
import io.lettuce.core.api.StatefulRedisConnection;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.functions.async.ResultFuture;
import org.apache.flink.streaming.api.functions.async.RichAsyncFunction;
import org.apache.hadoop.hbase.client.AsyncConnection;

import java.util.Collections;
import java.util.concurrent.CompletableFuture;
import java.util.function.Consumer;
import java.util.function.Function;
import java.util.function.Supplier;

/**
 * @author Felix
 * @date 2023/8/11
 * 发送异步请求 完成维度关联
 */
/**
 * Sends asynchronous requests to enrich stream records with dimension data.
 *
 * <p>Lookup strategy: first try the Redis cache; on a miss, fall back to an async
 * HBase read and write the result back to Redis so subsequent lookups hit the cache.
 *
 * <p>Subclasses supply {@code getKey(obj)} (the dimension row key for a record) and
 * {@code join(obj, dimJsonObj)} (how dimension attributes are copied onto the record)
 * via {@link DimJoinFunction}.
 *
 * @param <T> type of the stream records being enriched
 */
public abstract class DimAsyncFunction<T> extends RichAsyncFunction<T,T> implements DimJoinFunction<T> {

    /** Dimension table name (without namespace); serialized with the function. */
    private final String tableName;
    // Connections are not serializable; they are created in open() on each task,
    // so mark them transient to keep the function instance serializable.
    private transient StatefulRedisConnection<String,String> asyncRedisConn;
    private transient AsyncConnection asyncHbaseConn;

    /**
     * @param tableName name of the dimension table to look up (used both as the
     *                  Redis key prefix and as the HBase table name)
     */
    public DimAsyncFunction(String tableName) {
        this.tableName = tableName;
    }

    @Override
    public void open(Configuration parameters) throws Exception {
        asyncRedisConn = RedisUtil.getAsyncRedisConnection();
        asyncHbaseConn = HbaseUtil.getAsyncConnection();
    }

    @Override
    public void close() throws Exception {
        RedisUtil.closeAsyncRedisConnection(asyncRedisConn);
        HbaseUtil.closeAsyncConnection(asyncHbaseConn);
    }

    /**
     * Looks up the dimension for {@code obj} asynchronously and completes
     * {@code resultFuture} with the (possibly enriched) record.
     *
     * <p>The future is ALWAYS completed: when the dimension is missing from both
     * Redis and HBase the record is forwarded un-enriched, and any exception in
     * the pipeline is propagated via {@code completeExceptionally}. (Previously a
     * cache/HBase double miss or an exception left the future pending, stalling
     * the record in the async-wait operator until the async timeout fired.)
     */
    @Override
    public void asyncInvoke(T obj, ResultFuture<T> resultFuture) throws Exception {
        // Compute the key once instead of re-deriving it in every stage.
        String key = getKey(obj);
        String redisKey = tableName + ":" + key;

        CompletableFuture
            .supplyAsync(
                // Stage 1: async read from the Redis cache (may return null on a miss).
                () -> RedisUtil.asyncGetDimInfo(asyncRedisConn, redisKey)
            )
            .thenApplyAsync(dimJsonObj -> {
                if (dimJsonObj == null) {
                    // Cache miss: fall back to an async HBase read.
                    System.out.println(tableName + "  " + key + "  从 hbase 读取");
                    dimJsonObj = HbaseUtil.getDimInfoFromHbaseByAsync(
                        asyncHbaseConn, GmallConfig.HBASE_NAMESPACE, tableName, key);
                    if (dimJsonObj != null) {
                        // Warm the Redis cache with the row fetched from HBase.
                        RedisUtil.asyncWriteDimInfo(asyncRedisConn, redisKey, dimJsonObj);
                    }
                } else {
                    // Cache hit.
                    System.out.println(tableName + "  " + key + "  从 redis 读取");
                }
                return dimJsonObj;
            })
            .thenAcceptAsync(dimJsonObj -> {
                if (dimJsonObj != null) {
                    // Copy the dimension attributes onto the stream record.
                    join(obj, dimJsonObj);
                }
                // Always complete so the record never hangs in the async operator;
                // on a double miss the record is forwarded without enrichment.
                resultFuture.complete(Collections.singleton(obj));
            })
            .exceptionally(throwable -> {
                // Surface failures to Flink instead of waiting for the async timeout.
                resultFuture.completeExceptionally(throwable);
                return null;
            });
    }

    /**
     * Invoked by Flink when a record exceeds the configured async timeout;
     * fails fast with troubleshooting hints.
     */
    @Override
    public void timeout(T input, ResultFuture<T> resultFuture) throws Exception {
        // Adjust to your own needs (e.g. emit un-enriched instead of failing).
        throw new RuntimeException("异步超时:一般是其他原因导致的异步超时, 请检查: \n" +
            "1.检查集群是否都 ok: hdfs redis hbase kafka. \n" +
            "2.检查下 redis 的配置 bind 0.0.0.0 \n" +
            "3.检查用到的6张维度表是否都在,并且每张表都数据. 最好通过 maxwell-bootstrap 同步一下 \n" +
            "4. 找我" +
            "");

    }
}
