package com.atguigu.realtime.func;

import com.alibaba.fastjson.JSONObject;
import com.atguigu.realtime.beans.DimJoinFunction;
import com.atguigu.realtime.common.GmallConfig;
import com.atguigu.realtime.utils.HbaseUtil;
import com.atguigu.realtime.utils.RedisUtil;
import io.lettuce.core.api.StatefulRedisConnection;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.functions.async.ResultFuture;
import org.apache.flink.streaming.api.functions.async.RichAsyncFunction;
import org.apache.hadoop.hbase.client.AsyncConnection;

import java.util.Collections;
import java.util.concurrent.CompletableFuture;
import java.util.function.Consumer;
import java.util.function.Function;
import java.util.function.Supplier;

/**
 * Sends async requests to enrich stream elements with dimension data.
 *
 * <p>Lookup order per element: Redis cache first; on a miss, fall back to HBase
 * and write the row back into Redis. Subclasses supply the key extraction and
 * the join logic via {@link DimJoinFunction}.
 *
 * @author: 洛尘
 * @since: 2023-10-15 21:08
 **/
public abstract class DimAsyncFunction<T> extends RichAsyncFunction<T, T> implements DimJoinFunction<T> {
    // Dimension table name in HBase; its lower-cased form is also the Redis key prefix.
    private final String tableName;
    // Async Redis connection used as the dimension cache; opened in open(), released in close().
    private StatefulRedisConnection<String, String> redisConn;
    // Async HBase connection — the source of truth for dimension rows.
    private AsyncConnection hbaseConn;

    public DimAsyncFunction(String tableName) {
        this.tableName = tableName;
    }

    @Override
    public void open(Configuration parameters) throws Exception {
        // Keep the RichAsyncFunction lifecycle intact before acquiring our own resources.
        super.open(parameters);
        redisConn = RedisUtil.getAsyncRedisConnection();
        hbaseConn = HbaseUtil.getAsyncConnection();
    }

    @Override
    public void close() throws Exception {
        // FIX: the original never closed these, leaking connections on every task restart.
        if (redisConn != null) {
            redisConn.close();
        }
        if (hbaseConn != null) {
            hbaseConn.close();
        }
        super.close();
    }

    @Override
    public void asyncInvoke(T obj, ResultFuture<T> resultFuture) throws Exception {
        // Hoist the key once; the original recomputed getKey(obj) in every stage.
        String key = getKey(obj);
        String redisKey = tableName.toLowerCase() + ":" + key;

        CompletableFuture
                // Stage 1: query the Redis cache for the dimension row.
                .supplyAsync(() -> RedisUtil.asyncGetDimInfo(redisConn, redisKey))
                // Stage 2: on cache miss, read from HBase and backfill the cache.
                .thenApplyAsync(dimJsonObj -> {
                    if (dimJsonObj == null) {
                        System.out.println("~~~从Hbase中查询" + tableName + "表的" + key + "数据~~~");
                        dimJsonObj = HbaseUtil.getDimInfoFromHbaseByAsync(
                                hbaseConn, GmallConfig.HBASE_NAMESPACE, tableName, key);
                        if (dimJsonObj != null) {
                            RedisUtil.asyncWriteDim(redisConn, redisKey, dimJsonObj);
                        }
                    } else {
                        System.out.println("~~~从Redis中查询" + tableName + "表的" + key + "数据~~~");
                    }
                    return dimJsonObj;
                })
                // Stage 3: join the dimension onto the element and emit it downstream.
                .thenAcceptAsync(dimJsonObj -> {
                    if (dimJsonObj != null) {
                        join(obj, dimJsonObj);
                    }
                    resultFuture.complete(Collections.singleton(obj));
                })
                // FIX: the original chain never completed the future on failure, so any
                // exception stalled the operator until the async timeout fired and hid
                // the root cause. Propagate the error so Flink fails fast with it.
                .exceptionally(throwable -> {
                    resultFuture.completeExceptionally(throwable);
                    return null;
                });
    }

    /**
     * Invoked by Flink when an async request exceeds the configured timeout.
     * Fails the job with troubleshooting hints rather than silently dropping the element.
     */
    @Override
    public void timeout(T input, ResultFuture<T> resultFuture) throws Exception {
        throw new RuntimeException("异步超时:一般是其他原因导致的异步超时, 请检查: \n" +
                "1.检查集群是否都 ok: hdfs redis hbase kafka. \n" +
                "2.检查下 redis 的配置 bind 0.0.0.0 \n" +
                "3.检查用到的6张维度表是否都在,并且每张表都数据. 最好通过 maxwell-bootstrap 同步一下 \n" +
                "4. 找我" +
                "");
    }
}