package com.flink.hbase;

import org.apache.flink.api.common.functions.RuntimeContext;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.functions.async.ResultFuture;
import org.apache.flink.streaming.api.functions.async.RichAsyncFunction;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.*;
import org.apache.hadoop.hbase.util.Bytes;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.IOException;
import java.time.Instant;
import java.time.LocalDate;
import java.time.ZoneId;
import java.time.format.DateTimeFormatter;
import java.util.Collections;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;

/**
 * Asynchronous HBase lookup function for Flink.
 *
 * <p>For each incoming user id this function fetches the row {@code <userId>} from the
 * configured table via the non-blocking HBase {@link AsyncTable} API and emits the parsed
 * {@link User}. A missing (or unparseable) row completes with an empty result instead of
 * failing the stream; lookup errors and timeouts complete the {@link ResultFuture}
 * exceptionally, which fails the Flink task.
 *
 * <p>Thread-safety: Flink calls {@link #asyncInvoke} from a single task thread; completion
 * callbacks run on HBase client / pool threads, which is safe because {@link ResultFuture}
 * accepts completion from any thread.
 */
public class HBaseAsyncSource extends RichAsyncFunction<String, User> {

    private static final Logger LOG = LoggerFactory.getLogger(HBaseAsyncSource.class);

    private static final String COLUMN_FAMILY = "info";
    private static final byte[] CF_BYTES = Bytes.toBytes(COLUMN_FAMILY);
    private static final byte[] NAME_QUALIFIER = Bytes.toBytes("name");
    private static final byte[] AGE_QUALIFIER = Bytes.toBytes("age");
    private static final byte[] GENDER_QUALIFIER = Bytes.toBytes("gender");

    /** DateTimeFormatter is immutable and thread-safe; build it once instead of per record. */
    private static final DateTimeFormatter PARTITION_DATE_FORMAT =
            DateTimeFormatter.ofPattern("yyyy-MM-dd");

    private final String tableName;
    private final int asyncPoolSize;
    private final long timeoutMs;

    // Initialised in open(); transient because the function instance is serialized by Flink.
    private transient AsyncConnection connection;
    private transient AsyncTable<AdvancedScanResultConsumer> asyncTable;
    private transient ExecutorService executorService;

    /**
     * @param tableName     HBase table to look rows up in
     * @param asyncPoolSize number of threads used for result post-processing
     * @param timeoutMs     per-lookup timeout in milliseconds (covers fetch + parse)
     */
    public HBaseAsyncSource(String tableName, int asyncPoolSize, long timeoutMs) {
        this.tableName = tableName;
        this.asyncPoolSize = asyncPoolSize;
        this.timeoutMs = timeoutMs;
    }

    /** Opens the async HBase connection and the post-processing thread pool. */
    @Override
    public void open(Configuration parameters) throws Exception {
        super.open(parameters);

        LOG.info("正在初始化 HBase 异步连接，表名: {}, 线程池大小: {}, 超时时间: {}ms", 
                 tableName, asyncPoolSize, timeoutMs);

        org.apache.hadoop.conf.Configuration hbaseConf = createHBaseConfiguration();

        // createAsyncConnection returns a future; blocking here is acceptable because open()
        // runs once per subtask before any records are processed.
        this.connection = ConnectionFactory.createAsyncConnection(hbaseConf).get();
        this.asyncTable = connection.getTable(TableName.valueOf(tableName));

        // Give every pool thread a unique name: the previous factory reused one name for all
        // asyncPoolSize threads, which made thread dumps and logs ambiguous.
        final int subtask = getRuntimeContext().getIndexOfThisSubtask();
        final AtomicInteger threadId = new AtomicInteger();
        this.executorService = Executors.newFixedThreadPool(asyncPoolSize,
            r -> {
                Thread t = new Thread(r,
                        "hbase-async-" + subtask + "-" + threadId.getAndIncrement());
                t.setDaemon(true);
                return t;
            });

        LOG.info("HBase 异步连接初始化完成");
    }

    /** Shuts down the pool (waiting up to 30s) and closes the HBase connection. */
    @Override
    public void close() throws Exception {
        LOG.info("正在关闭 HBase 异步连接");

        if (executorService != null) {
            executorService.shutdown();
            try {
                if (!executorService.awaitTermination(30, TimeUnit.SECONDS)) {
                    executorService.shutdownNow();
                }
            } catch (InterruptedException e) {
                executorService.shutdownNow();
                Thread.currentThread().interrupt(); // preserve interrupt status
            }
        }

        if (connection != null) {
            connection.close();
        }

        super.close();
        LOG.info("HBase 异步连接已关闭");
    }

    /**
     * Looks up {@code userId} asynchronously.
     *
     * <p>The previous implementation wrapped a <em>blocking</em> {@code get(timeout)} call in
     * {@code CompletableFuture.supplyAsync}, pinning one pool thread per in-flight request and
     * applying the timeout twice (inner blocking {@code get} plus outer {@code orTimeout}).
     * Chaining on the async client's own future keeps the lookup fully non-blocking; the pool
     * is used only for the cheap result-parsing step.
     */
    @Override
    public void asyncInvoke(String userId, ResultFuture<User> resultFuture) throws Exception {
        Get get = new Get(Bytes.toBytes(userId));
        get.addFamily(CF_BYTES);

        asyncTable.get(get)
            .thenApplyAsync(
                result -> result.isEmpty() ? null : parseUserFromResult(userId, result),
                executorService)
            .orTimeout(timeoutMs, TimeUnit.MILLISECONDS) // single timeout for fetch + parse
            .whenComplete((user, throwable) -> {
                if (throwable != null) {
                    LOG.error("异步获取用户数据失败，userId: {}", userId, throwable);
                    resultFuture.completeExceptionally(throwable);
                } else if (user != null) {
                    resultFuture.complete(Collections.singleton(user));
                } else {
                    // Row missing or unparseable: emit nothing instead of failing the task.
                    LOG.warn("用户不存在，userId: {}", userId);
                    resultFuture.complete(Collections.emptyList());
                }
            });
    }

    /**
     * Converts an HBase {@link Result} into a {@link User}.
     *
     * @return the parsed user, or {@code null} when the row cannot be parsed
     */
    private User parseUserFromResult(String userId, Result result) {
        try {
            String name = getCellValueAsString(result, NAME_QUALIFIER);
            Integer age = getCellValueAsInteger(result, AGE_QUALIFIER);
            String gender = getCellValueAsString(result, GENDER_QUALIFIER);

            // Enrichment metadata: processing timestamp and current partition date.
            long currentTimestamp = System.currentTimeMillis();
            String dt = LocalDate.now().format(PARTITION_DATE_FORMAT);

            return new User(userId, name, age, gender, currentTimestamp, dt);

        } catch (Exception e) {
            LOG.error("解析 HBase 结果失败，userId: {}", userId, e);
            return null;
        }
    }

    /** Latest value of {@code info:<qualifier>} as a string, or {@code null} if absent. */
    private String getCellValueAsString(Result result, byte[] qualifier) {
        Cell cell = result.getColumnLatestCell(CF_BYTES, qualifier);
        return cell != null ? Bytes.toString(CellUtil.cloneValue(cell)) : null;
    }

    /** Latest value of {@code info:<qualifier>} parsed as an integer, or {@code null}. */
    private Integer getCellValueAsInteger(Result result, byte[] qualifier) {
        String strValue = getCellValueAsString(result, qualifier);
        if (strValue != null && !strValue.trim().isEmpty()) {
            try {
                return Integer.parseInt(strValue.trim());
            } catch (NumberFormatException e) {
                LOG.warn("无法解析整数值: {}", strValue);
            }
        }
        return null;
    }

    /**
     * Builds the HBase client configuration from job parameters / system properties.
     *
     * <p>NOTE(review): this starts from a bare Hadoop {@code Configuration}; consider
     * {@code HBaseConfiguration.create()} so that {@code hbase-site.xml} defaults on the
     * classpath are picked up — confirm against the deployment before changing.
     */
    private org.apache.hadoop.conf.Configuration createHBaseConfiguration() {
        org.apache.hadoop.conf.Configuration conf = new org.apache.hadoop.conf.Configuration();

        // ZooKeeper quorum used by the HBase client for region location.
        conf.set("hbase.zookeeper.quorum", getParameter("hbase.zookeeper.quorum", "localhost"));
        conf.set("hbase.zookeeper.property.clientPort", getParameter("hbase.zookeeper.port", "2181"));
        conf.set("zookeeper.znode.parent", getParameter("hbase.zookeeper.znode.parent", "/hbase"));

        // Optional Kerberos authentication.
        if (getParameter("security.kerberos.enabled", "false").equals("true")) {
            conf.set("hadoop.security.authentication", "kerberos");
            conf.set("hbase.security.authentication", "kerberos");
            conf.set("hbase.master.kerberos.principal", getParameter("hbase.master.kerberos.principal", ""));
            conf.set("hbase.regionserver.kerberos.principal", getParameter("hbase.regionserver.kerberos.principal", ""));

            // Client login credentials (both must be present to take effect).
            String principal = getParameter("security.kerberos.login.principal", "");
            String keytab = getParameter("security.kerberos.login.keytab", "");
            if (!principal.isEmpty() && !keytab.isEmpty()) {
                conf.set("hbase.client.kerberos.principal", principal);
                conf.set("hbase.client.keytab.file", keytab);
            }
        }

        // Client tuning knobs.
        conf.setInt("hbase.client.scanner.caching", 
                    Integer.parseInt(getParameter("hbase.client.scanner.caching", "1000")));
        conf.setLong("hbase.client.scanner.timeout.period", 
                     Long.parseLong(getParameter("hbase.client.scanner.timeout", "60000")));
        conf.setInt("hbase.client.retries.number", 
                    Integer.parseInt(getParameter("hbase.client.retries", "3")));

        return conf;
    }

    /**
     * Resolves a parameter from the Flink global job parameters, falling back to a JVM system
     * property and finally to {@code defaultValue}.
     *
     * <p>The previous version cast the global job parameters to {@link Configuration}
     * unconditionally, which throws {@link ClassCastException} when the job registers another
     * {@code GlobalJobParameters} implementation (e.g. a ParameterTool). Guard with
     * {@code instanceof} instead.
     */
    private String getParameter(String key, String defaultValue) {
        RuntimeContext runtimeContext = getRuntimeContext();
        if (runtimeContext != null) {
            Object globalParams = runtimeContext.getExecutionConfig().getGlobalJobParameters();
            if (globalParams instanceof Configuration) {
                Configuration configuration = (Configuration) globalParams;
                if (configuration.containsKey(key)) {
                    return configuration.getString(key, defaultValue);
                }
            }
        }
        return System.getProperty(key, defaultValue);
    }
}