package org.apache.flink.connector.scylladb.source;

import com.datastax.driver.core.*;
import org.apache.flink.connector.scylladb.serialization.ScyllaDBRowDataDeserializer;
import org.apache.flink.table.data.*;
import org.apache.flink.table.functions.FunctionContext;
import org.apache.flink.table.functions.TableFunction;
import org.apache.flink.table.types.logical.ArrayType;
import org.apache.flink.table.types.logical.DecimalType;
import org.apache.flink.table.types.logical.LogicalType;
import org.apache.flink.table.types.logical.RowType;
import org.apache.flink.table.types.logical.TimestampType;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.math.BigDecimal;
import java.time.Duration;
import java.time.LocalDateTime;
import java.util.List;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;

/**
 * ScyllaDB 查找函数，用于维表关联查询
 */
public class ScyllaDBLookupFunction extends TableFunction<RowData> {

    private static final Logger LOG = LoggerFactory.getLogger(ScyllaDBLookupFunction.class);

    private final String url;
    private final String keySpace;
    private final String tableName;
    private final String username;
    private final String password;
    private final long cacheMaxRows;
    private final Duration cacheTtl;
    private final int maxRetries;
    private final int connectionPoolLocalSize;
    private final int connectionPoolRemoteSize;
    private final Duration connectionTimeout;
    private final Duration requestTimeout;
    private final String consistencyLevel;
    private final boolean sslEnabled;
    private final String sslKeystorePath;
    private final String sslKeystorePassword;
    private final String sslTruststorePath;
    private final String sslTruststorePassword;
    private final RowType rowType;
    private RowType sourceRowType;  // 原始表的 RowType，在 open() 中初始化
    private final int[] keyIndices;
    private final boolean aggregateMode;
    private final String aggregateFieldName;

    private final ConcurrentHashMap<String, CacheEntry> cache = new ConcurrentHashMap<>(1024);

    private transient Cluster cluster;
    private transient Session session;
    private transient ScyllaDBRowDataDeserializer deserializer;
    private transient PreparedStatement lookupStatement;

    public ScyllaDBLookupFunction(
            String url,
            String keySpace,
            String tableName,
            String username,
            String password,
            long cacheMaxRows,
            Duration cacheTtl,
            int maxRetries,
            int connectionPoolLocalSize,
            int connectionPoolRemoteSize,
            Duration connectionTimeout,
            Duration requestTimeout,
            String consistencyLevel,
            boolean sslEnabled,
            String sslKeystorePath,
            String sslKeystorePassword,
            String sslTruststorePath,
            String sslTruststorePassword,
            RowType rowType,
            int[] keyIndices,
            boolean aggregateMode,
            String aggregateFieldName) {
        
        this.url = url;
        this.keySpace = keySpace;
        this.tableName = tableName;
        this.username = username;
        this.password = password;
        this.cacheMaxRows = cacheMaxRows;
        this.cacheTtl = cacheTtl;
        this.maxRetries = maxRetries;
        this.connectionPoolLocalSize = connectionPoolLocalSize;
        this.connectionPoolRemoteSize = connectionPoolRemoteSize;
        this.connectionTimeout = connectionTimeout;
        this.requestTimeout = requestTimeout;
        this.consistencyLevel = consistencyLevel;
        this.sslEnabled = sslEnabled;
        this.sslKeystorePath = sslKeystorePath;
        this.sslKeystorePassword = sslKeystorePassword;
        this.sslTruststorePath = sslTruststorePath;
        this.sslTruststorePassword = sslTruststorePassword;
        this.rowType = rowType;
        this.sourceRowType = null; // 将在 open() 方法中初始化
        this.keyIndices = keyIndices;
        this.aggregateMode = aggregateMode;
        this.aggregateFieldName = aggregateFieldName;
    }

    @Override
    public void open(FunctionContext context) throws Exception {
        LOG.info("Opening ScyllaDB lookup function for {}.{}", keySpace, tableName);
        
        // 创建连接
        String[] hostAndPort = url.split(":");
        String host = hostAndPort[0];
        int port = hostAndPort.length > 1 ? Integer.parseInt(hostAndPort[1]) : 9042;
        
        this.cluster = Cluster.builder()
                .addContactPoint(host)
                .withPort(port)
                .withCredentials(username, password)
                .build();
        
        this.session = cluster.connect(keySpace);
        
        // 在聚合模式下，需要使用原始表的 schema 进行反序列化
        if (aggregateMode) {
            // 创建原始表的 RowType（不包含聚合字段）
            this.sourceRowType = createOriginalRowType();
            this.deserializer = new ScyllaDBRowDataDeserializer(sourceRowType, null);
        } else {
            // 普通模式使用传入的 rowType
            this.sourceRowType = rowType;
            this.deserializer = new ScyllaDBRowDataDeserializer(rowType, null);
        }
        
        // 准备查找语句
        prepareLookupStatement();
        
        LOG.info("ScyllaDB lookup function opened successfully");
    }

    public void eval(Object... keys) {
        String cacheKey = buildCacheKey(keys);
        
        // 检查缓存
        CacheEntry entry = cache.get(cacheKey);
        if (entry != null && !entry.isExpired()) {
            if (aggregateMode) {
                // 聚合模式：缓存中存储的就是聚合后的结果
                collect(entry.rows.get(0));
            } else {
                // 普通模式：逐条返回
                for (RowData row : entry.rows) {
                    collect(row);
                }
            }
            return;
        }
        
        // 执行查询
        try {
            // 转换键值为Java原生类型
            Object[] convertedKeys = convertKeysToJavaTypes(keys);
            
            BoundStatement boundStatement = lookupStatement.bind(convertedKeys);
            // boundStatement.setConsistencyLevel(ConsistencyLevel.valueOf(consistencyLevel));
            
            ResultSet resultSet = session.execute(boundStatement);
            List<Row> rows = resultSet.all();
            
            if (aggregateMode) {
                // 聚合模式：将多条记录聚合为一条
                List<RowData> lookupResults = new java.util.ArrayList<>();
                for (Row row : rows) {
                    RowData rowData = deserializer.deserialize(row);
                    lookupResults.add(rowData);
                }
                
                // 创建聚合结果（即使没有查询结果也要创建）
                RowData aggregatedRow = createAggregatedRow(lookupResults, keys);
                collect(aggregatedRow);
                
                // 更新缓存（缓存聚合后的结果）
                if (cacheMaxRows > 0) {
                    List<RowData> cacheData = new java.util.ArrayList<>();
                    cacheData.add(aggregatedRow);
                    updateCache(cacheKey, cacheData);
                }
            } else {
                // 普通模式：逐条返回
                List<RowData> rowDataList = new java.util.ArrayList<>();
                for (Row row : rows) {
                    RowData rowData = deserializer.deserialize(row);
                    rowDataList.add(rowData);
                    collect(rowData);
                }
                
                // 更新缓存
                if (cacheMaxRows > 0) {
                    updateCache(cacheKey, rowDataList);
                }
            }
            
        } catch (Exception e) {
            LOG.error("Failed to lookup data from ScyllaDB", e);
            throw new RuntimeException("Failed to lookup data from ScyllaDB", e);
        }
    }

    @Override
    public void close() throws Exception {
        LOG.info("Closing ScyllaDB lookup function");
        
        if (session != null) {
            session.close();
        }
        
        if (cluster != null) {
            cluster.close();
        }
        
        if (cache != null) {
            cache.clear();
        }
        
        LOG.info("ScyllaDB lookup function closed");
    }

    private void prepareLookupStatement() {
        StringBuilder sql = new StringBuilder();
        sql.append("SELECT * FROM ").append(keySpace).append(".").append(tableName);
        sql.append(" WHERE ");
        
        // 构建 WHERE 子句，基于键索引
        // keyIndices 中存储的是聚合表（rowType）中字段的索引
        // 需要通过聚合表的字段名来获取对应的数据库表字段名
        for (int i = 0; i < keyIndices.length; i++) {
            if (i > 0) {
                sql.append(" AND ");
            }
            
            // 从聚合表（rowType）中获取字段名
            String aggregateFieldName = rowType.getFieldNames().get(keyIndices[i]);
            
            // 检查这个字段是否是聚合字段
            if (aggregateFieldName.equals(this.aggregateFieldName)) {
                throw new IllegalStateException("聚合字段不能作为查询键: " + aggregateFieldName);
            }
            
            // 使用字段名构建查询条件
            // 在聚合模式下，主键字段在聚合表和原始表中应该是一致的
            sql.append(aggregateFieldName).append(" = ?");
        }
        
        this.lookupStatement = session.prepare(sql.toString());
        LOG.info("Prepared lookup statement: {}", sql.toString());
    }

    private String buildCacheKey(Object... keys) {
        StringBuilder sb = new StringBuilder();
        for (int i = 0; i < keys.length; i++) {
            if (i > 0) {
                sb.append("|");
            }
            sb.append(keys[i] != null ? keys[i].toString() : "null");
        }
        return sb.toString();
    }

    private void updateCache(String cacheKey, List<RowData> rows) {
        // 简单的 LRU 缓存实现
        if (cache.size() >= cacheMaxRows) {
            // 清理过期条目
            cache.entrySet().removeIf(entry -> entry.getValue().isExpired());
            
            // 如果仍然超过限制，清理最旧的条目
            if (cache.size() >= cacheMaxRows) {
                cache.clear();
            }
        }
        
        cache.put(cacheKey, new CacheEntry(rows, System.currentTimeMillis() + cacheTtl.toMillis()));
    }

    private Object[] convertKeysToJavaTypes(Object[] keys) {
        Object[] convertedKeys = new Object[keys.length];
        for (int i = 0; i < keys.length; i++) {
            convertedKeys[i] = convertToJavaType(keys[i]);
        }
        return convertedKeys;
    }

    private Object convertToJavaType(Object value) {
        if (value == null) {
            return null;
        }
        
        // 处理Flink的内部数据类型
        if (value instanceof StringData) {
            return ((StringData) value).toString();
        } else if (value instanceof TimestampData) {
            return ((TimestampData) value).toLocalDateTime();
        } else if (value instanceof DecimalData) {
            return ((DecimalData) value).toBigDecimal();
        } else if (value instanceof ArrayData) {
            // 处理 Set 类型（将 ArrayData 转换为 Set<String>）
            ArrayData arrayData = (ArrayData) value;
            Set<String> setData = new java.util.HashSet<>();
            for (int i = 0; i < arrayData.size(); i++) {
                StringData stringElement = arrayData.getString(i);
                if (stringElement != null) {
                    setData.add(stringElement.toString());
                }
            }
            return setData;
        } else if (value instanceof byte[]) {
            return java.nio.ByteBuffer.wrap((byte[]) value);
        } else {
            // 对于其他类型（如基本类型），直接返回
            return value;
        }
    }

    /**
     * 将 Java 原生类型转换为 Flink 内部数据类型
     */
    private Object convertToFlinkType(Object value, LogicalType targetType) {
        if (value == null) {
            return null;
        }
        
        // 如果已经是 Flink 类型，直接返回
        if (value instanceof StringData || value instanceof TimestampData || 
            value instanceof DecimalData || value instanceof ArrayData) {
            return value;
        }
        
        // 根据目标类型进行转换
        switch (targetType.getTypeRoot()) {
            case CHAR:
            case VARCHAR:
                return StringData.fromString(value.toString());
            case BOOLEAN:
                return value instanceof Boolean ? (Boolean) value : Boolean.valueOf(value.toString());
            case TINYINT:
                return value instanceof Byte ? (Byte) value : Byte.valueOf(value.toString());
            case SMALLINT:
                return value instanceof Short ? (Short) value : Short.valueOf(value.toString());
            case INTEGER:
                return value instanceof Integer ? (Integer) value : Integer.valueOf(value.toString());
            case BIGINT:
                return value instanceof Long ? (Long) value : Long.valueOf(value.toString());
            case FLOAT:
                return value instanceof Float ? (Float) value : Float.valueOf(value.toString());
            case DOUBLE:
                return value instanceof Double ? (Double) value : Double.valueOf(value.toString());
            case DECIMAL:
                if (value instanceof BigDecimal) {
                    return DecimalData.fromBigDecimal((BigDecimal) value, 10, 2);
                } else {
                    return DecimalData.fromBigDecimal(new BigDecimal(value.toString()), 10, 2);
                }
            case TIMESTAMP_WITHOUT_TIME_ZONE:
                if (value instanceof LocalDateTime) {
                    return TimestampData.fromLocalDateTime((LocalDateTime) value);
                } else {
                    return TimestampData.fromLocalDateTime(LocalDateTime.parse(value.toString()));
                }
            case BINARY:
            case VARBINARY:
                if (value instanceof byte[]) {
                    return (byte[]) value;
                } else {
                    return value.toString().getBytes();
                }
            default:
                // 对于其他类型，转换为字符串
                return StringData.fromString(value.toString());
        }
    }

    private RowData createAggregatedRow(List<RowData> lookupResults, Object... keys) {
        if (!aggregateMode) {
            throw new IllegalStateException("非聚合模式下不能调用 createAggregatedRow 方法");
        }
        
        // 将查找结果转换为 ArrayData
        GenericArrayData arrayData = new GenericArrayData(lookupResults.toArray(new RowData[0]));
        
        // 构建聚合结果，需要包含所有目标字段
        GenericRowData aggregatedRow = new GenericRowData(rowType.getFieldCount());
        
        // 设置所有字段
        for (int i = 0; i < rowType.getFieldCount(); i++) {
            String fieldName = rowType.getFieldNames().get(i);
            
            if (fieldName.equals(aggregateFieldName)) {
                // 设置聚合字段
                aggregatedRow.setField(i, arrayData);
            } else {
                // 设置非聚合字段（例如主键字段）
                Object fieldValue = null;
                
                // 优先从查找结果中获取字段值
                if (!lookupResults.isEmpty()) {
                    // 查找在 sourceRowType 中对应的字段索引
                    int sourceFieldIndex = findFieldIndexInSourceRowType(fieldName);
                    if (sourceFieldIndex >= 0 && sourceFieldIndex < lookupResults.get(0).getArity()) {
                        RowData firstRow = lookupResults.get(0);
                        fieldValue = firstRow.isNullAt(sourceFieldIndex) ? 
                            null : getFieldValue(firstRow, sourceFieldIndex, sourceRowType.getTypeAt(sourceFieldIndex));
                    }
                }
                
                // 如果从查找结果中没有获取到值，尝试从查询键中获取
                if (fieldValue == null && keys != null) {
                    // 查找字段在聚合表中的索引位置，检查是否为查询键
                    for (int keyIdx = 0; keyIdx < keyIndices.length; keyIdx++) {
                        if (keyIndices[keyIdx] == i) {
                            // 这个字段是查询键，使用传入的键值
                            Object keyValue = keys.length > keyIdx ? keys[keyIdx] : null;
                            // 转换为 Flink 内部数据类型
                            fieldValue = convertToFlinkType(keyValue, rowType.getTypeAt(i));
                            break;
                        }
                    }
                }
                
                // 如果仍然没有值，但这是一个键字段，直接从keys中按索引获取
                if (fieldValue == null && keys != null) {
                    // 检查字段名是否在原始表的键字段中
                    int sourceKeyIndex = findFieldIndexInSourceRowType(fieldName);
                    if (sourceKeyIndex >= 0) {
                        // 检查这个字段是否对应某个查询键
                        for (int keyIdx = 0; keyIdx < keyIndices.length; keyIdx++) {
                            // keyIndices 存储的是聚合表中的字段索引，我们需要通过字段名匹配
                            String keyFieldName = rowType.getFieldNames().get(keyIndices[keyIdx]);
                            if (keyFieldName.equals(fieldName) && keyIdx < keys.length) {
                                Object keyValue = keys[keyIdx];
                                // 转换为 Flink 内部数据类型
                                fieldValue = convertToFlinkType(keyValue, rowType.getTypeAt(i));
                                break;
                            }
                        }
                    }
                }
                
                aggregatedRow.setField(i, fieldValue);
            }
        }
        
        return aggregatedRow;
    }
    
    /**
     * 在原始表的 RowType 中查找指定字段的索引
     */
    private int findFieldIndexInSourceRowType(String fieldName) {
        if (sourceRowType == null) {
            return -1;
        }
        
        List<String> fieldNames = sourceRowType.getFieldNames();
        for (int i = 0; i < fieldNames.size(); i++) {
            if (fieldNames.get(i).equals(fieldName)) {
                return i;
            }
        }
        
        return -1;
    }
    
    /**
     * 根据字段类型安全地获取字段值
     */
    private Object getFieldValue(RowData row, int fieldIndex, LogicalType fieldType) {
        if (row.isNullAt(fieldIndex)) {
            return null;
        }
        
        switch (fieldType.getTypeRoot()) {
            case CHAR:
            case VARCHAR:
                return row.getString(fieldIndex);
            case BOOLEAN:
                return row.getBoolean(fieldIndex);
            case TINYINT:
                return row.getByte(fieldIndex);
            case SMALLINT:
                return row.getShort(fieldIndex);
            case INTEGER:
                return row.getInt(fieldIndex);
            case BIGINT:
                return row.getLong(fieldIndex);
            case FLOAT:
                return row.getFloat(fieldIndex);
            case DOUBLE:
                return row.getDouble(fieldIndex);
            case DECIMAL:
                return row.getDecimal(fieldIndex, 10, 2); // 使用默认精度
            case TIMESTAMP_WITHOUT_TIME_ZONE:
                return row.getTimestamp(fieldIndex, 3); // 使用默认精度
            case BINARY:
            case VARBINARY:
                return row.getBinary(fieldIndex);
            case ARRAY:
                return row.getArray(fieldIndex);
            default:
                // 对于其他类型，返回字符串表示
                return row.getString(fieldIndex);
        }
    }

    /**
     * 在聚合模式下，创建原始表的 RowType（不包含聚合字段）
     * 根据 SQL定义推导出原始表的结构
     */
    private RowType createOriginalRowType() {
        if (!aggregateMode) {
            return rowType;
        }
        
        // 在聚合模式下，需要从聚合字段的类型中提取原始字段
        // 假设聚合字段是 ARRAY<ROW<...>>类型
        for (int i = 0; i < rowType.getFieldCount(); i++) {
            String fieldName = rowType.getFieldNames().get(i);
            if (fieldName.equals(aggregateFieldName)) {
                LogicalType fieldType = rowType.getTypeAt(i);
                if (fieldType instanceof ArrayType) {
                    ArrayType arrayType = (ArrayType) fieldType;
                    LogicalType elementType = arrayType.getElementType();
                    if (elementType instanceof RowType) {
                        // 返回聚合字段中的 ROW 类型，这就是原始表的结构
                        return (RowType) elementType;
                    }
                }
                break;
            }
        }
        
        // 如果找不到聚合字段，返回原始 rowType
        return rowType;
    }

    private static class CacheEntry {
        final List<RowData> rows;
        final long expireTime;

        CacheEntry(List<RowData> rows, long expireTime) {
            this.rows = rows;
            this.expireTime = expireTime;
        }

        boolean isExpired() {
            return System.currentTimeMillis() > expireTime;
        }
    }
} 