package tech.realcpf.duckengine1.biz;

import org.apache.arrow.adbc.core.AdbcDriver;
import org.apache.calcite.sql.*;
import org.apache.calcite.sql.parser.SqlParseException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import tech.realcpf.duckengine1.duckdb.DuckDBInstance;
import tech.realcpf.duckengine1.duckdb.ResultProcessor;
import tech.realcpf.duckengine1.integration.adbc.CommonAdbcReader;
import tech.realcpf.duckengine1.sqlparser.SqlReLocateFactory;

import java.sql.SQLException;
import java.util.*;
import java.util.concurrent.ConcurrentHashMap;
import java.util.stream.Collectors;

public class BizUserInstance {

    private static final Logger LOGGER = LoggerFactory.getLogger(BizUserInstance.class);

    /** Bucket + base path read from the DuckDB session variables, e.g. "my-bucket/base". */
    private String s3Uri;

    private static final String GET_S3_URI_SQL = "select concat(getVariable('biz_s3_bucket'),'/',getVariable('biz_s3_base')) as s3_uri;\n";

    /** Per-user storage root layout: s3://&lt;bucket/base&gt;/tenant_&lt;tenantId&gt;/user_&lt;userId&gt;. */
    private static final String STRONG_BASE_FORMAT = "s3://%s/tenant_%d/user_%d";

    /** Selects the newest row (highest dd_version) per catalog from the user's catalog.csv. */
    private static final String CATALOG_BASE_SQL = """
            select * from (select id, "name", "type","file_ext", ts, dd_version,ROW_NUMBER() OVER (ORDER BY dd_version DESC) as r_num
            from '%s/catalog.csv')
            where r_num = 1;
            """;

    private final DuckDBInstance duckDBInstance;

    /** Resolved per-user S3 root; populated during {@link #init(String)}. */
    private String s3BizUserUri;

    private final Long userId;
    private final Long tenantId;

    /** The config SQL passed to init(); replayed when loading via ADBC. */
    private String initSql;

    /** Flips to true only after init(config) succeeds; guards all data-access methods. */
    private volatile boolean init = false;

    // NOTE(review): this cache is static, so multiple BizUserInstance objects in
    // the same JVM share it and catalogs with the same name from different
    // users/tenants clobber each other — confirm whether a per-instance map
    // is intended before relying on getCatalogByName across instances.
    private final static Map<String, BizCatalog> CATALOG_MAP = new ConcurrentHashMap<>();

    /**
     * Looks up a cached catalog by name.
     *
     * @param name catalog name as stored in catalog.csv
     * @return the catalog, or {@code null} if unknown
     */
    public BizCatalog getCatalogByName(String name) {
        return CATALOG_MAP.get(name);
    }

    public BizUserInstance(Long userId, Long tenantId) {
        this.duckDBInstance = new DuckDBInstance(String.valueOf(userId), String.valueOf(tenantId), new Properties());
        this.userId = userId;
        this.tenantId = tenantId;
    }

    /**
     * @return this user's S3 root URI
     * @throws BizInstanceException if {@link #init(String)} has not succeeded yet
     */
    public String getS3_BIZ_USER_URI() {
        check();
        return s3BizUserUri;
    }

    /**
     * Opens the DuckDB connection, applies the caller-supplied config SQL,
     * resolves this user's S3 root and warms the catalog cache.
     *
     * @param config SQL executed verbatim to configure the session; it must set
     *               the biz_s3_bucket / biz_s3_base session variables
     * @return true on success; false when any SQL step failed (error is logged)
     */
    public boolean init(String config) {
        try {
            this.duckDBInstance.connection(false);
            this.duckDBInstance.exec(config);
            this.s3Uri = (String) this.duckDBInstance.execWithProcessor(GET_S3_URI_SQL, ResultProcessor.SINGLE_OBJECT_PROCESSOR);
            // BUGFIX: the format is ".../tenant_%d/user_%d", so tenantId must be
            // passed before userId — the original passed (userId, tenantId) and
            // produced tenant_<userId>/user_<tenantId>.
            this.s3BizUserUri = STRONG_BASE_FORMAT.formatted(s3Uri, tenantId, userId);
            this.init = true;
            // Last-write-wins merge so a duplicate catalog name cannot make
            // toMap throw IllegalStateException and fail init.
            Map<String, BizCatalog> catalogMap = catalogs().stream()
                    .collect(Collectors.toMap(BizCatalog::getName, b -> b, (first, second) -> second));
            CATALOG_MAP.putAll(catalogMap);
            this.initSql = config;
            return true;
        } catch (SQLException e) {
            LOGGER.error("init error", e);
        }
        return false;
    }

    /** Fails fast when a data-access method is called before init() succeeded. */
    private void check() {
        if (!init) {
            throw new BizInstanceException();
        }
    }

    /**
     * Reads the newest version of every catalog entry from the user's catalog.csv.
     *
     * @return the catalogs, or an empty list when the processor returned nothing usable
     * @throws BizInstanceException if {@link #init(String)} has not succeeded yet
     */
    public List<BizCatalog> catalogs() {
        check();
        Object commonResult = this.duckDBInstance.execWithProcessor(CATALOG_BASE_SQL.formatted(s3BizUserUri), ResultProcessor.CATALOG_PROCESSOR);
        if (commonResult instanceof List<?> bizCatalogs) {
            @SuppressWarnings("unchecked") // CATALOG_PROCESSOR yields BizCatalog rows
            List<BizCatalog> typed = (List<BizCatalog>) bizCatalogs;
            return typed;
        }

        return List.of();
    }

    /**
     * Runs a biz SQL query, first rewriting catalog.table references to their
     * backing "s3://..." locations.
     *
     * @param sql user SQL referencing catalog.table names
     * @return result rows as column-name/value maps
     * @throws BizInstanceException if {@link #init(String)} has not succeeded yet
     * @throws RuntimeException wrapping the SqlParseException when sql cannot be parsed
     */
    @SuppressWarnings("unchecked") // MAP_PROCESSOR yields List<Map<String, Object>>
    public List<Map<String, Object>> queryWithBizSql(String sql) {
        check(); // consistent with the other data-access methods; s3BizUserUri must be resolved
        try {
            String newSql = SqlReLocateFactory.reTableName(sql, CATALOG_MAP, s3BizUserUri);
            LOGGER.info("new sql is {}", newSql);
            return (List<Map<String, Object>>) this.duckDBInstance.execWithProcessor(newSql, ResultProcessor.MAP_PROCESSOR);
        } catch (SqlParseException e) {
            throw new RuntimeException(e);
        }
    }

    /**
     * Loads data from another data source into this user's S3 area via ADBC.
     * <br>
     * source table =&gt; tableName
     * <br>
     * source custom sql =&gt; sql
     * <br>
     * target table name (optional, defaults to tableName) =&gt; target
     * <br>
     * target catalog =&gt; catalog
     * <br>
     * source jdbc uri =&gt; uri
     * <br>
     * source username =&gt; username
     * <br>
     * source password =&gt; password
     *
     * @param datasourceInfo connection and mapping parameters, see keys above
     * @throws BizInstanceException if {@link #init(String)} has not succeeded yet
     * @throws IllegalArgumentException when neither "tableName" nor "sql" is provided
     * @throws NullPointerException when "catalog" is missing
     */
    public void loadTableWithDataSource(Map<String, Object> datasourceInfo) {
        check(); // s3BizUserUri must be resolved before building the target path
        String tableName = (String) datasourceInfo.get("tableName");
        String customSql = (String) datasourceInfo.get("sql");
        boolean hasCustomSql = Objects.nonNull(customSql) && !customSql.isBlank();
        if (!hasCustomSql && Objects.isNull(tableName)) {
            throw new IllegalArgumentException("datasourceInfo requires either 'tableName' or 'sql'");
        }
        String sourceSql = hasCustomSql ? customSql : "select * from " + tableName;
        String targetTableName = datasourceInfo.containsKey("target")
                ? (String) datasourceInfo.get("target")
                : tableName;
        BizCatalog catalog = (BizCatalog) Objects.requireNonNull(datasourceInfo.get("catalog"), "catalog");
        Map<String, Object> params = new HashMap<>();
        AdbcDriver.PARAM_URI.set(params, String.valueOf(datasourceInfo.get(AdbcDriver.PARAM_URI.getKey())));
        AdbcDriver.PARAM_USERNAME.set(params, String.valueOf(datasourceInfo.get(AdbcDriver.PARAM_USERNAME.getKey())));
        AdbcDriver.PARAM_PASSWORD.set(params, String.valueOf(datasourceInfo.get(AdbcDriver.PARAM_PASSWORD.getKey())));
        CommonAdbcReader adbcReader = new CommonAdbcReader(params);
        String tmpName = adbcReader.tmpTableName();
        // NOTE(review): SQL built by concatenation — catalog/table names come from
        // the caller; confirm they are trusted or quote/validate the identifiers.
        String targetSql = "copy " + tmpName + " to '" + s3BizUserUri + "/" + catalog.getName() + "/" + targetTableName + "/" + targetTableName + "." + catalog.getFileExt() + "'";
        adbcReader.loadBySql(sourceSql, targetSql, initSql);
    }

}
