package com.als;

import com.als.config.DatabaseConfig;
import lombok.extern.slf4j.Slf4j;
import org.yaml.snakeyaml.Yaml;

import java.io.InputStream;
import java.util.Map;

@Slf4j
public class YamlConfigReader {

    /** Utility class — not meant to be instantiated. */
    private YamlConfigReader() {
    }

    /**
     * Loads the MySQL database configuration from a YAML file on the classpath.
     *
     * <p>Expected YAML layout: a top-level {@code database} map containing a
     * {@code mysql} map with {@code url}/{@code username}/{@code password}/
     * {@code driver}/{@code query} entries, plus optional
     * {@code connection-pool} and {@code spark-jdbc} sub-maps.
     *
     * @param configFile classpath-relative name of the YAML resource
     * @return the populated {@link DatabaseConfig}
     * @throws RuntimeException if the resource is missing, a required section
     *                          is absent, or parsing fails (original cause is preserved)
     */
    public static DatabaseConfig loadDatabaseConfig(String configFile) {
        // try-with-resources so the classpath stream is always closed
        // (the previous version leaked it on every call).
        try (InputStream inputStream = YamlConfigReader.class
                .getClassLoader()
                .getResourceAsStream(configFile)) {

            if (inputStream == null) {
                throw new RuntimeException("配置文件未找到: " + configFile);
            }

            Yaml yaml = new Yaml();
            Map<String, Object> config = yaml.load(inputStream);

            // Fail fast with a descriptive message instead of an NPE
            // when a required section is missing or malformed.
            Map<String, Object> databaseConfig = requireSection(config, "database");
            Map<String, Object> mysqlConfig = requireSection(databaseConfig, "mysql");

            DatabaseConfig dbConfig = new DatabaseConfig();
            dbConfig.setUrl((String) mysqlConfig.get("url"));
            dbConfig.setUsername((String) mysqlConfig.get("username"));
            dbConfig.setPassword((String) mysqlConfig.get("password"));
            dbConfig.setDriver((String) mysqlConfig.get("driver"));
            dbConfig.setQuery((String) mysqlConfig.get("query"));

            if (mysqlConfig.containsKey("connection-pool")) {
                dbConfig.setConnectionPool(
                        readConnectionPool(requireSection(mysqlConfig, "connection-pool")));
            }

            if (mysqlConfig.containsKey("spark-jdbc")) {
                dbConfig.setSparkJdbc(
                        readSparkJdbc(requireSection(mysqlConfig, "spark-jdbc")));
            }

            return dbConfig;

        } catch (RuntimeException e) {
            // Already carries a meaningful message (e.g. missing section) — rethrow as-is
            // instead of wrapping it a second time.
            throw e;
        } catch (Exception e) {
            throw new RuntimeException("读取配置文件失败: " + e.getMessage(), e);
        }
    }

    /**
     * Returns the nested map stored under {@code key}, or throws with a
     * descriptive message when the entry is absent or not a map.
     */
    @SuppressWarnings("unchecked") // SnakeYAML returns untyped maps; shape checked above
    private static Map<String, Object> requireSection(Map<String, Object> parent, String key) {
        Object section = (parent == null) ? null : parent.get(key);
        if (!(section instanceof Map)) {
            throw new RuntimeException("配置文件缺少节点: " + key);
        }
        return (Map<String, Object>) section;
    }

    /** Maps the {@code connection-pool} section onto a {@link DatabaseConfig.ConnectionPool}. */
    private static DatabaseConfig.ConnectionPool readConnectionPool(Map<String, Object> poolConfig) {
        DatabaseConfig.ConnectionPool pool = new DatabaseConfig.ConnectionPool();
        pool.setInitialSize((Integer) poolConfig.get("initial-size"));
        pool.setMaxActive((Integer) poolConfig.get("max-active"));
        pool.setMinIdle((Integer) poolConfig.get("min-idle"));
        return pool;
    }

    /** Maps the {@code spark-jdbc} section onto a {@link DatabaseConfig.SparkJdbcConfig}. */
    private static DatabaseConfig.SparkJdbcConfig readSparkJdbc(Map<String, Object> sparkConfig) {
        DatabaseConfig.SparkJdbcConfig sparkJdbc = new DatabaseConfig.SparkJdbcConfig();
        // NOTE(review): "fetch_size" uses snake_case while every sibling key is
        // kebab-case — presumably intentional to match the YAML file; confirm.
        sparkJdbc.setFetchSize((Integer) sparkConfig.get("fetch_size"));
        sparkJdbc.setPartitionColumn((String) sparkConfig.get("partition-column"));
        sparkJdbc.setLowerBound((String) sparkConfig.get("lower-bound"));
        sparkJdbc.setUpperBound((String) sparkConfig.get("upper-bound"));
        sparkJdbc.setNumPartitions((Integer) sparkConfig.get("num-partitions"));
        return sparkJdbc;
    }

    /**
     * Alternative loader using Hutool's {@code Props} (flat dotted keys).
     *
     * <p>Only populates the basic connection fields; connection-pool and
     * Spark JDBC settings are not read by this variant.
     *
     * @param configFile name of the properties-style resource
     * @return the populated {@link DatabaseConfig}
     * @throws RuntimeException if the file cannot be read (cause preserved)
     */
    public static DatabaseConfig loadConfigWithHutool(String configFile) {
        try {
            cn.hutool.setting.dialect.Props props = new cn.hutool.setting.dialect.Props(configFile);
            DatabaseConfig config = new DatabaseConfig();
            config.setUrl(props.getStr("database.mysql.url"));
            config.setUsername(props.getStr("database.mysql.username"));
            config.setPassword(props.getStr("database.mysql.password"));
            config.setDriver(props.getStr("database.mysql.driver"));
            config.setQuery(props.getStr("database.mysql.query"));
            return config;
        } catch (Exception e) {
            throw new RuntimeException("读取配置文件失败: " + e.getMessage(), e);
        }
    }
}
