/*
 *    Copyright (c) 2018-2025, cloud All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are met:
 *
 * Redistributions of source code must retain the above copyright notice,
 * this list of conditions and the following disclaimer.
 * Redistributions in binary form must reproduce the above copyright
 * notice, this list of conditions and the following disclaimer in the
 * documentation and/or other materials provided with the distribution.
 * Neither the name of the pig4cloud.com developer nor the names of its
 * contributors may be used to endorse or promote products derived from
 * this software without specific prior written permission.
 * Author: cloud
 */

package net.cyweb.cloud.common.datasource.config;

import com.baomidou.dynamic.datasource.creator.DataSourceProperty;
import com.baomidou.dynamic.datasource.creator.DefaultDataSourceCreator;
import com.baomidou.dynamic.datasource.creator.druid.DruidConfig;
import com.baomidou.dynamic.datasource.provider.AbstractDataSourceProvider;

import javax.sql.DataSource;
import java.util.HashMap;
import java.util.Map;
import java.util.Objects;

import static net.cyweb.cloud.common.datasource.support.DataSourceConstants.DS_MASTER;

/**
 * Keeps the original druid configuration effective; only additional
 * extended data sources need to be registered on top of it.
 *
 * @author cloud
 * @date 2025/1/14
 */
public class MasterDataSourceProvider extends AbstractDataSourceProvider {

    /**
     * Name of the Druid proxy filter attached to the master data source.
     * Presumably a SQL-logging filter registered elsewhere in the application
     * context under this name — confirm against the filter bean definition.
     */
    private static final String SQL_LOG_FILTER = "sqlLogFilter";

    /** Connection settings (url / username / password) for the master data source. */
    private final DruidDataSourceProperties properties;

    /** Creator used to materialize a {@link DataSource} from a {@link DataSourceProperty}. */
    private final DefaultDataSourceCreator defaultDataSourceCreator;

    /**
     * Creates a provider that contributes the default master data source.
     *
     * @param defaultDataSourceCreator creator used to build the data source; must not be null
     * @param properties               druid connection properties for the master; must not be null
     * @throws NullPointerException if either argument is null
     */
    public MasterDataSourceProvider(DefaultDataSourceCreator defaultDataSourceCreator, DruidDataSourceProperties properties) {
        super(defaultDataSourceCreator);
        // Fail fast: both collaborators are required by loadDataSources(); without
        // these checks a null would only surface later as an obscure NPE.
        this.properties = Objects.requireNonNull(properties, "properties");
        this.defaultDataSourceCreator = Objects.requireNonNull(defaultDataSourceCreator, "defaultDataSourceCreator");
    }


    /**
     * Loads all data sources contributed by this provider.
     * Only the default master data source is registered here; other extended
     * data sources are expected to be added by other providers.
     *
     * @return all data sources, keyed by data source name
     */
    @Override
    public Map<String, DataSource> loadDataSources() {
        Map<String, DataSource> map = new HashMap<>();
        // Build the default master data source from the configured druid properties.
        DataSourceProperty property = new DataSourceProperty();
        property.setUsername(properties.getUsername());
        property.setPassword(properties.getPassword());
        property.setUrl(properties.getUrl());

        // Attach the named proxy filter so it applies to master connections.
        DruidConfig druidConfig = new DruidConfig();
        druidConfig.setProxyFilters(SQL_LOG_FILTER);
        property.setDruid(druidConfig);
        map.put(DS_MASTER, defaultDataSourceCreator.createDataSource(property));
        return map;
    }
}
