package com.dukk.espark.core;

import com.dukk.espark.utils.ConfigKit;
import com.mongodb.spark.config.WriteConfig;
import org.apache.spark.sql.SQLContext;
import java.io.Serializable;
import java.util.HashMap;
import java.util.Map;

/**
 * DB session factory: builds MongoDB connection/configuration objects
 * from the application's {@link ConfigKit} settings.
 *
 * @author shengshi_feiyang@yeah.net
 * @version 2020-08-22
 */
public class DbFactory implements Serializable {

    // NOTE(review): SQLContext is generally not Serializable; if instances of
    // this class are shipped to Spark executors, this field may need to be
    // transient — confirm against actual usage.
    private final SQLContext sqlContext;
    private final ConfigKit configKit;

    /**
     * Creates a factory bound to the given Spark SQL context and configuration.
     *
     * @param sqlContext Spark SQL context used to create Mongo write configs
     * @param configKit  source of the MongoDB URI and database name
     */
    public DbFactory(SQLContext sqlContext, ConfigKit configKit) {
        this.configKit = configKit;
        this.sqlContext = sqlContext;
    }

    /**
     * Builds a MongoDB <em>write</em> configuration for the given collection.
     *
     * @param collectionName name of the target collection to write to
     * @return a {@link WriteConfig} pointing at the configured URI, database
     *         and the supplied collection
     */
    public WriteConfig mongoWriteConfig(String collectionName) {
        Map<String, String> writerConfigMap = new HashMap<>();
        writerConfigMap.put("uri", configKit.getEsparkMongodbUrl());
        writerConfigMap.put("database", configKit.getEsparkMongodbDatabase());
        writerConfigMap.put("collection", collectionName);
        return WriteConfig.create(this.sqlContext).withOptions(writerConfigMap);
    }

    /**
     * Returns the MongoDB connection info (URI and database name).
     *
     * @return a {@code DBInfo} populated from the configuration
     */
    public DBInfo getMongoDBInfo() {
        DBInfo dbInfo = new DBInfo();
        dbInfo.setUrl(configKit.getEsparkMongodbUrl());
        dbInfo.setDatabaseName(configKit.getEsparkMongodbDatabase());
        return dbInfo;
    }
}
