package com.dukk.espark.core;

import com.mongodb.client.MongoCollection;
import com.mongodb.spark.MongoConnector;
import com.mongodb.spark.config.ReadConfig;
import com.mongodb.spark.config.WriteConfig;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.function.Function;
import org.apache.spark.sql.SQLContext;
import org.bson.Document;

import java.io.Serializable;
import java.util.Collection;
import java.util.HashMap;
import java.util.Map;


/**
 * Base DAO for MongoDB access via the MongoDB Spark connector.
 *
 * <p>Builds the {@link ReadConfig} / {@link WriteConfig} required by the connector
 * from the injected {@link DBInfo} (uri + database name), and provides a helper to
 * drop the target collection up front (i.e. "overwrite" write semantics).
 * Concrete subclasses supply the target collection via {@link #getCollectionName()}.
 *
 * @version 2020-09-07
 * @author shengshi_feiyang@yeah.net
 */
public abstract class BaseMongoDao implements Serializable {

    /** Spark SQL context from which connector read/write configs are derived. */
    protected SQLContext sqlContext;

    /** Spark context used to obtain a {@link MongoConnector} for direct collection operations. */
    protected JavaSparkContext javaSparkContext;

    /** Connection details (MongoDB uri and database name) for the target instance. */
    protected DBInfo dbInfo;


    /**
     * @param javaSparkContext Spark context for connector operations
     * @param sqlContext       Spark SQL context for building read/write configs
     * @param dbInfo           MongoDB connection details (uri, database name)
     */
    public BaseMongoDao(JavaSparkContext javaSparkContext, SQLContext sqlContext, DBInfo dbInfo){
        this.javaSparkContext = javaSparkContext;
        this.sqlContext = sqlContext;
        this.dbInfo = dbInfo;
    }


    /**
     * Builds the MongoDB read configuration for this DAO's own collection
     * (as reported by {@link #getCollectionName()}).
     *
     * @return read configuration targeting {@code getCollectionName()}
     */
    public ReadConfig mongoReadConfig(){
        // Delegate to the parameterized overload so the option keys live in one place.
        return mongoReadConfig(getCollectionName());
    }

    /**
     * Builds the MongoDB read configuration for an arbitrary collection
     * in the configured database.
     *
     * @param collectionName name of the collection to read from
     * @return read configuration targeting {@code collectionName}
     */
    public ReadConfig mongoReadConfig(String collectionName){
        return ReadConfig.create(this.sqlContext).withOptions(connectionOptions(collectionName));
    }

    /**
     * Builds the MongoDB write configuration for this DAO's own collection.
     *
     * @return write configuration targeting {@code getCollectionName()}
     */
    public WriteConfig mongoWriteConfig(){
        return WriteConfig.create(this.sqlContext).withOptions(connectionOptions(getCollectionName()));
    }


    /**
     * Drops the target collection up front, giving "overwrite" semantics for a
     * subsequent save. Dropping a non-existent collection is a no-op in MongoDB.
     */
    public void dropCollection(){
        // Fix: the collection's document type is org.bson.Document, not java.util.Collection.
        // The original Collection.class only compiled because drop() never decodes documents;
        // any other operation on MongoCollection<Collection> would fail (no codec exists).
        MongoConnector.create(javaSparkContext).withCollectionDo(mongoWriteConfig(), Document.class, new Function<MongoCollection<Document>, Document>() {
            @Override
            public Document call(MongoCollection<Document> collection) throws Exception {
                collection.drop();
                return null;
            }
        });
    }


    /**
     * @return the name of the MongoDB collection this DAO operates on
     */
    public abstract String getCollectionName();

    /**
     * Common connector options shared by the read and write configurations.
     *
     * @param collectionName target collection name
     * @return mutable option map with {@code uri}, {@code database} and {@code collection} set
     */
    private Map<String, String> connectionOptions(String collectionName) {
        Map<String, String> options = new HashMap<String, String>();
        options.put("uri", this.dbInfo.getUrl());
        options.put("database", this.dbInfo.getDatabaseName());
        options.put("collection", collectionName);
        return options;
    }

}
