package com.navinfo.platform.common.service.impl;

import com.mongodb.spark.config.ReadConfig;
import com.navinfo.platform.common.service.ILoadDataChannel;
import org.apache.commons.lang.StringUtils;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.sql.SparkSession;
import org.bson.Document;

import java.util.*;

import static com.mongodb.spark.MongoSpark.load;

public enum LoadDataFromMongo implements ILoadDataChannel {
    // Singleton channel that loads a MongoDB collection into a Spark RDD
    // through the MongoDB Spark connector.
    INSTANCE
    ;

    /**
     * Reads documents from a MongoDB collection, optionally filtered and
     * projected via an aggregation pipeline.
     *
     * @param spark the active {@link SparkSession}; its underlying context is
     *              wrapped in a {@link JavaSparkContext} for the connector
     * @param param {@code param[0]} must be a {@code Map<String, String>} with keys:
     *              {@code mongoCollectionName} (required) — base collection name;
     *              {@code collectionSuffix} (optional) — appended to the base name;
     *              {@code condition} (optional) — JSON aggregation stage (e.g. a $match);
     *              {@code projection} (optional) — JSON aggregation stage (e.g. a $project)
     * @return the connector's RDD with the pipeline applied, cast to {@code T}
     *         (callers are expected to request the MongoRDD type)
     * @throws NullPointerException if {@code mongoCollectionName} is missing
     */
    @SuppressWarnings("unchecked")
    @Override
    public <T> T readData(SparkSession spark, Object ... param) {
        Map<String, String> config = (Map<String, String>) param[0];
        // Fail fast instead of silently querying a collection named "null...".
        String tableName = Objects.requireNonNull(
                config.get("mongoCollectionName"), "mongoCollectionName must be provided");
        // A missing suffix must not become the literal "null" in the collection name.
        String suffix = StringUtils.defaultString(config.get("collectionSuffix"));
        String condition = config.get("condition");
        String projection = config.get("projection");
        Map<String, String> readOverrides = new HashMap<>();
        readOverrides.put("collection", tableName + suffix);
        // Route reads to secondaries to keep analytical load off the primary.
        readOverrides.put("readPreference.name", "secondary");
        JavaSparkContext jsc = new JavaSparkContext(spark.sparkContext());
        ReadConfig readConfig = ReadConfig.create(jsc).withOptions(readOverrides);
        List<Document> pipelines = new ArrayList<>(2);
        if (StringUtils.isNotEmpty(condition)) {
            pipelines.add(Document.parse(condition));
        }
        if (StringUtils.isNotEmpty(projection)) {
            pipelines.add(Document.parse(projection));
        }
        return (T) load(jsc, readConfig).withPipeline(pipelines);
    }
}
