package com.navinfo.tripanalysis.offline.util;

import org.apache.commons.lang3.StringUtils;
import org.apache.spark.sql.SparkSession;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.util.HashSet;
import java.util.Properties;
import java.util.Set;

/**
 * @author 沈东生
 */
/**
 * Utility for constructing a {@link SparkSession.Builder} pre-configured from an
 * application {@link Properties} object (Spark settings, MongoDB connector URIs,
 * Hive support, HDFS output-committer tweaks).
 *
 * @author 沈东生
 */
public final class SparkAppUtils {
    private static final Logger logger = LoggerFactory.getLogger(SparkAppUtils.class);

    /**
     * Spark parameter keys that must NOT be overridden from the properties file.
     * NOTE(review): currently empty — populate with the keys that are fixed by the
     * deployment, otherwise the filter at config time is a no-op.
     */
    private static final Set<String> EXCLUDE_KEYS = new HashSet<>();

    /** Utility class: no instances. */
    private SparkAppUtils() {
    }

    /**
     * Builds a {@link SparkSession.Builder} from the given properties.
     * <p>
     * Every property whose key starts with {@code "spark"} (and is not excluded)
     * is forwarded verbatim to the builder. A MongoDB connection URI is assembled
     * from {@code mongo.username}, {@code mongo.password}, {@code mongo.url},
     * {@code mongo.database} and {@code mongo.collection} (defaulting the
     * collection to {@code "demo"}), then set as both the Spark-Mongo input and
     * output URI. Hive support is enabled.
     *
     * @param props application configuration; expected to contain the
     *              {@code spark.*} and {@code mongo.*} keys described above
     * @return a builder ready for {@code getOrCreate()}
     */
    public static SparkSession.Builder getSparkBuilder(Properties props) {
        SparkSession.Builder builder = SparkSession.builder();
        props.forEach((key0, value0) -> {
            String key = key0.toString();
            String value = value0.toString();
            if (StringUtils.startsWith(key, "spark") && StringUtils.isNotEmpty(value) && !EXCLUDE_KEYS.contains(key)) {
                builder.config(key, value);
                logger.info("-->set {}:{} to SparkSession from properties.", key, value);
            }
        });

        // Assemble mongodb://[user:pass@]host[:port]/database.collection
        StringBuilder sbMongoUri = new StringBuilder();
        sbMongoUri.append("mongodb://");
        String mongoUser = props.getProperty("mongo.username");
        String mongoPass = props.getProperty("mongo.password");
        if (StringUtils.isNotEmpty(mongoUser) && StringUtils.isNotEmpty(mongoPass)) {
            sbMongoUri.append(mongoUser).append(":").append(mongoPass).append("@");
        }
        // NOTE(review): if mongo.url or mongo.database is absent the literal string
        // "null" ends up in the URI — confirm these keys are always provided, or
        // fail fast here instead.
        sbMongoUri.append(props.getProperty("mongo.url"));
        sbMongoUri.append("/").append(props.getProperty("mongo.database"));
        String mongoCollectionName = props.getProperty("mongo.collection");
        if (StringUtils.isEmpty(mongoCollectionName)) {
            // Fall back to a default collection when none is configured.
            mongoCollectionName = "demo";
        }
        sbMongoUri.append(".").append(mongoCollectionName);

        // Mask the password before logging: never write credentials to the log.
        String loggedUri = sbMongoUri.toString();
        if (StringUtils.isNotEmpty(mongoPass)) {
            loggedUri = loggedUri.replace(mongoPass, "****");
        }
        logger.info("spark.mongodb.uri:{}", loggedUri);

        // Configure the Spark-Mongo connector (same URI for input and output).
        builder.config("spark.mongodb.input.uri", sbMongoUri.toString());
        builder.config("spark.mongodb.output.uri", sbMongoUri.toString());

        // Do not emit the _SUCCESS marker file when saving output to HDFS.
        builder.config("mapreduce.fileoutputcommitter.marksuccessfuljobs", "false");

        // Enable Hive support.
        builder.enableHiveSupport();

        return builder;
    }
}
