package com.avicit.zipkinkafkaesserver.config;

import com.google.common.collect.ImmutableMap;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaSparkContext;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Scope;

import java.util.Map;

/**
 * Basic configuration for the embedded/local Spark setup.
 *
 * <p>Exposes a {@link SparkConf} bean assembled from environment-driven
 * settings plus a fixed set of Spark properties.
 */
@Configuration
public class SparkConfig {

    // Properties applied to every SparkConf built by this configuration.
    // Note: "spark.driver.allowMultipleContexts" lives here so it is applied
    // exactly once, via the loop in sparkConf().
    private final Map<String, String> sparkProperties = ImmutableMap.of(
            "spark.ui.enabled", "false",
            "spark.driver.allowMultipleContexts", "true");

    // local[*] master lets us run & test the job locally without setting up a Spark cluster
    private final String sparkMaster = getEnv("SPARK_MASTER", "local[*]");

    /**
     * Reads an environment variable, falling back to a default when unset.
     *
     * @param key          environment variable name
     * @param defaultValue value to use when the variable is not set
     * @return the environment value, or {@code defaultValue} if absent
     */
    private static String getEnv(String key, String defaultValue) {
        String result = System.getenv(key);
        return result != null ? result : defaultValue;
    }

    /**
     * Builds the base Spark configuration.
     *
     * @return a {@link SparkConf} with the master, app name, and all entries
     *         of {@code sparkProperties} applied
     */
    @Bean
    public SparkConf sparkConf() {
        SparkConf sparkConf = new SparkConf(true)
                .setMaster(sparkMaster)
                .setAppName(getClass().getName());
        // Applies "spark.ui.enabled" and "spark.driver.allowMultipleContexts";
        // the previous explicit duplicate set of the latter has been removed.
        for (Map.Entry<String, String> entry : sparkProperties.entrySet()) {
            sparkConf.set(entry.getKey(), entry.getValue());
        }
        return sparkConf;
    }

    /**
     * Creates a {@link JavaSparkContext} from the given configuration.
     *
     * <p>NOTE(review): the {@code @Bean}/{@code @Scope("prototype")} annotations
     * are commented out, so this method is NOT registered as a Spring bean and
     * must be called directly. Confirm whether that is intentional before
     * re-enabling.
     *
     * @param sparkConf the Spark configuration to wrap
     * @return a new JavaSparkContext
     */
//    @Bean
//    @Scope("prototype")
    public JavaSparkContext sparkContext(SparkConf sparkConf) {
        return new JavaSparkContext(sparkConf);
    }
}
