package com.xnx3.obs.util;

import org.apache.hudi.QuickstartUtils;
import org.apache.spark.sql.SparkSession;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.util.Map;

/**
 * Spark client util
 * @author aly
 */
public final class SparkUtil {

    private static final Logger LOGGER = LoggerFactory.getLogger(SparkUtil.class);

    // Shared Spark client; lazily created, guarded by the SparkUtil.class lock.
    private static SparkSession sparkSession = null;

    // Utility class: static methods only, no instances.
    private SparkUtil() {
    }

    /**
     * Returns the shared {@link SparkSession}, creating it on first call.
     * <p>Synchronized so concurrent first callers cannot race and build two
     * sessions (the previous unsynchronized null-check was not thread-safe).
     * @return Spark client
     */
    public static synchronized SparkSession getSparkSession() {
        if (sparkSession == null) {
            sparkSession = SparkSession.builder().appName("Spark Session")
                    .master("local[*]")
                    .config("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
                    .getOrCreate();
        }
        return sparkSession;
    }

    /**
     * Stops the shared session if one exists and clears the cached reference,
     * so a later {@link #getSparkSession()} builds a fresh session instead of
     * returning a stopped one (the previous version left the stopped session
     * cached).
     */
    public static synchronized void stop() {
        if (sparkSession != null) {
            sparkSession.stop();
            sparkSession = null;
        }
    }

    /**
     * Gets Hudi's default write configs as provided by
     * {@link QuickstartUtils#getQuickstartWriteConfigs()} (quickstart defaults
     * for shuffle parallelism — NOTE(review): exact keys/values come from the
     * Hudi library; confirm against the Hudi version in use).
     * @return default write configs map
     */
    public static Map<String, String> getDefaultWriteConfigs() {
        return QuickstartUtils.getQuickstartWriteConfigs();
    }

}
