package com.gxljc.bear.crawler.util;

import com.gxljc.commons.util.config.CommConsts;
import com.gxljc.bear.crawler.base.DolphinCrawlerConf;
import org.apache.hadoop.conf.Configuration;
import org.apache.log4j.Logger;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaSparkContext;

/**
 * Spark utilities: static factory methods for creating {@code JavaSparkContext}
 * instances with common crawler settings.
 *
 * @author tanghaitao
 * @since 2022-9-2 12:08:34 PM
 */
public class SparkUtil {
	private static final Logger LOG = Logger.getLogger(SparkUtil.class);

	/** Default executor memory used when the caller does not specify one. */
	private static final String DEFAULT_EXECUTOR_MEMORY = "10g";

	/** Utility class — not instantiable. */
	private SparkUtil() {
	}

	/**
	 * Creates a {@link JavaSparkContext} with common settings and the default
	 * executor memory ({@value #DEFAULT_EXECUTOR_MEMORY}).
	 *
	 * @param appName     Spark application name
	 * @param coreMax     maximum number of cores ({@code spark.cores.max})
	 * @param parallelism default parallelism; recommended to be N times the core
	 *                    count, 2 &lt;= N &lt;= 10. Raised to {@code coreMax} if smaller.
	 * @param cls         a class contained in the application jar, used to locate
	 *                    the jar shipped to the cluster
	 * @return a configured {@link JavaSparkContext}
	 */
	public static JavaSparkContext createCommonsSparkContext(String appName,
	                                                         int coreMax, int parallelism, Class<?> cls) {
		return createCommonsSparkContext(appName, coreMax, parallelism, cls,
				DEFAULT_EXECUTOR_MEMORY);
	}

	/**
	 * Creates a {@link JavaSparkContext} with common settings.
	 *
	 * <p>The master URL is read from {@link CommConsts#SPARK_MASTER_URL} in the
	 * crawler configuration, falling back to {@code "local"}.
	 *
	 * @param appName        Spark application name
	 * @param coreMax        maximum number of cores ({@code spark.cores.max})
	 * @param parallelism    default parallelism; raised to {@code coreMax} if smaller
	 * @param cls            a class contained in the application jar, used to locate
	 *                       the jar shipped to the cluster
	 * @param executorMemory executor memory setting, e.g. {@code "10g"}
	 * @return a configured {@link JavaSparkContext}
	 */
	public static JavaSparkContext createCommonsSparkContext(String appName,
	                                                         int coreMax, int parallelism, Class<?> cls,
	                                                         String executorMemory) {
		// Parallelism below the core count wastes cores; clamp it up.
		if (parallelism < coreMax) {
			parallelism = coreMax;
		}
		Configuration conf = DolphinCrawlerConf.getInstance();
		String sparkUrl = conf.get(CommConsts.SPARK_MASTER_URL, "local");
		LOG.info("sparkUrl=" + sparkUrl);
		SparkConf sparkConf = new SparkConf().setMaster(sparkUrl)
				.setAppName(appName)
				.setJars(JavaSparkContext.jarOfClass(cls))
				.set("spark.cores.max", String.valueOf(coreMax))
				.set("spark.executor.memory", executorMemory)
				.set("spark.default.parallelism", String.valueOf(parallelism))
				.set("spark.worker.timeout", "60")
				.set("spark.akka.timeout", "60");
		return new JavaSparkContext(sparkConf);
	}

}
