package com.mayy.spark.demo;

import org.apache.spark.api.java.function.FilterFunction;
import org.apache.spark.sql.SparkSession;

/**
 * Minimal-footprint local Spark job intended for stepping through Spark internals
 * in a debugger. Builds a single-core, low-memory {@link SparkSession}, runs a
 * trivial range/filter pipeline, and shuts the session down.
 */
public class SparkDebugDemo {
    public static void main(String[] args) {
        // Local-mode session with a minimal memory configuration.
        // NOTE(review): spark.driver.memory cannot actually be changed once the
        // driver JVM is running (as it is here, in local mode) — the setting is
        // kept for documentation value only; confirm via spark-submit if a real
        // driver heap limit is needed.
        SparkSession spark = SparkSession.builder()
                .appName("Spark Debug Demo")
                .master("local[1]")  // single core, so execution is serial and easy to step through
                .config("spark.driver.memory", "512m")
                .config("spark.executor.memory", "256m")
                .config("spark.memory.fraction", "0.5")  // lower unified-memory fraction
                .config("spark.memory.storageFraction", "0.2")
                // Disable adaptive query execution so the physical plan is stable
                // and predictable while debugging.
                .config("spark.sql.adaptive.enabled", "false")
                .config("spark.sql.adaptive.coalescePartitions.enabled", "false")
                .config("spark.sql.adaptive.skewJoin.enabled", "false")
                .config("spark.sql.autoBroadcastJoinThreshold", "-1")  // disable broadcast joins
                .config("spark.sql.join.preferSortMergeJoin", "true")
                .config("spark.default.parallelism", "1")
                .config("spark.sql.shuffle.partitions", "1")
                .config("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
                .config("spark.kryoserializer.buffer.max", "64m")
                .config("spark.eventLog.enabled", "false")  // disable event log to save memory
                .config("spark.ui.enabled", "false")        // disable Web UI to save memory
                .getOrCreate();

        try {
            // Sample pipeline: keep the even numbers in [0, 10) and print them.
            spark.range(10)
                    .filter(
                            (FilterFunction<Long>) x ->
                                    x % 2 == 0
                    )  // set a breakpoint here
                    .show();

        } finally {
            // Always release the session, even if the job above throws.
            spark.stop();
        }
    }
}
