package org.example;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.sql.SparkSession;

import java.util.Arrays;
import java.util.List;

public class Main {
   public static void main(String[] args) {
        // JavaSparkContext(master, appName): run Spark locally with the app name "spark".
        final JavaSparkContext sc = new JavaSparkContext("local", "spark");
        final List<Integer> nums = Arrays.asList(1, 2, 3, 4, 5, 6);
        // Distribute the local list into an RDD using the default number of slices.
        final JavaRDD<Integer> rdd = sc.parallelize(nums);
        // Print how many partitions the RDD was split into.
        System.out.println(rdd.partitions().size());
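
        // A minimal sketch: parallelize() also takes an explicit slice count, which fixes
        // the number of partitions regardless of the master URL. The variable name below
        // is illustrative only.
        final JavaRDD<Integer> rddWithSlices = sc.parallelize(nums, 4);
        System.out.println(rddWithSlices.partitions().size()); // prints 4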
        // Stop the SparkContext and release its resources.
        sc.close();

//        Scala equivalent of the setup above (master URL and app name values assumed):
//        object Main {
//            def main(args: Array[String]): Unit = {
//                val spark = SparkSession
//                    .builder()
//                    .master("local")
//                    .appName("spark")
//                    .getOrCreate()
//                val sc = spark.sparkContext
//                sc.stop()
//            }
//        }
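
        // A minimal Java sketch of the same SparkSession route shown in the Scala comment
        // above (master URL and app name are assumed values); getOrCreate() builds a fresh
        // session here because the earlier JavaSparkContext has already been closed.
        final SparkSession spark = SparkSession.builder()
                .master("local")
                .appName("spark")
                .getOrCreate();
        System.out.println(spark.sparkContext().defaultParallelism());
        spark.stop();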



    }
}
