package mongodb.spark.examples;

import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.sql.SparkSession;

public final class GettingStarted {

	/**
	 * Entry point: builds a local {@code SparkSession} configured for the
	 * MongoDB Spark connector, derives a {@code JavaSparkContext} from it,
	 * and shuts everything down cleanly.
	 *
	 * <p>The MongoDB connection URI may be supplied as the first command-line
	 * argument; when absent, the original hard-coded development URI is used,
	 * so existing invocations behave exactly as before.
	 *
	 * @param args optional; {@code args[0]} overrides the default MongoDB URI
	 */
	public static void main(final String[] args) {
		// Honor the comment's intent: take the URI from the command line when
		// given, falling back to the hard-coded development default.
		final String uri = (args.length > 0 && !args[0].isEmpty())
				? args[0]
				: "mongodb://192.168.95.128/test.coll";

		final SparkSession spark = SparkSession.builder()
				.master("local")
				.appName("MongoSparkConnectorIntro")
				.config("spark.mongodb.input.uri", uri)   // connector read source
				.config("spark.mongodb.output.uri", uri)  // connector write target
				.getOrCreate();

		// JavaSparkContext is AutoCloseable: try-with-resources guarantees it
		// is closed even if the application logic below throws, which the
		// original bare jsc.close() did not.
		try (JavaSparkContext jsc = new JavaSparkContext(spark.sparkContext())) {
			// More application logic would go here...
		}

		// Release remaining driver resources; stop() is idempotent, so it is
		// safe even though closing the context already stopped Spark.
		spark.stop();
	}
}