package spark01;

import com.mongodb.MongoClient;  
import com.mongodb.MongoClientURI;  
import com.mongodb.client.MongoDatabase;  
import com.mongodb.spark.MongoConnector;  
import com.mongodb.spark.MongoSpark;  
import com.mongodb.spark.config.ReadConfig;  
import com.mongodb.spark.config.WriteConfig;  
import com.mongodb.spark.rdd.api.java.JavaMongoRDD;  
import com.mongodb.spark.sql.helpers.StructFields;  
import org.apache.spark.SparkConf;  
import org.apache.spark.api.java.JavaRDD;  
import org.apache.spark.api.java.JavaSparkContext;  
import org.apache.spark.api.java.function.Function;  
import org.apache.spark.sql.Dataset;  
import org.apache.spark.sql.Row;  
import org.apache.spark.sql.SparkSession;  
import org.apache.spark.sql.types.DataTypes;  
import org.apache.spark.sql.types.StructType;  
import org.bson.Document;  
import org.bson.types.ObjectId;  
  
import java.util.HashMap;  
import java.util.List;  
import java.util.Map;  
  
import static java.lang.String.format;  
import static java.util.Arrays.asList;  
import static java.util.Collections.singletonList;  
  
public final class JavaIntroduction2 {

/**
 * Run this main method to see the output of this quick example.
 *
 * <p>Connects to MongoDB (dropping the target database first), then loads the
 * configured collection as a {@code Dataset<Row>} with an inferred schema and
 * prints its schema and contents.
 *
 * @param args takes an optional single argument for the connection string
 * @throws InterruptedException declared for signature compatibility; nothing in
 *         this method currently blocks on a latch
 */
public static void main(final String[] args) throws InterruptedException {
    // JavaSparkContext is Closeable (Spark 1.4+): try-with-resources stops the
    // Spark context even if loading or showing the data throws. The original
    // code leaked the context on every path.
    try (JavaSparkContext jsc = createJavaSparkContext(args)) {
        // Load the collection as a DataFrame, letting the connector infer the schema.
        Dataset<Row> df = MongoSpark.load(jsc).toDF();
        df.printSchema();
        df.show();
    }
}

/**
 * Builds a local-mode Spark context wired to the MongoDB connector.
 *
 * <p>Side effect: drops the database named in the connection string so the
 * example always starts from a clean slate.
 *
 * @param args optional single-element array holding the connection string
 * @return a new {@link JavaSparkContext} configured for MongoDB input/output
 */
private static JavaSparkContext createJavaSparkContext(final String[] args) {
    String uri = getMongoClientURI(args);
    dropDatabase(uri);
    SparkConf conf = new SparkConf()
            .setMaster("local")
            .setAppName("MongoSparkConnectorTour")
            .set("spark.app.id", "MongoSparkConnectorTour")
            // The same URI serves as both the read and write target.
            .set("spark.mongodb.input.uri", uri)
            .set("spark.mongodb.output.uri", uri);

    return new JavaSparkContext(conf);
}

/**
 * Resolves the MongoDB connection string from the program arguments.
 *
 * @param args optional single-element array holding the connection string
 * @return {@code args[0]} when supplied, otherwise a hard-coded default
 */
private static String getMongoClientURI(final String[] args) {
    if (args.length == 0) {
        // Default points at a specific dev VM (the stock example uses
        // "mongodb://localhost/test.coll"); pass a URI argument to override.
        return "mongodb://192.168.95.128/test.coll";
    }
    return args[0];
}

/**
 * Drops the database named in the given connection string.
 *
 * @param connectionString a MongoDB URI that includes a database name
 */
private static void dropDatabase(final String connectionString) {
    MongoClientURI uri = new MongoClientURI(connectionString);
    MongoClient client = new MongoClient(uri);
    try {
        client.dropDatabase(uri.getDatabase());
    } finally {
        // The original never closed the client, leaking its connection pool
        // and background monitor threads. try/finally (rather than
        // try-with-resources) keeps compatibility with pre-3.7 drivers where
        // MongoClient has close() but does not implement Closeable.
        client.close();
    }
}
}