package mongodb.spark.examples;

import java.util.List;

import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.sql.SparkSession;
import org.apache.spark.api.java.function.Function;
import org.bson.Document;
import com.mongodb.spark.MongoSpark;

import static java.util.Arrays.asList;

public class MockData {

	/**
	 * Example entry point that inserts a small set of sample "character"
	 * documents into the {@code test.characters} MongoDB collection using the
	 * MongoDB Spark connector, running Spark in local mode.
	 *
	 * @param args command-line arguments (unused)
	 */
	public static void main(String[] args) {

		// Single hard-coded example URI used for both reading and writing.
		String uri = "mongodb://192.168.95.128/test.characters";
		SparkSession spark = SparkSession.builder().master("local").appName("MongoSparkConnectorIntro")
				.config("spark.mongodb.input.uri", uri).config("spark.mongodb.output.uri", uri).getOrCreate();

		// JavaSparkContext implements Closeable; try-with-resources guarantees
		// it is closed even if the save below throws (the original only closed
		// it on the happy path).
		try (JavaSparkContext jsc = new JavaSparkContext(spark.sparkContext())) {

			// Sample documents as relaxed-JSON strings. The last entry
			// deliberately omits 'age' to show a partial document.
			List<String> characters = asList(
					"{'name': 'Bilbo Baggins', 'age': 50}",
					"{'name': 'Gandalf', 'age': 80}",
					"{'name': 'Thorin', 'age': 195}",
					"{'name': 'Balin', 'age': 178}",
					"{'name': '戴永杰', 'age': 77}",
					"{'name': 'Dwalin', 'age': 169}",
					"{'name': '罗生', 'age': 167}",
					"{'name': '佳佳', 'age': 158}",
					"{'name': '刘中原', 'age': 82}",
					"{'name': 'Bombur'}");

			// Parse each JSON string into a BSON Document on the executors and
			// write the resulting RDD to MongoDB. Document::parse replaces the
			// original anonymous Function<String, Document> inner class.
			MongoSpark.save(jsc.parallelize(characters).map(Document::parse));
		} finally {
			// Stop the session so Spark resources are released even on failure
			// (the original never stopped it).
			spark.stop();
		}

	}

}
