package mongodb.spark.examples;

import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.sql.SQLContext;
import org.apache.spark.sql.SparkSession;

import com.mongodb.spark.MongoSpark;

/**
 * Spark + MongoDB test against 10 million documents.
 *
 * @author 戴永杰 (Dai Yongjie)
 *
 * @date 2018-05-18 15:06:44
 * @version V1.0
 *
 */
public class BigDataTest {

	/**
	 * Entry point: connects a local Spark session to a MongoDB collection.
	 *
	 * @param args optional; args[0] may supply a MongoDB connection URI,
	 *             overriding the built-in default (backward compatible:
	 *             with no arguments the original URI is used).
	 */
	public static void main(String[] args) {

		// NOTE(security): credentials are hard-coded in the default URI.
		// Prefer supplying the URI via args[0] or external configuration
		// instead of committing secrets to source control.
		String uri = args.length > 0
				? args[0]
				: "mongodb://dataviewer:dataviewer2017@10.0.14.24:27017/admin.bond_sentiment_news";

		SparkSession spark = SparkSession.builder()
				.master("local")
				.appName("MongoSparkConnectorIntro")
				.config("spark.mongodb.input.uri", uri)
				.config("spark.mongodb.output.uri", uri)
				.getOrCreate();

		// try-with-resources guarantees the JavaSparkContext is closed;
		// the original leaked both the context and the session.
		try (JavaSparkContext jsc = new JavaSparkContext(spark.sparkContext())) {

			// Obtained for ad-hoc SQL over collections loaded via MongoSpark;
			// currently unused pending the query experiments below.
			SQLContext sqlContext = SQLContext.getOrCreate(jsc.sc());

			// TODO(review): previous commented-out experiments loaded the
			// collection with MongoSpark.load(jsc).toDF(Docs.class), registered
			// it as temp table "docs" and ran projections over _id.oid etc.
			// Re-add here using spark.sql(...) when the Docs bean is available.

		} finally {
			// Stop the underlying SparkContext even if context creation failed.
			spark.stop();
		}
	}

}
