package com.study.spark.scala.mongo

import org.apache.spark.{SparkConf, SparkContext}
import org.bson.Document

/**
 * Demonstrates reading from and writing to MongoDB with Spark.
 * Reference: https://docs.mongodb.com/spark-connector/current/scala-api/
 *
 * @author stephen
 * @date 2019-08-07 18:20
 */
object SparkMongoDemo {

  def main(args: Array[String]): Unit = {
    // Configure default read and write URIs so loadFromMongoDB()/saveToMongoDB()
    // can run without per-operation ReadConfig/WriteConfig overrides.
    val conf = new SparkConf()
      .setMaster("local")
      .setAppName("MongoSparkConnector")
      .set("spark.mongodb.input.uri", "mongodb://127.0.0.1/test.myCollection")
      .set("spark.mongodb.output.uri", "mongodb://127.0.0.1/test.myCollection")
    val sc = new SparkContext(conf)

    try {
      import com.mongodb.spark._
      // Read
      //val readConfig = ReadConfig(Map("collection" -> "spark", "readPreference.name" -> "secondaryPreferred"), Some(ReadConfig(sc)))
      //val readRdd = MongoSpark.load(sc)
      val readRdd = sc.loadFromMongoDB() // requires the implicit conversions from com.mongodb.spark._
      //val readRdd = sc.loadFromMongoDB(readConfig)
      readRdd.foreach(println(_))

      // Write
      //val documents = sc.parallelize((1 to 10).map(i => Document.parse(s"{test: $i}")))
      // Convert the Scala List to a java.util.List so it can be stored in a BSON Document.
      import scala.collection.JavaConverters._
      val documents = sc.parallelize(
        Seq(new Document("fruits", List("apples", "oranges", "pears").asJava))
      )
      //MongoSpark.save(documents)
      documents.saveToMongoDB()
      //documents.saveToMongoDB(WriteConfig(Map("uri" -> "mongodb://example.com/database.collection")))
    } finally {
      // Fix: the SparkContext was never stopped, leaking the local Spark
      // runtime (threads, UI port) when main is invoked in-process.
      sc.stop()
    }
  }
}
