package com.zha

import com.mongodb.spark.MongoSpark
import org.apache.spark.api.java.JavaSparkContext
import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.sql.catalyst.expressions.GenericRowWithSchema
import org.apache.spark.sql.types.{DoubleType, IntegerType, StringType, StructField, StructType}
import org.apache.spark.sql.{DataFrame, Row, SQLContext, SaveMode, SparkSession}
import org.bson.Document

import scala.collection.mutable


/**
 * created by LMR on 2019/6/12
 */
/**
 * created by LMR on 2019/6/12
 *
 * Loads the `a_area` collection from MongoDB, projects the `_id`,
 * `area_id` and `province` fields into a typed DataFrame, and writes
 * the result out as CSV (overwriting any previous output).
 */
object DataPreProcess {

  def main(args: Array[String]): Unit = {

    val conf = new SparkConf()
      .setMaster("local[*]")
      .setAppName("MongoSparkRDD")
      .set("spark.mongodb.input.uri", "mongodb://ecmm:123456@10.25.19.3:27017/ecommerce_min.a_area")

    // Create the JavaSparkContext that MongoSpark.load(sc) expects.
    // The SparkSession built from the same conf reuses this context.
    val sc = new JavaSparkContext(conf)
    val sparkSession = SparkSession.builder().config(conf).getOrCreate()

    // Sample source document shape:
    // {
    //   "_id": ObjectId("5f081a6b0e6480e7f7f6b4b3"),
    //   "area_id": NumberInt("15400"),
    //   "province": "台湾省(886)",
    //   "city": "",
    //   "contry": "",
    //   "bm": NumberInt("710000"),
    //   "comment": null
    // }

    try {
      val docs = MongoSpark.load(sc)

      // Map each BSON document to a Row matching `schema` below.
      // `_id` may be an ObjectId or another BSON type; either way its
      // string form is stored, and a missing `_id` becomes null rather
      // than throwing a NullPointerException.
      val rows = docs.rdd
        .map { doc =>
          val oid = Option(doc.get("_id")).map(_.toString).orNull
          Row(oid, doc.get("area_id"), doc.get("province"))
        }
        .toJavaRDD()

      val schema = StructType(
        List(
          StructField("_id", StringType, nullable = true),
          StructField("area_id", IntegerType, nullable = true),
          StructField("province", StringType, nullable = true)
        )
      )

      val frame = sparkSession.createDataFrame(rows, schema)
      frame.write.mode(SaveMode.Overwrite).csv("1.csv")
    } finally {
      // Release cluster resources even if the job fails; stopping the
      // context a second time is a no-op, so both calls are safe.
      sparkSession.close()
      sc.stop()
    }

  }

}