package com.zha

import com.mongodb.spark.MongoSpark
import org.apache.spark.SparkConf
import org.apache.spark.api.java.JavaRDD
import org.apache.spark.sql.catalyst.expressions.GenericRowWithSchema
import org.apache.spark.sql.types.{NullType, StringType, StructField, StructType}
import org.apache.spark.sql.{DataFrame, Row, SaveMode, SparkSession}


/**
 * created by LMR on 2019/6/12
 */
object DataPreProcess2 {

  /**
   * Loads a MongoDB collection into Spark, flattens the nested `_id` column to a
   * plain string-compatible value, widens all-null columns to StringType, and
   * writes the result out as CSV.
   */
  def main(args: Array[String]): Unit = {

    // NOTE(review): credentials and host are hard-coded in the connection URI —
    // move them to configuration/environment before this leaves local testing.
    val conf = new SparkConf()
      .setMaster("local[*]").setAppName("MongoSparkRDD")
      .set("spark.mongodb.input.uri", "mongodb://ecmm:123456@10.25.19.3:27017/ecommerce_min.a_store")

    val sparkSession = SparkSession.builder().config(conf).getOrCreate()
    val frame: DataFrame = MongoSpark.load(sparkSession)

    // Rebuild each row so the `_id` column (a nested ObjectId struct in Mongo)
    // becomes a flat value that matches the string-typed schema built below.
    val value1: JavaRDD[Row] = frame.rdd.map { row =>
      val typedRow = row.asInstanceOf[GenericRowWithSchema]
      val values: Array[Any] = row.schema.fields.map { field =>
        if (field.name == "_id") {
          typedRow.getAs[Any](field.name) match {
            case nested: GenericRowWithSchema => nested.get(0) // unwrap ObjectId struct -> its first field
            case null                         => null          // fix: original called .toString here and NPE'd on null _id
            case other                        => other.toString
          }
        } else {
          typedRow.getAs[Any](field.name)
        }
      }
      Row.fromSeq(values)
    }.toJavaRDD()

    // New schema: `_id` is forced to a nullable String, and any NullType column
    // (a Mongo field that was null in every sampled document) is widened to
    // String so the DataFrame can be materialized and written as CSV.
    val newFields: Array[StructField] = frame.schema.fields.map { field =>
      if (field.name == "_id") StructField("_id", StringType, nullable = true)
      else if (field.dataType == NullType) StructField(field.name, StringType, nullable = true)
      else field
    }
    val newSchema = StructType(newFields)

    val frame1 = sparkSession.createDataFrame(value1, newSchema)
    frame1.show(false)
    frame1.write.mode(SaveMode.Overwrite).csv("1.csv")
    frame1.unpersist() // no-op: frame1 was never cached; kept for interface parity
  }

}