package com.zha

import java.util.Date

import com.mongodb.spark.MongoSpark
import org.apache.spark.SparkConf
import org.apache.spark.api.java.JavaSparkContext
import org.apache.spark.sql.types._
import org.apache.spark.sql.{Row, SaveMode, SparkSession}
import org.bson.types.ObjectId

import scala.collection.mutable.ArrayBuffer
import scala.util.parsing.json.JSON


/**
 * created by LMR on 2019/6/12
 */
/**
 * Loads an order collection from MongoDB, converts selected fields to typed
 * Spark SQL columns (driven by a small JSON column->type mapping), and writes
 * the result out as CSV.
 */
object DataPreProcess3 {

  def main(args: Array[String]): Unit = {
    // Debug aid: dump the environment the job is launched under.
    println(System.getenv())

    val conf = new SparkConf()
      .setMaster("local[*]").setAppName("MongoSparkRDD")
      .set("spark.mongodb.input.uri", "mongodb://ecmm:123456@10.25.19.3:27017/ecommerce_min.a_order")
    // JavaSparkContext is what MongoSpark.load expects.
    val sc = new JavaSparkContext(conf)
    val sparkSession = SparkSession.builder().config(conf).getOrCreate()

    val mongoRdd = MongoSpark.load(sc)

    // Column name -> target type, expressed as a tiny JSON document.
    val str2 = "{\"_id\":\"ObjectId\",\"create_time\":\"timestamp\",\"update_time\":\"timestamp\"}"
    val columnTypes = str2map(str2)

    // NOTE: the row builder below and the schema builder further down iterate
    // the SAME immutable map instance, so the cell order and the StructField
    // order stay consistent.
    val rowRdd = mongoRdd.rdd
      .map(doc => {
        val cells = new ArrayBuffer[Any]()
        for ((k, v) <- columnTypes) {
          doc.get(k) match {
            // BUG FIX: the original skipped the append entirely when the field
            // was missing, shifting every later column and breaking the
            // row/schema width contract. Append an explicit null cell instead.
            case null => cells.append(null)
            case raw =>
              if (v.equals("ObjectId")) {
                // Render ObjectId columns as their hex string.
                val oid = raw match {
                  case id: ObjectId => id.toString
                  case other        => other.toString
                }
                cells.append(oid)
              } else raw match {
                case date: Date =>
                  // java.util.Date -> java.sql.Timestamp via the JVM default zone.
                  import java.time.ZoneId
                  val instant = date.toInstant
                  cells.append(java.sql.Timestamp.valueOf(instant.atZone(ZoneId.systemDefault).toLocalDateTime))
                case other =>
                  // MySQL-style zero dates cannot be represented as Timestamp; store null.
                  if (other.equals("0000-00-00 00:00:00")) cells.append(null)
                  else cells.append(other)
              }
          }
        }
        Row.fromSeq(cells)
      }).toJavaRDD()

    // Build the schema from the same mapping (same iteration order as above).
    val fields = columnTypes.map { case (name, tpe) => castToStructField(name, tpe) }.toArray
    val structType = new StructType(fields)
    val frame = sparkSession.createDataFrame(rowRdd, structType)
    frame.createTempView("aa")
    val frame1 = sparkSession.sql("select *  from aa")
    frame1.printSchema()
    frame1.show(false)
    frame1.write.mode(SaveMode.Overwrite).csv("1.csv")
    sparkSession.close()

  }

  /**
   * Parses a flat JSON object into an immutable String -> String map.
   *
   * @param jsonStr JSON text such as {"_id":"ObjectId","create_time":"timestamp"}
   * @return the parsed key/value pairs
   * @throws IllegalArgumentException if the input is not a JSON object
   */
  def str2map(jsonStr: String): collection.immutable.Map[String, String] = {
    // BUG FIX: the original match was non-exhaustive — malformed JSON made
    // JSON.parseFull return None and the method died with an opaque MatchError.
    // NOTE(review): scala.util.parsing.json is deprecated; consider migrating
    // to a maintained JSON library.
    JSON.parseFull(jsonStr) match {
      case Some(parsed: collection.immutable.Map[_, _]) =>
        // Generic parameters are erased at runtime, so rebuild the map with an
        // explicit string conversion rather than relying on an unchecked cast.
        parsed.map { case (k, v) => k.toString -> v.toString }
      case _ =>
        throw new IllegalArgumentException(s"Expected a JSON object, got: $jsonStr")
    }
  }

  /**
   * Maps a textual type name onto a nullable Spark SQL StructField.
   * Unknown type names fall back to StringType (same as the original chain).
   *
   * @param key   column name
   * @param value type name: ObjectId, string, date, timestamp, int, long,
   *              bigint, double, or decimal(p,s)
   */
  def castToStructField(key: String, value: String): StructField = {
    val dataType: DataType =
      if (value.contains("decimal")) {
        // "decimal(p,s)" — extract precision/scale from inside the parentheses.
        val precision = value.substring(value.indexOf("(") + 1, value.indexOf(",")).trim.toInt
        val scale = value.substring(value.indexOf(",") + 1, value.indexOf(")")).trim.toInt
        DecimalType(precision, scale)
      } else value match {
        case "ObjectId" | "string" => StringType
        case "date" | "timestamp"  => TimestampType
        case "int"                 => IntegerType
        case "long" | "bigint"     => LongType
        case "double"              => DoubleType
        case _                     => StringType // fallback for unrecognized type names
      }
    StructField(key, dataType, nullable = true)
  }
}