package com.o2o.cleaning.month.platform.ebusiness_plat.elm

import com.alibaba.fastjson.{JSON, JSONObject}
import com.mongodb.spark.MongoSpark
import com.mongodb.spark.config.ReadConfig
import com.mongodb.spark.rdd.MongoRDD
import org.apache.spark.SparkContext
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.SparkSession
import org.bson.Document

/**
  * Spark job that reads the Eleme shop collection from MongoDB and maps each
  * document to a JSON string (with the `_id` field removed).
  *
  * @author o2o-rd-0008
  * @since  2021/1/12
  */
object elm {
  /**
    * Entry point. Reads the `elm_shop_new_2101` collection from MongoDB,
    * parses each BSON document with fastjson, strips the Mongo `_id` field
    * and yields the remaining document as a JSON string.
    *
    * @param args command-line arguments (currently unused)
    */
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder()
//      .master("local[*]")
      .config("spark.debug.maxToStringFields", "10000")
      .appName("MongoSparkConnectorIntro")
      .config("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
      .getOrCreate()

    // OBS (Huawei Cloud object storage) S3A access configuration.
    // SECURITY NOTE(review): access key, secret key and the MongoDB password
    // below are hard-coded in source. Move them to configuration / environment
    // variables and rotate the leaked credentials.
    val sc: SparkContext = spark.sparkContext // val: never reassigned
    sc.hadoopConfiguration.set("fs.s3a.access.key", "GAO7EO9FWKPJ8WFCQDME")
    sc.hadoopConfiguration.set("fs.s3a.secret.key", "LZ0xaHBSYKHaJ9ECDbX9f7zin79UZkXfGoNapRPL")
    sc.hadoopConfiguration.set("fs.s3a.endpoint", "https://obs.cn-north-1.myhuaweicloud.com")
    sc.setLogLevel("ERROR")

    // FIX: the original URI had a stray space after '@'
    // ("...O2Odata123!@ 192.168.0.203..."), which is not a valid connection
    // string and makes the host unresolvable.
    val readConfig = ReadConfig(Map(
      "uri" -> "mongodb://ob:O2Odata123!@192.168.0.203:27017/admin",
      "database" -> "Eleme",
      "collection" -> "elm_shop_new_2101"))

    val mongoRDD: MongoRDD[Document] = MongoSpark.load(sc, readConfig)

    val rdd: RDD[String] = mongoRDD.map { line =>
      val nObject: JSONObject = JSON.parseObject(line.toJson())

      val jsonArray = nObject.getJSONArray("add_to_field")

      // FIX: original iterated `0 to jsonArray.size`, one past the last valid
      // index (valid indices are 0 until size). The body is still the original
      // commented-out per-element filtering, so the loop remains a no-op; the
      // bound is corrected so re-enabling the body does not throw.
      for (i <- 0 until jsonArray.size) {

          /*val ele = jsonArray.getJSONObject(i);
          long crawl_date = ele.getLong("crawl_date");
          String crawl_date_yyyyMMdd = new SimpleDateFormat("yyyyMMdd").format(new Date(crawl_date * 1000));

          if (crawl_date_yyyyMMdd.equals(this.bizdate)) { // 找到指定业务日期的数据
            targetEle = ele;

          }*/

      }

      // Drop the MongoDB-internal identifier before emitting the JSON text.
      nObject.remove("_id")
      nObject.toString
    }

    // NOTE(review): `rdd` is never consumed — Spark transformations are lazy,
    // so without an action (e.g. rdd.saveAsTextFile(...)) this job performs no
    // work. Confirm the intended output and add the missing action.
  }
}
