package com.o2o.cleaning.month.platform.ebusiness_plat.meituan

import com.alibaba.fastjson.{JSON, JSONObject}
import com.mongodb.spark.MongoSpark
import com.mongodb.spark.rdd.MongoRDD
import org.apache.log4j.{Level, Logger}
import org.apache.spark.rdd.RDD
import org.apache.spark.sql._
import org.bson.Document

import scala.util.control.NonFatal


/**
 * Pulls Meituan Waimai (美团外卖) data from MongoDB and lands it in object
 * storage as ORC.
 *
 * @author o2o-rd-0008
 * @since  2018/11/2
 */
object MongoData2OBS {

  /**
   * Entry point. Reads every document of the configured MongoDB collection,
   * enriches each record (flattened `flavors` category fields, platform tags,
   * batch timestamp) and writes the result as ORC to the OBS path from
   * [[Meituan_config]].
   *
   * @param args unused; all configuration comes from `Meituan_config`
   */
  def main(args: Array[String]): Unit = {
    Logger.getLogger("org.apache.spark").setLevel(Level.WARN)

    /** ******************* Job configuration (IMPORTANT) ******************* */
    val timeStamp = Meituan_config.timeStamp
    val collect_names = Meituan_config.collect_names

    // collect_names is "database.collection". Split on the FIRST dot only so a
    // collection name that itself contains dots is not truncated.
    val dbAndColl = collect_names.split("\\.", 2)
    require(dbAndColl.length == 2,
      s"collect_names must be in 'database.collection' form, got: $collect_names")
    val database = dbAndColl(0)
    val collection = dbAndColl(1)

    // Output path for the pulled data, e.g.
    // s3a://o2o-sourcedata/obs-source-${year}/${month}/${platform}/...
    val resultUrl = Meituan_config.detail_sourcePath

    // SECURITY(review): MongoDB credentials are hardcoded below; move them to
    // external configuration / a secret store.
    val spark = SparkSession.builder()
      //  .master("local[*]")
      .appName(s"${this.getClass.getSimpleName}")
      .config("spark.mongodb.input.uri", "mongodb://ob:O2Odata123!@ 192.168.0.56:27017/admin")
      .config("spark.mongodb.input.database", s"${database}")
      .config("spark.mongodb.input.collection", s"${collection}")
      .config("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
      .getOrCreate()

    val sc = spark.sparkContext
    // SECURITY(review): OBS access keys are hardcoded; externalize them.
    sc.hadoopConfiguration.set("fs.s3a.access.key", "GAO7EO9FWKPJ8WFCQDME")
    sc.hadoopConfiguration.set("fs.s3a.secret.key", "LZ0xaHBSYKHaJ9ECDbX9f7zin79UZkXfGoNapRPL")
    sc.hadoopConfiguration.set("fs.s3a.endpoint", "https://obs.cn-north-1.myhuaweicloud.com")
    sc.setLogLevel("WARN")

    // =========== Pull the MongoDB collection ===========
    val mongoRDD: MongoRDD[Document] = MongoSpark.load(sc)

    val dataRDD = parsMongoRDD(mongoRDD, timeStamp)

    // coalesce(10): cap the number of output ORC files.
    spark.read.json(dataRDD).coalesce(10).write.orc(resultUrl)

    sc.stop()
  }

  /**
   * Converts each BSON document to an enriched JSON string:
   *  - drops the Mongo `_id`
   *  - unwraps the first element of the `flavors` array into the top-level
   *    `rootCategoryId` / `rootCategoryName` / `categoryId` / `categoryName`
   *  - stamps platform metadata and the batch timestamp
   *
   * @param mongoRDD  raw documents loaded from MongoDB
   * @param timeStamp batch timestamp written into every record
   * @return one JSON string per input document
   */
  def parsMongoRDD(mongoRDD: MongoRDD[Document], timeStamp: String): RDD[String] = {

    val rdd: RDD[String] = mongoRDD.map(line => {

      val nObject: JSONObject = JSON.parseObject(line.toJson())

      nObject.remove("_id")

      // "flavors" is a JSON ARRAY when present; "-1" marks absence.
      val flavors = nObject.getOrDefault("flavors", "-1").toString
      val flavors_bak = if ("-1".equals(flavors)) {
        "-1"
      } else {
        try {
          // BUGFIX: must be parseArray — parseObject on an array string throws
          // and the raw array text leaked through to the parse below.
          JSON.parseArray(flavors).get(0).toString
        } catch {
          case NonFatal(e) => e.printStackTrace()
            flavors
        }
      }

      // Fall back to an empty object on bad data so one malformed record
      // cannot fail the whole stage (the category fields just come out null).
      val flov: JSONObject =
        try {
          val parsed = JSON.parseObject(flavors_bak)
          if (parsed == null) new JSONObject() else parsed
        } catch {
          case NonFatal(_) => new JSONObject()
        }

      nObject.put("flavors", s"${flov}")
      nObject.put("rootCategoryId", flov.get("rootCategoryId"))
      nObject.put("rootCategoryName", flov.get("rootCategoryName"))

      nObject.put("categoryId", flov.get("categoryId"))
      nObject.put("categoryName", flov.get("categoryName"))

      nObject.put("platformName", "美团外卖")
      nObject.put("platformId", "16")
      nObject.put("timeStamp", timeStamp)

      nObject.toString
    })

    rdd
  }

}

