package com.o2o.cleaning.month.platform.ebusiness_plat.meituan

import com.alibaba.fastjson.{JSON, JSONObject}
import com.mongodb.spark.MongoSpark
import com.o2o.cleaning.month.platform.ebusiness_plat.meituan.Meituan_wm.handleMeituanData
import org.apache.spark.sql.{DataFrame, SparkSession}

/**
  * @ Author: o2o-rd-0008
  * @ Date:   2020/11/5 16:28
  * @ Param:  ${PARAM}
  * @ Description: Exports the MtTop/mt_top_2010 MongoDB collection to OBS as ORC,
  *                then runs Meituan-specific cleaning and writes the result.
  */
object MeituanAddr {

  /**
    * Entry point. Pipeline:
    *   1. Load raw documents from MongoDB (MtTop/mt_top_2010).
    *   2. Strip the Mongo `_id` field and persist the raw data to OBS as ORC.
    *   3. Run Meituan-specific cleaning (`handleMeituanData`) and persist the result.
    *
    * NOTE(review): the MongoDB password and the OBS access/secret keys are
    * hard-coded below — they should be moved to configuration or a secrets
    * store and rotated, since they are now in version control.
    */
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder()
      .master("local[*]")
      .appName("MonGo_To_local")
      // FIX: removed the stray space between '@' and the host; a space makes
      // the MongoDB connection string invalid and the driver rejects it.
      .config("spark.mongodb.input.uri", "mongodb://root:O2Odata123!@192.168.0.58:27017/admin")
      .config("spark.mongodb.input.database", "MtTop")
      .config("spark.mongodb.input.collection", "mt_top_2010")
      .config("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
      .getOrCreate()

    val sc = spark.sparkContext
    // S3A credentials and endpoint for Huawei Cloud OBS (S3-compatible).
    sc.hadoopConfiguration.set("fs.s3a.access.key", "GAO7EO9FWKPJ8WFCQDME")
    sc.hadoopConfiguration.set("fs.s3a.secret.key", "LZ0xaHBSYKHaJ9ECDbX9f7zin79UZkXfGoNapRPL")
    sc.hadoopConfiguration.set("fs.s3a.endpoint", "https://obs.cn-north-1.myhuaweicloud.com")
    sc.setLogLevel("WARN")

    try {
      // Step 1: load the configured MongoDB collection as an RDD of documents.
      val rdd = MongoSpark.load(sc)

      // Step 2: drop the Mongo-internal "_id" field so the inferred JSON
      // schema is stable, and keep each document as a JSON string.
      val values = rdd.map(line => {
        val nObject: JSONObject = JSON.parseObject(line.toJson())
        nObject.remove("_id")
        nObject.toString
      })

      // Persist the raw extract as ORC (overwrite so re-runs are idempotent).
      spark.read.json(values).write.mode("overwrite")
        .orc("s3a://o2o-dataproces-group/xuechunhua/product/meituan/2020/10/meituan_11_5_bu/")

      // Step 3: apply the Meituan cleaning logic and write the result.
      val data_good: DataFrame = handleMeituanData(spark)

      // FIX: added overwrite mode for consistency with the raw write above;
      // without it a re-run fails with "path already exists".
      data_good.write.mode("overwrite")
        .orc("s3a://o2o-dataproces-group/xuechunhua/product/meituan/2020/10/meituan_11_5_bu_result_data/")
    } finally {
      // FIX: release Spark resources even when a stage throws.
      spark.stop()
    }
  }
}
