package a_o2odata_deal.utils.zhibo_zf.zhengli

import com.alibaba.fastjson.JSON
import com.mongodb.spark.MongoSpark
import com.mongodb.spark.config.ReadConfig
import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.sql.SQLContext
import org.elasticsearch.spark._
import a_aa_amainpackage.a_o2odata_deal.config.config._

/**
  * @ Author: o2o-rd-0008
  * @ Date:   2020/2/11 11:38
  * @ Param:  ${PARAM}
  * @ Description: Dumps three MongoDB collections (store addresses, zhibo data,
  *                stores) and one Elasticsearch anchor index to S3 as JSON files.
  */
object mongo_main {

  // NOTE(review): Mongo password and S3 access/secret keys are hard-coded below.
  // These should be moved into external configuration / a secrets store and the
  // committed values rotated.
  //
  // BUGFIX: the original URI contained a stray space after '@'
  // ("...O2Odata123!@ 192.168.0.149..."), which makes the host part of the
  // connection string invalid. The space is removed here.
  private val MongoUri = "mongodb://ob:O2Odata123!@192.168.0.149:27017/admin"

  /**
    * Reads one MongoDB collection, strips the Mongo-internal `_id` field from
    * every document, and writes the result to `outputPath` as JSON (5 partitions).
    *
    * @param sc         active SparkContext
    * @param sqlContext SQLContext used to infer a schema from the JSON strings
    * @param database   Mongo database name
    * @param collection Mongo collection name
    * @param outputPath destination path (S3A, per the hadoopConfiguration in main)
    */
  private def exportCollection(sc: SparkContext,
                               sqlContext: SQLContext,
                               database: String,
                               collection: String,
                               outputPath: String): Unit = {
    val readConfig = ReadConfig(Map(
      "spark.mongodb.input.uri" -> MongoUri,
      "spark.mongodb.input.database" -> database,
      "spark.mongodb.input.collection" -> collection))
    val jsonLines = MongoSpark.load(sc, readConfig).map { doc =>
      val obj = JSON.parseObject(doc.toJson())
      // Drop the Mongo ObjectId so it does not pollute the inferred schema.
      obj.remove("_id")
      obj.toJSONString
    }
    sqlContext.read.json(jsonLines).repartition(5).write.json(outputPath)
  }

  def main(args: Array[String]): Unit = {
    val conf = new SparkConf()
    conf.setAppName(s"${this.getClass.getSimpleName}")
    conf.set("spark.debug.maxToStringFields", "500")
    conf.setMaster("local[*]")
    conf.set("es.nodes", "192.168.1.157")
    conf.set("es.port", "9200")
    conf.set("cluster.name", "O2OElastic")
    conf.set("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
    conf.set("spark.sql.caseSensitive", "true")
    val sc = new SparkContext(conf)
    val sqlContext = new SQLContext(sc)
    sc.setLogLevel("WARN")
    // NOTE(review): hard-coded S3 credentials — see comment on MongoUri above.
    sc.hadoopConfiguration.set("fs.s3a.access.key", "GAO7EO9FWKPJ8WFCQDME")
    sc.hadoopConfiguration.set("fs.s3a.secret.key", "LZ0xaHBSYKHaJ9ECDbX9f7zin79UZkXfGoNapRPL")
    sc.hadoopConfiguration.set("fs.s3a.endpoint", "https://obs.cn-north-1.myhuaweicloud.com")

    try {
      // Three Mongo-to-S3 dumps; previously three copy-pasted read/write blocks.
      exportCollection(sc, sqlContext, "Address",
        a_aa_amainpackage.a_o2odata_deal.config.config.ali_store_address_all_collection,
        a_aa_amainpackage.a_o2odata_deal.config.config.ali_store_address_all_path)
      exportCollection(sc, sqlContext, "AliV",
        a_aa_amainpackage.a_o2odata_deal.config.config.ali_zb_collection,
        a_aa_amainpackage.a_o2odata_deal.config.config.ali_zb_path)
      exportCollection(sc, sqlContext, "AliV",
        a_aa_amainpackage.a_o2odata_deal.config.config.ali_store_collection,
        a_aa_amainpackage.a_o2odata_deal.config.config.ali_store_path)

      // Elasticsearch dump: read the monthly anchor index as raw JSON values,
      // drop the "nick" column, and write to S3.
      val anchorJson: RDD[String] =
        sc.esJsonRDD(s"${years}_all_anchor/all_anchor_${years}_${months}").values
      sqlContext.read.json(anchorJson).drop("nick")
        .repartition(4)
        .write.json(s"s3a://o2o-dataproces-group/panzonghao/zhibo_zf/${years}/${months}/all_anchor")
    } finally {
      // Original never stopped the context; release cluster resources explicitly.
      sc.stop()
    }
  }

}
