package com.o2o.regularservice.dws_orc

import com.alibaba.fastjson.JSON
import org.apache.spark.sql.SparkSession
import org.elasticsearch.spark._

/**
  * Offline data → ORC file conversion job.
  */
object Orc {

  /**
    * Shared Elasticsearch connection settings. All three source indices are served
    * from the same cluster, so the map is defined once instead of three times.
    * NOTE(security): credentials are hardcoded here (and in the S3A keys below);
    * they should be moved to configuration/environment before this code is shared.
    */
  private val EsConf: Map[String, String] = Map(
    "es.nodes" -> "192.168.1.29",
    "es.port" -> "9200",
    "cluster.name" -> "O2OElastic",
    "es.net.http.auth.user" -> "elastic",
    "es.net.http.auth.pass" -> "changeme"
  )

  /**
    * Resolves the ES "index/type" resource to read for a platform, or None for an
    * unsupported platform (the original code silently produced an empty RDD).
    *
    * @param platform platform key, e.g. "meituan_tg"
    * @param year     data year (only used by the 247-node indices)
    * @param month    data month as a string, e.g. "1"
    */
  private def esResource(platform: String, year: Int, month: String): Option[String] =
    platform match {
      // Suning / Pinduoduo: extracted from ES, 157 node. Index names are pinned to 2020.
      case "pinduoduo" | "suning" => Some(s"2020_${platform}/${platform}_2020_${month}")
      // Dazhongdianping: extracted from ES, 247 node.
      case "dazhongdp" => Some(s"247_${year}_dazhongdp/dazhongdp_${year}_${month}")
      // Meituan group-buy: extracted from ES, 247 node.
      case "meituan_tg" => Some(s"247_${year}_mttg/mttg_${year}_${month}")
      case _ => None
    }

  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder()
      .master("local[*]")
      .config("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
      .config("spark.hadoop.fs.s3a.access.key", "GAO7EO9FWKPJ8WFCQDME")
      .config("spark.hadoop.fs.s3a.secret.key", "LZ0xaHBSYKHaJ9ECDbX9f7zin79UZkXfGoNapRPL")
      .config("fs.s3a.endpoint", "https://obs.cn-north-1.myhuaweicloud.com")
      .getOrCreate()

    val sc = spark.sparkContext
    sc.setLogLevel("WARN")

    // Job parameters.
    val platforms = Array("meituan_tg") // e.g. Array("meituan_tg", "dazhongdp")
    val months = Array("1")
    val year = 2021

    for (platform <- platforms; month <- months) {
      // FIX: the output path previously hardcoded "2021"; derive it from `year`
      // so changing `year` no longer silently writes into the wrong directory.
      val path = s"s3a://dws-data/g_data/${year}/${month}/${platform}"

      val value = esResource(platform, year, month) match {
        case Some(resource) => sc.esJsonRDD(resource, EsConf)
        case None           => sc.emptyRDD[(String, String)]
      }

      // Coerce every top-level value to its string form so spark.read.json infers a
      // uniform all-string schema, then drop internal md5/promotion fields before writing.
      val normalized = value.values.map { json =>
        val obj = JSON.parseObject(json)
        for (key <- obj.keySet().toArray()) {
          // FIX: String.valueOf instead of .toString — a JSON null value no longer
          // throws an NPE and kills the whole job; it becomes the string "null".
          obj.put(key.toString, String.valueOf(obj.get(key)))
        }
        obj.remove("shop_pre_md5")
        obj.remove("pre_md5")
        obj.remove("md5_shopId")
        obj.remove("md5_id")
        obj.remove("promotion_info")
        // obj.remove("Base_info")
        // obj.remove("images")
        obj.toString
      }.cache()

      spark.read.json(normalized).repartition(5).write.mode("overwrite").orc(path)
      normalized.unpersist()
    }

    // FIX: the session was never closed; stop it so the job exits cleanly.
    spark.stop()
  }
}
