package com.o2o.cleaning.month.platform.ebusiness_plat.yangmatou

import com.alibaba.fastjson.{JSON, JSONObject}
import org.apache.spark.SparkContext
import org.apache.spark.sql.SparkSession
import org.elasticsearch.spark._

/**
  * @ Author: o2o-rd-0008
  * @ Date:   2020/7/8 09:57
  * @ Param:  ${PARAM}
  * @ Description: Exports the yangmatou platform's Elasticsearch index to OBS as ORC.
  */
object Yangmatou_2_Obs {

  // Job parameters. resultPath and the ES resource are both derived from
  // these so the source index and output location stay in sync.
  val index = "2020_yangmatou"
  val timestamp = "1593446400"
  val platformname = "yangmatou"
  val year = "2020"
  val month = "8"
  val resultPath = s"s3a://dws-data/g_data/${year}/${month}/${platformname}/"

  /**
    * Entry point: reads JSON documents from the Elasticsearch index,
    * normalizes each document through fastjson, and writes the result
    * to OBS (via the S3A connector) in ORC format at [[resultPath]].
    */
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder()
      .master("local[*]")
      .appName("SparkConnectorIntro")
      .config("es.nodes", "192.168.1.29")
      .config("es.port", "9200")
      // SECURITY NOTE(review): ES credentials are hard-coded; move them to
      // configuration or environment variables before wider use.
      .config("es.net.http.auth.user", "elastic")
      .config("es.net.http.auth.pass", "changeme")
      .config("cluster.name", "O2OElastic")
      .config("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
      .getOrCreate()

    // OBS (S3A) access configuration.
    // val instead of var: the context reference is never reassigned.
    val sc: SparkContext = spark.sparkContext

    // SECURITY NOTE(review): hard-coded object-storage keys; externalize these
    // (e.g. core-site.xml, environment variables, or a credentials provider).
    sc.hadoopConfiguration.set("fs.s3a.access.key", "GAO7EO9FWKPJ8WFCQDME")
    sc.hadoopConfiguration.set("fs.s3a.secret.key", "LZ0xaHBSYKHaJ9ECDbX9f7zin79UZkXfGoNapRPL")
    sc.hadoopConfiguration.set("fs.s3a.endpoint", "https://obs.cn-north-1.myhuaweicloud.com")
    sc.setLogLevel("ERROR")

    // Derive the ES resource ("index/type") from the vals above instead of
    // repeating the literal "2020_yangmatou/yangmatou_2020_8"; the resulting
    // string is identical for the current values.
    val esResource = s"$index/${platformname}_${year}_${month}"

    // Round-trip each document through fastjson: malformed records fail fast
    // with a parse error and the emitted JSON is normalized.
    val source = sc.esJsonRDD(esResource).values.map { line =>
      JSON.parseObject(line).toString
    }

    spark.read.json(source).write.orc(resultPath)

    // Release the session/context explicitly instead of relying on JVM exit.
    spark.stop()
  }

}
