package com.o2o.cleaning.month.platform.ebusiness_plat.ddmc

import org.apache.spark.sql.SparkSession

object TestRead4ES2obs {

  /** One-shot batch job: reads raw JSON documents from an Elasticsearch
    * index/type and writes them out as ORC files to Huawei OBS over the
    * S3A filesystem.
    *
    * Runs in `local[*]` mode, so this is a test/ad-hoc job, not a
    * cluster-submitted one.
    *
    * @param args unused
    */
  def main(args: Array[String]): Unit = {

    // SECURITY NOTE(review): ES auth and OBS access/secret keys are
    // hard-coded below. Externalize them (spark-submit --conf, env vars,
    // or a credentials provider) before running outside a test setup.
    val spark = SparkSession.builder()
      .appName("SparkTest")
      .master("local[*]")
      .config("es.nodes", "192.168.1.29")
      .config("es.port", "9200")
      .config("es.net.http.auth.user", "elastic")
      .config("es.net.http.auth.pass", "changeme")
      .config("es.batch.write.retry.count", "10")
      .config("es.batch.write.retry.wait", "60")
      .config("cluster.name", "O2OElastic")
      .getOrCreate()
    val sc = spark.sparkContext

    // S3A endpoint + credentials for the OBS bucket the ORC output goes to.
    sc.hadoopConfiguration.set("fs.s3a.access.key", "GAO7EO9FWKPJ8WFCQDME")
    sc.hadoopConfiguration.set("fs.s3a.secret.key", "LZ0xaHBSYKHaJ9ECDbX9f7zin79UZkXfGoNapRPL")
    sc.hadoopConfiguration.set("fs.s3a.endpoint", "https://obs.cn-north-1.myhuaweicloud.com")

    // Brings esJsonRDD and friends into scope on SparkContext.
    import org.elasticsearch.spark._

    // val (never reassigned); typo fixed: "platfroms" -> "platforms".
    val platforms = Array("dingdongmc")
    for (platform <- platforms) {
      // ES resource in "index/type" form, e.g. "247_2021_dingdongmc/dingdongmc_2021_3".
      val index = s"247_2021_${platform}/${platform}_2021_3"
      // esJsonRDD yields (docId, jsonString) pairs; we only need the JSON bodies.
      val jsonDocs = sc.esJsonRDD(index).values

      // Parse the JSON strings into a DataFrame (schema inferred) and
      // persist as 5 ORC part-files per platform.
      spark.read.json(jsonDocs).repartition(5).write.orc(
        s"s3a://dws-data/g_data/2021/3/${platform}/")
    }

    // Release the session explicitly; the original leaked it until JVM exit.
    spark.stop()
  }
}
