package com.o2o.yumidami

import com.alibaba.fastjson.JSON
import org.apache.spark.SparkContext
import org.apache.spark.sql.SparkSession

object yumiObsDemo {

  /** Entry point: reads ORC files from Huawei OBS (via the S3A connector),
   * registers them as a temp view, and prints a row count.
   *
   * @param args command-line arguments (unused)
   */
  def main(args: Array[String]): Unit = {
    // Local Spark session with Kryo serialization.
    val spark = SparkSession.builder()
      .master("local[*]")
      .config("spark.debug.maxToStringFields", "10000")
      .appName("MongoSparkConnectorIntro")
      .config("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
      .getOrCreate()

    // OBS / S3A access configuration.
    // SECURITY: these credentials were hard-coded in source control — rotate the
    // keys and supply replacements via OBS_ACCESS_KEY / OBS_SECRET_KEY. The
    // hard-coded values remain only as a backward-compatible fallback.
    val sc: SparkContext = spark.sparkContext
    sc.hadoopConfiguration.set("fs.s3a.access.key",
      sys.env.getOrElse("OBS_ACCESS_KEY", "GAO7EO9FWKPJ8WFCQDME"))
    sc.hadoopConfiguration.set("fs.s3a.secret.key",
      sys.env.getOrElse("OBS_SECRET_KEY", "LZ0xaHBSYKHaJ9ECDbX9f7zin79UZkXfGoNapRPL"))
    sc.hadoopConfiguration.set("fs.s3a.endpoint", "https://obs.cn-north-1.myhuaweicloud.com")
    sc.setLogLevel("ERROR")

    val sourcePath = "s3a://o2o-business-data/yanjiubu/dami/2020/*/*/*"

    // toJSON already yields one JSON string per row, so the previous
    // fastjson parse/re-serialize round-trip was a per-row no-op and has been
    // removed. The Dataset[String] overload of read.json is used directly;
    // the RDD[String] overload is deprecated since Spark 2.2.
    val jsonLines = spark.read.orc(sourcePath).toJSON

    // createOrReplaceTempView replaces the deprecated registerTempTable.
    spark.read.json(jsonLines).createOrReplaceTempView("main")
    spark.sql(
      """
        |select count(1) from main
        |""".stripMargin).show()

    // Release resources explicitly at the end of the batch job.
    spark.stop()
  }
}