package com.o2o.cleaning.month.platform.ebusiness_plat.pupuchaoshi

import com.alibaba.fastjson.JSON
import com.o2o.utils.Iargs
import org.apache.spark.sql.SparkSession

/**
 * One-off data-cleaning job for the PupuChaoShi platform (months 01-07 of 2022).
 *
 * For each month it reads ORC files from
 * `s3a://o2o-tempdata/zyf/pupuchaoshi/mongosource/2022/<MM>/`, re-serializes the
 * `Base_Info` and `add_to_field` columns as plain JSON strings (so that a
 * subsequent `spark.read.json` infers them as string columns instead of nested
 * structs), and writes the result back out as a single ORC file per month.
 */
object CheckDataDetail {

  def main(args: Array[String]): Unit = {

    // NOTE(review): Elasticsearch credentials are hard-coded in source. They
    // should be supplied via configuration/environment, not committed here.
    val spark = SparkSession.builder()
      .appName("CheckDataDetail")
      .config("spark.debug.maxToStringFields", "2000")
      .config("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
      .config("spark.sql.caseSensitive", "true")
      .config("es.nodes", "192.168.1.29")
      .config("es.port", "9200")
      .config("cluster.name", "O2OElastic")
      .config("es.net.http.auth.user", "elastic")
      .config("es.net.http.auth.pass", "changeme")
      .master("local[*]")
      .getOrCreate()

    // NOTE(review): S3A access/secret keys are hard-coded in source — move to a
    // credentials provider or environment variables before this leaves dev.
    val sc = spark.sparkContext
    sc.hadoopConfiguration.set("fs.s3a.access.key", "GAO7EO9FWKPJ8WFCQDME")
    sc.hadoopConfiguration.set("fs.s3a.secret.key", "LZ0xaHBSYKHaJ9ECDbX9f7zin79UZkXfGoNapRPL")
    sc.hadoopConfiguration.set("fs.s3a.endpoint", "https://obs.cn-north-1.myhuaweicloud.com")
    sc.setLogLevel("WARN")

    // Months 1..7 of 2022. Zero-padded month string (e.g. "03") is derived with
    // an f-interpolator instead of the previous hand-maintained parallel array,
    // so the index and the padded form cannot drift apart.
    for (month <- 1 to 7) {
      val paddedMonth = f"$month%02d"
      println(month)
      println(paddedMonth)

      // Re-serialize each row: replace the nested `Base_Info` / `add_to_field`
      // values with their JSON-string form so the downstream `read.json` schema
      // treats them as plain strings rather than inferred structs.
      val normalizedJson = spark.read
        .orc(s"s3a://o2o-tempdata/zyf/pupuchaoshi/mongosource/2022/${paddedMonth}/")
        .toJSON.rdd.map(line => {
          val record = JSON.parseObject(line.toString())
          record.put("Base_Info", record.getString("Base_Info"))
          record.put("add_to_field", record.getString("add_to_field"))
          record.toString
        })

      // NOTE(review): output year directory is "20222" — possibly a typo for
      // "2022", but it may be deliberate to avoid writing over the input prefix
      // being read in the same job. TODO confirm with the job owner.
      spark.read.json(normalizedJson)
        .repartition(1).write.mode("overwrite")
        .orc(s"s3a://o2o-tempdata/zyf/pupuchaoshi/mongosource/20222/${paddedMonth}/")
    }
    //    spark.read.option("header", true).csv(category_path).registerTempTable("t_cate")
    //    spark.read.orc(s"s3a://o2o-dataproces-group/zyf/${year}/10/${readDatabase}/").registerTempTable("t2")
    //    spark.sql(
    //      """
    //        |select count(1) count, cast(sum(sellCount) as bigInt) sellsum,cast(sum(salesAmount) as decimal(20,2)) salesum from t2
    //                """.stripMargin
    //    ).show(false)
    //    spark.sql(
    //      """
    //        |select count(1) from a t1 left join t_cate t2 on t1.rootCategoryId = t2.rootCategoryId and t1.categoryId = t2.categoryId where t2.rootCategoryId is null and t2.categoryId is null
    //        |""".stripMargin).show(false)
    //      .show(false)
    //    spark.read.json(s"s3a://o2o-dimension-table/address_table/address_table_2021/9/address_platform/PupuChaoShi_address_2021_9/")
    //      .show(false)
    //      .printSchema()

  }

}
