import com.o2o.utils.Iargs
import org.apache.spark.sql.SparkSession

/**
  * Ad-hoc sanity checks on INTIME product data stored on OBS.
  *
  * @author o2o-rd-0008
  * @since  2020/6/5 16:23
  */
/**
  * Sanity-checks the "good" INTIME product dataset stored on OBS (read through
  * the s3a connector): prints overall row/shop/mall counts with sales totals,
  * and the number of `good_id` values that occur more than once.
  *
  * Runs with a local master, so it is intended for ad-hoc verification on a
  * developer machine rather than cluster deployment.
  */
object CheckINTIMEObsData {

  def main(args: Array[String]): Unit = {

    // NOTE(review): the Elasticsearch node address and credentials below are
    // hardcoded (including a default password). They should be externalized
    // (e.g. alongside the OBS keys in Iargs) before this leaves a dev setup.
    val spark = SparkSession.builder()
      .appName(s"${this.getClass.getSimpleName}")
      .config("spark.debug.maxToStringFields", "2000")
      .config("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
      .config("spark.sql.caseSensitive", "true")
      .config("es.nodes", "192.168.1.29")
      .config("es.port", "9200")
      .config("cluster.name", "O2OElastic")
      .config("es.net.http.auth.user", "elastic")
      .config("es.net.http.auth.pass", "changeme")
      .master("local[*]") // local debugging only
      .getOrCreate()

    // Wire the s3a connector to the OBS endpoint using externally supplied keys.
    val sc = spark.sparkContext
    sc.hadoopConfiguration.set("fs.s3a.access.key", Iargs.OBSACCESS)
    sc.hadoopConfiguration.set("fs.s3a.secret.key", Iargs.OBSSECRET)
    sc.hadoopConfiguration.set("fs.s3a.endpoint", Iargs.OBSENDPOINT)
    sc.setLogLevel("WARN")

    // Input: the cleaned ("good") INTIME records for 2020/9.
    val goodPath = "s3a://o2o-dataproces-group/zsc/2020/9/intime/good/"

    // registerTempTable is deprecated since Spark 2.0; use the view API instead.
    spark.read.orc(goodPath).createOrReplaceTempView("tab")

    // Overall record/shop/mall counts and sales totals.
    spark.sql(
      """
        |select
        |count(1) totalNum,
        |count(distinct shopId) shopNum,
        |count(distinct mallId) mallNum,
        |sum(sellCount) as sumSellCount,
        |sum(salesAmount) as sumSalesAmount
        |from
        |tab
        |
      """.stripMargin).show(false)

    // How many good_ids appear more than once (expected to be 0 for clean data).
    spark.sql(
      """
        |select
        |count(1)
        |from(
        |select
        |good_id,
        |count(1) ct
        |from
        |tab
        |group by good_id)
        |where ct > 1
      """.stripMargin).show(false)

    // Stopping the session also stops the underlying SparkContext.
    spark.stop()
  }
}
