package a_o2odata_deal

import org.apache.spark.rdd.RDD
import org.apache.spark.sql.{DataFrame, SQLContext}
import org.apache.spark.{SparkConf, SparkContext}

/**
  * @ Author: o2o-rd-0008
  * @ Date:   2019/12/7 14:49
  * @ Param:  ${PARAM}
  * @ Description: Reads Yantai (烟台市) Tmall goods data from Elasticsearch,
  *                re-labels shops listed in a local CSV, aggregates per-district
  *                livestream sales metrics, and writes monthly ORC files to OBS.
  */
object zhibo_test {

  /**
    * Normalizes a raw price string.
    *
    * Rules (behavior identical to the original implementation):
    *  - "-1" is the "no price" sentinel and is returned unchanged;
    *  - a range such as "10-20" is reduced to its lower bound ("10");
    *  - any other value is returned as-is.
    *
    * NOTE(review): a genuinely negative price like "-5" splits to "" — this
    * matches the original behavior; confirm upstream never produces such values.
    *
    * @param priceText raw price text, possibly a "lo-hi" range or the sentinel "-1"
    * @return the normalized price string
    */
  def pricedeal(priceText: String): String =
    if (priceText == "-1") "-1"
    else if (priceText.contains("-")) priceText.split("-")(0)
    else priceText

  /**
    * Job entry point.
    *
    * For each month 1..3 of 2020:
    *  1. queries ES index `2020_tmall/tmall_2020_<month>` for goods in
    *     province 山东省 / city 烟台市;
    *  2. left-joins against a local shop CSV (`D:\yantai.csv`, registered as
    *     `yt_shop`) and rewrites the district of matched shops to
    *     烟台经济技术开发区;
    *  3. aggregates district/category livestream metrics via [[handle_good]];
    *  4. writes the result as a single ORC file to OBS through the s3a connector.
    */
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf()
    conf.setAppName(s"${this.getClass.getSimpleName}")
    conf.set("spark.debug.maxToStringFields", "500")
    // NOTE(review): local[*] master is hard-coded — OK for a one-off local run,
    // but must be removed/overridden for a cluster submit.
    conf.setMaster("local[*]")
    // Elasticsearch connection settings used by esJsonRDD below.
    conf.set("es.nodes", "192.168.1.157")
    conf.set("es.port", "9200")
    conf.set("cluster.name", "O2OElastic")
    conf.set("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
    // Column names in this job differ only by case in places (e.g. renames),
    // so case sensitivity must stay on.
    conf.set("spark.sql.caseSensitive", "true")
    val sc = new SparkContext(conf)
    val sqlContext = new SQLContext(sc)
    sc.setLogLevel("WARN")
    // SECURITY: hard-coded object-storage access/secret keys committed to
    // source. These should be rotated and supplied via configuration or
    // environment (e.g. a Hadoop credential provider), not checked in.
    sc.hadoopConfiguration.set("fs.s3a.access.key", "GAO7EO9FWKPJ8WFCQDME")
    sc.hadoopConfiguration.set("fs.s3a.secret.key", "LZ0xaHBSYKHaJ9ECDbX9f7zin79UZkXfGoNapRPL")
    sc.hadoopConfiguration.set("fs.s3a.endpoint", "https://obs.cn-north-1.myhuaweicloud.com")
    // Brings sc.esJsonRDD into scope (elasticsearch-spark implicit).
    import org.elasticsearch.spark._
    // Local CSV of shops whose district must be overridden; registered once,
    // reused by every monthly iteration.
    sqlContext.read.option("header","true").csv("D:\\yantai.csv").createOrReplaceTempView("yt_shop")
    for (months <- 1 to 3){
      // JSON documents (values of the (id, json) pairs) matching the
      // province/city filter for this month's index.
      val values: RDD[String] = sc.esJsonRDD(s"2020_tmall/tmall_2020_${months}",
        """
          |{
          |  "query": {
          |    "bool": {
          |      "must": [
          |        {
          |          "term": {
          |            "province.keyword": {
          |              "value": "山东省"
          |            }
          |          }
          |        },{
          |          "term": {
          |            "city.keyword": {
          |              "value": "烟台市"
          |            }
          |          }
          |        }
          |      ]
          |    }
          |  }
          |}
        """.stripMargin).values

      sqlContext.read.json(values).createOrReplaceTempView("source_data")
      // Shops present in yt_shop get their district rewritten to
      // 烟台经济技术开发区; all others keep their original district. The
      // temporary column `districts` replaces `district` via drop + rename.
      val tm_data = sqlContext.sql(
        s"""
           |select
           |t1.*,
           |case when t2.shopId is null then t1.district else '烟台经济技术开发区' end as districts
           |from source_data t1
           |left join
           |yt_shop t2
           |on t1.shopId=t2.shopId
         """.stripMargin).drop("district")
        .withColumnRenamed("districts","district")

      val result = handle_good(sqlContext,tm_data)

      /*println(result.count())
      result.show(false)

      result.printSchema()*/

      // Single output file per month (repartition(1)).
      result.repartition(1).write.orc(s"s3a://o2o-dataproces-group/panzonghao/zhibo_zf/2020/${months}/yantai_good_v1")
    }



  }

  /**
    * Aggregates goods data into one row per district/third-level-category
    * group, enriched with livestream metrics.
    *
    * The grouping key `major_key` is the concatenation
    * province + city + district + thirdCategoryId. Totals are computed with
    * window functions over that key, then collapsed to a single row per key
    * with dropDuplicates.
    *
    * NOTE(review): dropDuplicates("major_key") keeps one arbitrary
    * representative row per key, so non-aggregated columns (good_id,
    * timeStamp, ...) come from an unspecified member of the group — confirm
    * downstream consumers only rely on the aggregated columns.
    *
    * @param sqlContext SQL context used for the intermediate queries; this
    *                   method registers the temp views `source_data`,
    *                   `all_data`, `live_data` and `tmp` as side effects
    * @param tm_data    input goods DataFrame (one row per good)
    * @return one row per major_key with overall and livestream totals
    */
  def handle_good(sqlContext:SQLContext,tm_data:DataFrame):DataFrame={
    val source_data = tm_data
    source_data.createOrReplaceTempView("source_data")

    // Per district and third-level category: total sell count, total sales
    // amount, and number of goods. Window sums are computed per row, then the
    // group is collapsed to one row; per-good sellCount/salesAmount are
    // dropped and the totals renamed back to the original column names.
    sqlContext.sql(
      s"""
         |select
         |*,
         |sum(sellCount) over(partition by major_key) as sellCounts,
         |sum(salesAmount) over(partition by major_key) as salesAmounts,
         |count(good_id) over(partition by major_key) as goodCount
         |from
         |(select
         |*,
         |concat(concat(concat(province,city),district),thirdCategoryId) as major_key
         |from source_data)
         """.stripMargin).drop("sellCount","salesAmount").dropDuplicates("major_key")
      .withColumnRenamed("sellCounts","sellCount")
      .withColumnRenamed("salesAmounts","salesAmount")
      .createOrReplaceTempView("all_data")

    // Extract only the livestream rows.
    source_data.where("cast(is_showLive as boolean)=true").createOrReplaceTempView("zhibo_data")
    // Per district and third-level category, for livestream goods only:
    // total viewers, livestream good count, total livestream sell count and
    // sales amount.
    sqlContext.sql(
      s"""
         |select
         |*,
         |sum(sellCount) over(partition by major_key) as live_sellCount,
         |sum(salesAmount) over(partition by major_key) as live_salesAmount,
         |sum(viewcount) over(partition by major_key) as viewCount,
         |count(good_id) over(partition by major_key) as live_goodCount
         |from
         |(select
         |*,
         |concat(concat(concat(province,city),district),thirdCategoryId) as major_key
         |from zhibo_data)
         """.stripMargin).dropDuplicates("major_key").createOrReplaceTempView("live_data")

    // Left-join the livestream totals onto the overall totals; groups with no
    // livestream activity get '0' for every live_* metric.
    sqlContext.sql(
      s"""
         |select
         |t1.major_key,
         |t1.province,
         |t1.city,
         |t1.district,
         |t1.regional_ID,
         |t1.firstCategoryId,
         |t1.secondCategoryId,
         |t1.thirdCategoryId,
         |t1.fourthCategoryId,
         |t1.platformId,
         |t1.timeStamp,
         |t1.sellCount,
         |t1.salesAmount,
         |t1.goodCount,
         |case when t2.major_key is null then '0' else t2.live_sellCount end as live_sellCount,
         |case when t2.major_key is null then '0' else t2.live_salesAmount end as live_salesAmount,
         |case when t2.major_key is null then '0' else t2.viewCount end as viewCount,
         |case when t2.major_key is null then '0' else t2.live_goodCount end as live_goodCount
         |from all_data t1
         |left join
         |live_data t2
         |on t1.major_key=t2.major_key
         """.stripMargin).createOrReplaceTempView("tmp")

    // Final type normalization, again via cast + drop + rename-back.
    // NOTE(review): the overall metrics are cast to bigint/double while every
    // live_* metric is cast to string — presumably to keep the '0' fallback
    // schema-compatible; confirm this asymmetry is what downstream expects.
    sqlContext.sql(
      s"""
         |select
         |*,
         |cast(sellCount as bigint) as sellCounts,
         |cast(salesAmount as double) as salesAmounts,
         |cast(goodCount as bigint) as goodCounts,
         |cast(live_sellCount as string) as live_sellCounts,
         |cast(live_salesAmount as string) as live_salesAmounts,
         |cast(viewCount as string) as viewCounts,
         |cast(live_goodCount as string) as live_goodCounts
         |from tmp
         """.stripMargin).drop("sellCount","salesAmount","goodCount","live_sellCount","live_salesAmount","viewCount","live_goodCount")
      .withColumnRenamed("sellCounts","sellCount")
      .withColumnRenamed("salesAmounts","salesAmount")
      .withColumnRenamed("goodCounts","goodCount")
      .withColumnRenamed("live_sellCounts","live_sellCount")
      .withColumnRenamed("live_salesAmounts","live_salesAmount")
      .withColumnRenamed("viewCounts","viewCount")
      .withColumnRenamed("live_goodCounts","live_goodCount")

      //.repartition(4).write.orc(s"s3a://o2o-dataproces-group/panzonghao/zhibo_zf/${years}/${months}/good")


  }
}
