package a_o2odata_deal

import a_aa_amainpackage.a_o2odata_deal.config.config.{months, years}
import org.apache.spark.sql.SQLContext
import org.apache.spark.{SparkConf, SparkContext}

/**
  * @ Author: o2o-rd-0008
  * @ Date:   2020/3/2 10:45
  * @ Param:  ${PARAM}
  * @ Description: 
  */
object test {

  /**
    * Ad-hoc local-mode Spark driver used for one-off data exploration.
    * Most of the body is retained commented-out pipeline code (Taobao/Tmall
    * live-stream aggregation, Elasticsearch loads); the only active work is a
    * small random-number print loop.
    */
  def main(args: Array[String]): Unit = {
    // Local-mode Spark configuration plus Elasticsearch connection settings.
    val conf = new SparkConf()
    conf.setAppName(s"${this.getClass.getSimpleName}")
    conf.set("spark.debug.maxToStringFields", "500")
    conf.setMaster("local[*]")
    conf.set("es.nodes", "192.168.1.157")
    conf.set("es.port", "9200")
    conf.set("cluster.name", "O2OElastic")
    conf.set("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
    conf.set("spark.sql.caseSensitive", "true")
    val sc = new SparkContext(conf)
    val sqlContext = new SQLContext(sc)
    sc.setLogLevel("WARN")
    // SECURITY: hard-coded object-storage credentials committed to source.
    // These keys should be rotated and supplied via configuration or
    // environment variables instead of being embedded here.
    sc.hadoopConfiguration.set("fs.s3a.access.key", "GAO7EO9FWKPJ8WFCQDME")
    sc.hadoopConfiguration.set("fs.s3a.secret.key", "LZ0xaHBSYKHaJ9ECDbX9f7zin79UZkXfGoNapRPL")
    sc.hadoopConfiguration.set("fs.s3a.endpoint", "https://obs.cn-north-1.myhuaweicloud.com")

    //    val df = sqlContext.read.orc("s3a://o2o-dataproces-group/zyf/2021/4/dingdongmc/good/")
    //    df.selectExpr("sum(sellCount)","sum(salesAmount)").show()

    // Prints 11 random integers (`0 to 10` is inclusive at both ends).
    // math.random is in [0, 1), so the rounded result lies in [-10, 11];
    // NOTE(review): if the intended range was [-10, 10] the multiplier
    // should be 20, not 21 — confirm before relying on this.
    for (_ <- 0 to 10) {
      println(math.round(math.random * 21 - 10))
      //      Math.random()
    }

    //    sqlContext.read.orc(s"s3a://o2o-dataproces-group/panzonghao/taobao/${years}/${months}/zhibo_finally").createOrReplaceTempView("source_data")
    //
    //    sqlContext.sql(
    //      s"""
    //         |select
    //         |*,
    //         |case when categoryId in ('121454038','124558013','124534019') then '1002402' else secondCategoryId end as secondCategoryIds,
    //         |case when categoryId in ('121454038','124558013','124534019') then '100240201' else thirdCategoryId end as thirdCategoryIds,
    //         |case when categoryId in ('121454038','124558013','124534019') then '10024020199' else fourthCategoryId end as fourthCategoryIds
    //         |from source_data
    //       """.stripMargin).drop("secondCategoryId","thirdCategoryId","fourthCategoryId")
    //      .withColumnRenamed("secondCategoryIds","secondCategoryId")
    //      .withColumnRenamed("thirdCategoryIds","thirdCategoryId")
    //      .withColumnRenamed("fourthCategoryIds","fourthCategoryId")
    //      .repartition(12).write.orc(s"s3a://o2o-dataproces-group/panzonghao/taobao/${years}/${months}/zhibo_finally_v1")


    /*for (months <- 1 to 2){
      //加载天猫 淘宝数据
      val tb_data = sqlContext.read.orc(s"s3a://o2o-dataproces-group/panzonghao/taobao/2020/${months}/zhibo_finally")
          .where("province not in('香港特别行政区', '澳门特别行政区', '海外', '0', '台湾省', '-1')")
        .selectExpr("province","firstCategoryId","secondCategoryId","thirdCategoryId","platformId","shopId","is_showLive")
      val tm_data = sqlContext.read.orc(s"s3a://o2o-dataproces-group/panzonghao/tmall/2020/${months}/zhibo_finally")
        .where("province not in('香港特别行政区', '澳门特别行政区', '海外', '0', '台湾省', '-1')")
        .selectExpr("province","firstCategoryId","secondCategoryId","thirdCategoryId","platformId","shopId","is_showLive")

      tb_data.createOrReplaceTempView("tb_data")
      tm_data.createOrReplaceTempView("tm_data")

      sqlContext.udf.register("handle_is_showLive",handle_is_showLive _)
      //只有一个店铺只有一个直播商品就算直播店铺，否则就不是直播店铺
      val tb_result = sqlContext.sql(
        s"""
           |select
           |platformId,
           |province,
           |firstCategoryId,
           |secondCategoryId,
           |thirdCategoryId,
           |shopId,
           |handle_is_showLive(is_showLive_collect) as is_liveShop
           |from
           |(select
           |platformId,
           |province,
           |firstCategoryId,
           |secondCategoryId,
           |thirdCategoryId,
           |shopId,
           |collect_list(is_showLive) as is_showLive_collect
           |from tb_data
           |group by platformId,province,firstCategoryId,secondCategoryId,thirdCategoryId,shopId)
         """.stripMargin)

      val tm_result = sqlContext.sql(
        s"""
           |select
           |platformId,
           |province,
           |firstCategoryId,
           |secondCategoryId,
           |thirdCategoryId,
           |shopId,
           |handle_is_showLive(is_showLive_collect) as is_liveShop
           |from
           |(select
           |platformId,
           |province,
           |firstCategoryId,
           |secondCategoryId,
           |thirdCategoryId,
           |shopId,
           |collect_list(is_showLive) as is_showLive_collect
           |from tm_data
           |group by platformId,province,firstCategoryId,secondCategoryId,thirdCategoryId,shopId)
         """.stripMargin)

      val all = tb_result.selectExpr("platformId","province","firstCategoryId","secondCategoryId","thirdCategoryId","shopId","is_liveShop").union(
        tm_result.selectExpr("platformId","province","firstCategoryId","secondCategoryId","thirdCategoryId","shopId","is_liveShop")
      ).repartition(4).write.orc(s"s3a://o2o-dataproces-group/panzonghao/backs/2020/${months}")


    }

*/


    //val source_data  = sc.esJsonRDD(s"${indexs}").values

    /*  import org.elasticsearch.spark._
      //所有主播数据入库
      sqlContext.read.orc(s"s3a://o2o-dataproces-group/panzonghao/tmall/${years}/${months}/all_anchor")
        .drop("timeStamp")
        .withColumn("timeStamp",lit("1582819200")).toJSON.rdd.map(line =>{
        JSON.parseObject(line)
      }).saveToEs(s"${years}_all_anchor/all_anchor_${years}_${months}", Map("es.mapping.id" -> "anchorId"))

      //天猫主播及其商品数据数据入库
      sqlContext.read.orc(s"s3a://o2o-dataproces-group/panzonghao/tmall/${years}/${months}/zhibo_zhubo")
        .drop("timeStamp")
        .withColumn("timeStamp",lit("1582819200")).toJSON.rdd.map(line =>{
        JSON.parseObject(line)
      }).saveToEs(s"${years}_tmall_anchor/tmall_anchor_${years}_${months}", Map("es.mapping.id" -> "anchorId"))

      sqlContext.read.orc(s"s3a://o2o-dataproces-group/panzonghao/tmall/${years}/${months}/zhibo_live_good")
        .drop("timeStamp")
        .withColumn("timeStamp",lit("1582819200")).toJSON.rdd.map(line =>{
        JSON.parseObject(line)
      }).saveToEs(s"${years}_tmall_good_live/tmall_good_live_${years}_${months}", Map("es.mapping.id" -> "liveAndgood_id"))

      //天猫主播及其商品数据数据入库
      sqlContext.read.orc(s"s3a://o2o-dataproces-group/panzonghao/taobao/${years}/${months}/zhibo_zhubo")
        .drop("timeStamp")
        .withColumn("timeStamp",lit("1582819200")).toJSON.rdd.map(line =>{
        JSON.parseObject(line)
      }).saveToEs(s"${years}_taobao_anchor/taobao_anchor_${years}_${months}", Map("es.mapping.id" -> "anchorId"))

      sqlContext.read.orc(s"s3a://o2o-dataproces-group/panzonghao/taobao/${years}/${months}/zhibo_live_good")
        .drop("timeStamp")
        .withColumn("timeStamp",lit("1582819200")).toJSON.rdd.map(line =>{
        JSON.parseObject(line)
      }).saveToEs(s"${years}_taobao_good_live/taobao_good_live_${years}_${months}", Map("es.mapping.id" -> "liveAndgood_id"))

  */


    //加载12月份数据
    //sqlContext.read.orc(s"s3a://dws-data/g_data/2020/3/tmall/")
    ////sqlContext.read.json(a_aa_amainpackage.a_o2odata_deal.config.config.cate_path)
    // .where("province not in ('香港特别行政区','澳门特别行政区','海外','0','台湾省','-1')").dropDuplicates("shopId")
    // .createOrReplaceTempView("data12")
    ////打上店铺类型
    //sqlContext.sql(
    // s"""
    //    |select
    //    |*,
    //    |case
    //    |when is_flagship='true' then '天猫旗舰店'
    //    |when is_flagship='false' and shopName rlike '专营' then '天猫专营店'
    //    |when is_flagship='false' and shopName rlike '专卖' then '天猫专卖店'
    //    |else '天猫其他'
    //    |end as shop_type
    //    |from data12
    //  """.stripMargin).createOrReplaceTempView("mid1")
    //
    //sqlContext.sql(
    // s"""
    //    |select
    //    |economic_division,
    //    |shopType,
    //    |province,
    //    |city,
    //    |district,
    //    |shop_type,
    //    |count(shopId) as shop_count
    //    |from mid1
    //    |group by economic_division,province,city,district,shop_type,shopType
    //  """.stripMargin).repartition(1).write.option("header","true")
    // .csv("s3a://o2o-dataproces-group/panzonghao/2020/3/shop/2003")
    //

    // Release local Spark resources before the JVM exits (was missing).
    sc.stop()
  }

  /**
    * Collapses a collection of per-item live-streaming flags into a single
    * shop-level flag: a shop counts as a live shop as soon as at least one of
    * its items carries is_showLive == "true".
    *
    * @param is_showLive_collect per-item flags, each expected to be the
    *                            string "true" or "false"
    * @return "true" if any element equals "true"; "false" otherwise
    *         (including for an empty input)
    */
  def handle_is_showLive(is_showLive_collect: Seq[String]): String =
    if (is_showLive_collect.contains("true")) "true" else "false"

}
