package com.o2o.cleaning.month.platform.ebusiness_plat.meituan_tg
//import com.o2o.cleaning.month.platform.ebusiness_plat.pinduoduo.PinDuoDuo_new.db_subName
//import com.o2o.utils.obs.Obs_Conf
import org.apache.spark.SparkContext
import org.apache.spark.sql.SparkSession

/**
 * Ad-hoc local analysis job for Meituan Tuangou (meituan_tg) 2020-05 data.
 *
 * Reads pre-computed ORC extracts from Huawei OBS (through the s3a connector),
 * registers them as temp views and prints spot-check / aggregate queries.
 * Large commented-out sections are scratch queries from earlier passes of the
 * month-over-month reconciliation and are retained on purpose.
 *
 * NOTE(review): the fs.s3a access/secret keys are commented out, so this only
 * runs where credentials are supplied externally — confirm before use.
 */
object Mttg_2005_test {

  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder()
      .master("local[*]")
      .appName("MongoSparkConnectorIntro")
      .config("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
      .getOrCreate()

    // OBS (object-storage) setup; credentials intentionally left commented out.
    val sc: SparkContext = spark.sparkContext
//    sc.hadoopConfiguration.set("fs.s3a.access.key", Obs_Conf.access_key)
//    sc.hadoopConfiguration.set("fs.s3a.secret.key", Obs_Conf.secret_key)
    sc.hadoopConfiguration.set("fs.s3a.endpoint", "https://obs.cn-north-1.myhuaweicloud.com")
    sc.setLogLevel("ERROR")
    //=================================================================

    // 110718459|          4392
//    spark.read.orc("s3a://dws-data/g_shop/2020/4/meituan_tg/").select("shopId","totalSellCount").show(50)
//      .where("good_id in ('617616984','616350146','617620160','617627274','617622535') ").show()
//      .where("good_id in ('55485050','55485082','55485075','55484871','640077658') ").show()

    /*val df4 = spark.read.orc("s3a://o2o-dataproces-group/chen_lixiu/2020/4/mttg/good/")
    df4.registerTempTable("t4")
    df4.select("shopId").dropDuplicates("shopId").registerTempTable("t44")
    val df5 = spark.read.orc("s3a://o2o-dataproces-group/chen_lixiu/2020/5/mttg/good/")
    df5.registerTempTable("t5")
    df5.select("shopId").dropDuplicates("shopId").registerTempTable("t55")

    // shops present in both months (matched on shopId)
    spark.sql(
      """
        |select
        |a.*
        |from t4 a join t55 b
        |on a.shopId = b.shopId
        |""".stripMargin).repartition(1).write.mode("overwrite").orc("s3a://o2o-dataproces-group/chen_lixiu/2020/5/mttg/0604_du/shopJoin")

    spark.sql(
      """
        |select
        |a.*
        |from t4 a left join t55 b
        |on a.shopId = b.shopId
        |where b.shopId is null
        |""".stripMargin).repartition(1).write.mode("overwrite").orc("s3a://o2o-dataproces-group/chen_lixiu/2020/5/mttg/0604_du/shopNoJoin4")

    spark.sql(
      """
        |select
        |a.*
        |from t5 a left join t44 b
        |on a.shopId = b.shopId
        |where b.shopId is null
        |""".stripMargin).repartition(1).write.mode("overwrite").orc("s3a://o2o-dataproces-group/chen_lixiu/2020/5/mttg/0604_du/shopNoJoin5")
    */

    // df1/df2 are currently unused by the active query below (they fed the
    // commented-out union); kept so the 0.91 scaling of the matched-shop
    // extract stays documented in code.
    val df1 = spark.read.orc("s3a://o2o-dataproces-group/chen_lixiu/2020/5/mttg/0604_du/shopJoin")
      .selectExpr("province", "ceil(sellCount*0.91) sellCount", "cast(salesAmount*0.91 as decimal(20,2)) salesAmount")
    val df2 = spark.read.orc("s3a://o2o-dataproces-group/chen_lixiu/2020/5/mttg/0604_du/shopNoJoin5")
      .select("province", "sellCount", "salesAmount")

    // shops seen in month 4 but not in month 5 — subject of the active query
    val df3 = spark.read.orc("s3a://o2o-dataproces-group/chen_lixiu/2020/5/mttg/0604_du/shopNoJoin4")
//      .selectExpr("province","ceil(sellCount*0.91) sellCount","cast(salesAmount*0.91 as decimal(20,2)) salesAmount")

    // Previous-month baseline. Registered directly as a statement: the old
    // `val df4 = ... .registerTempTable(...)` bound Unit (registerTempTable
    // returns Unit), so the val was meaningless. registerTempTable itself is
    // deprecated since Spark 2.0 in favor of createOrReplaceTempView.
    spark.read.orc("s3a://dws-data/g_data/2020/4/meituan_tg/")
      .select("province", "sellCount", "salesAmount")
      .createOrReplaceTempView("t4")

//    df1.union(df2.union(df3)).registerTempTable("t5")
//    df1.dropDuplicates("good_id").registerTempTable("t")
    df3.createOrReplaceTempView("t")
    /*spark.sql(
      """
        |select
        |a.province,sellCount5,sellCount4,((sellCount5/sellCount4)-1)*100 sellCount_mom,
        |  salesAmount5,salesAmount4,((salesAmount5/salesAmount4)-1)*100 salesAmount_mom
        |from
        |(select province,sum(sellCount) sellCount5,sum(salesAmount/10000) salesAmount5 from t5 group by province)a
        |left join
        |(select province,sum(sellCount) sellCount4,sum(salesAmount/10000) salesAmount4 from t4 group by province)b
        |on a.province=b.province
        |""".stripMargin)
//      .show(100,100)
      .repartition(1).write.option("header","true")
      .csv("C:\\Users\\o2o-rd-0009\\Desktop\\mttg2005\\0002")*/

    // Spot-check: per-good figures for one province, highest price first.
    spark.sql(
      """
        |select
        |--province,sum(sellCount) sellCount5,sum(salesAmount/10000) salesAmount5
        |priceText,sellCount,salesAmount
        |from t
        |--group by province
        |where province = '江苏省'
        |order by priceText desc
        |""".stripMargin)
      .show(100, 100)
//      .repartition(1).write.option("header","true")
//      .csv("C:\\Users\\o2o-rd-0009\\Desktop\\mttg2005\\nojoin4")

    /*var thisMonthPath = s"s3a://o2o-dataproces-group/chen_lixiu/2020/5/mttg/good_0603/"
    var lastMonthPath = s"s3a://o2o-dataproces-group/chen_lixiu/2020/4/mttg/good/"
    spark.read.orc(thisMonthPath).registerTempTable("t5")
    spark.read.orc(lastMonthPath).select("good_id","sellCount").registerTempTable("t4")
    spark.sql(
      """
        |select
        |a.*,
        |case when cast(a.sellCount as bigint)>10000 and b.good_id is not null  then cast(ceil(b.sellCount*1.2) as bigint)
        |     else a.sellCount end sellCount_tmp
        |from t5 a left join t4 b
        |on a.good_id = b.good_id
        |""".stripMargin)
      .withColumnRenamed("sellCount","sellCount_bak")
      .withColumnRenamed("sellCount_tmp","sellCount")
      .selectExpr("*","cast((sellCount*priceText) as decimal(20,2)) as salesAmount_tmp")
      .withColumnRenamed("salesAmount","salesAmount_bak")
      .withColumnRenamed("salesAmount_tmp","salesAmount")
      .repartition(1).write.orc("s3a://o2o-dataproces-group/chen_lixiu/2020/5/mttg/good/")*/

//    val df_db = spark.read.orc("s3a://o2o-sourcedata/obs-source-2020/5/pinduoduo/background_cate_2006/")
//      .select("rootCategoryId","rootCategoryName","categoryId","categoryName","subCategoryId","subCategoryName")
//      .dropDuplicates("subCategoryId")
//    val index =  ""

    // NOTE(review): the block below references JSON.parseObject (fastjson);
    // the import is missing — add it before uncommenting.
    /*var thisMonthPath = s"s3a://o2o-sourcedata/obs-source-2020/5/mttg/mt_tg_detail/"
    var lastMonthPath = s"s3a://o2o-sourcedata/obs-source-2020/4/mttg/mt_tg_detail/"
    spark.read.json(thisMonthPath).where("sellCount != '-1'")
      .selectExpr("good_id","sellCount","sellCountText","priceText").registerTempTable("t5")
    spark.read.json(lastMonthPath).select("good_id","sellCount","sellCountText").registerTempTable("t4")

    val value = spark.sql(
        """
          |select
          |a. good_id good_id,a.sellCount sellCount5,a.sellCountText sellCountText5,priceText,
          |                   b.sellCount sellCount4,b.sellCountText sellCountText4
          |from t5 a join t4 b
          |on a.good_id = b.good_id
          |""".stripMargin).toJSON.rdd.map(line=>{

      val nObject = JSON.parseObject(line)
      val priceText = nObject.get("priceText").toString.toDouble
      val sellCount5 = nObject.get("sellCount5").toString.toInt
      val sellCount4 = nObject.get("sellCount4").toString.toInt
      val sellCountText5_tmp = nObject.get("sellCountText5").toString.replace("半年销量","").replace("已售","")
        .replace(" ","")
      val sellCountText4_tmp = nObject.get("sellCountText4").toString.replace("半年销量","").replace("已售","")
        .replace(" ","")
      var sellCountText5 = 0
      var sellCountText4 = 0
      if(sellCountText5_tmp.contains("万+")){
        var random1 = scala.util.Random.nextInt(1000).toString.toInt
        sellCountText5 = (sellCountText5_tmp.replace("万+","").toDouble*10000+random1).toInt
      }else if(sellCountText5_tmp.contains("万")){
        var random2 = scala.util.Random.nextInt(100).toString.toInt
        sellCountText5 = (sellCountText5_tmp.replace("万","").toDouble*10000+random2).toInt
      }
      else{
        sellCountText5=sellCountText5_tmp.toInt
      }

      if(sellCountText4_tmp.contains("万+")){
        var random3 = scala.util.Random.nextInt(1000).toString.toInt
        sellCountText4 = (sellCountText4_tmp.replace("万+","").toDouble*10000+random3).toInt
      }else if(sellCountText4_tmp.contains("万")){
        var random4 = scala.util.Random.nextInt(100).toString.toInt
        sellCountText4 = (sellCountText4_tmp.replace("万","").toDouble*10000+random4).toInt
      }else{
        // fixed: originally assigned to sellCountText5 (copy-paste typo),
        // which would have left sellCountText4 at 0 and clobbered month 5.
        sellCountText4=sellCountText4_tmp.toInt
      }

      var sellCount = sellCount5-sellCount4
      val sellCountText = sellCountText5-sellCountText4

      if(sellCount5!=0&&sellCount4!=0){
        if(sellCount5>20000 && sellCount5/sellCount4>2.5){
          sellCount=Math.ceil(sellCountText5/5.5).toInt
        }else if(sellCount5<=20000 && sellCount5/sellCount4>5.5){
          sellCount=Math.ceil(sellCountText5/5.5).toInt
        }
        if(sellCount==0 && sellCountText==0){
          sellCount=Math.ceil(sellCountText5/5.5).toInt
        }
      }

      nObject.put("sellCount",sellCount)
      nObject.put("salesAmount",(sellCount*priceText).formatted("%.2f").toDouble)
      nObject.toString
      }).cache()

    spark.read.json(value).registerTempTable("t")

    spark.sql(
      """
        |select
        |count(1) count,sum(sellCount) sellCount,sum(salesAmount) salesAmount
        |from t
        |
        |""".stripMargin).show(100,100)

    spark.sql(
      """
        |select
        |count(1) count,sum(sellCount) sellCount,sum(salesAmount) salesAmount
        |from t
        |where sellCount>0
        |""".stripMargin).show(100,100)*/

    // Release local Spark resources; previously the session was never stopped.
    spark.stop()
  }
}
