package com.o2o.cleaning.month.platform.ebusiness_plat.jumei_2019_7.Jumei_utils

import com.alibaba.fastjson.{JSON, JSONObject}
import org.apache.spark.sql.functions.lit
import org.apache.spark.sql.{DataFrame, SparkSession}
//import platform.jumei.jumei_th1.JuMei
/**
  * @Description Cleaning utilities for raw Jumei (聚美优品) crawl data: normalizes per-record
  *              price/sales fields from the add_to_field history array, then computes monthly
  *              sales amounts for delisted vs. still-on-sale goods.
  * @Author liutaowei
  * @Date 2018/11/21 20:24
  */
object base_util {

  /**
    * Cleans raw Jumei records into a flat DataFrame, one row per good.
    *
    * Each input record carries an `add_to_field` JSON array of crawl snapshots; the latest
    * snapshot supplies priceText / sellCount / is_onsell. Records with an empty history are
    * dropped, price ranges ("low-high") collapse to their lower bound, the `function` field is
    * normalized to a JSON object, and duplicates are removed by good_id.
    *
    * @param rdd   raw records as a DataFrame (one JSON document per row)
    * @param spark active session used to re-read the cleaned JSON strings
    * @return cleaned DataFrame, de-duplicated on good_id
    */
  def base_util_jumei(rdd:DataFrame,spark:SparkSession):DataFrame ={
    // Keep only records whose add_to_field snapshot history is non-empty.
    val jumei_s = rdd.toJSON.rdd.filter(lines => {
      JSON.parseObject(lines).getJSONArray("add_to_field").size() > 0
    }).map(lines => {
      val nObject = JSON.parseObject(lines)
      val baseInfo = nObject.get("Base_Info").toString

      // The last element of the history array is the latest snapshot; it carries the
      // sales volume and price fields we flatten onto the record.
      val history = nObject.getJSONArray("add_to_field")
      val latest = history.getJSONObject(history.size() - 1)

      // good_id embeds good_type; stripping it yields the bare numeric id.
      val goodId = nObject.get("good_id").toString
      val goodType = nObject.get("good_type").toString
      nObject.put("temp_good_id", goodId.replace(goodType, ""))

      val priceText = latest.get("priceText").toString
      val sellCount = latest.get("sellCount").toString
      val isOnSell = latest.get("is_onsell").toString
      val originalCost = nObject.get("original_cost").toString

      // A price of "-1" means missing; a "low-high" range collapses to its lower bound.
      def lowerBound(raw: String): String =
        if (raw.equals("-1")) "-1"
        else if (raw.contains("-")) raw.split("-")(0)
        else raw

      val price = lowerBound(priceText)
      val organ = lowerBound(originalCost)

      // Normalize "function": when absent ("-1"), substitute the {"fuyi":"-1"} placeholder
      // object; otherwise parse the stored JSON string into a real object.
      val function = nObject.getOrDefault("function", "-1").toString
      val functionJson =
        if (function.equals("-1")) JSON.parseObject("{\"fuyi\":\"-1\"}")
        else JSON.parseObject(function)
      nObject.put("function", functionJson)

      // Records without a shopId belong to Jumei's self-operated store.
      val shopId = nObject.getOrDefault("shopId", "-1").toString
      if (shopId.equals("-1")) {
        nObject.put("shopName", "聚美优品自营店")
      }

      nObject.put("Base_Info", baseInfo)
      nObject.put("priceText", price)
      nObject.put("sellCount", sellCount)
      nObject.put("original_cost", organ)
      nObject.put("is_onsell", isOnSell)
      nObject.toString
    })

    spark.read.json(jumei_s).dropDuplicates("good_id")
  }

  /**
    * Computes per-good monthly sales amounts over the full (全量) Jumei raw data.
    *
    * Goods that reached a final snapshot inside [start_time, end_time) get their monthly
    * count as last-snapshot sellCount minus first-snapshot sellCount; goods still on sale
    * (end_time == '-1') use their cumulative sellCount directly. Both partitions are merged
    * and stamped with the Jumei platform identifiers.
    *
    * NOTE(review): start_time/end_time are interpolated straight into the SQL text —
    * assumed to come from trusted config, not user input; confirm at the call site.
    *
    * @param frame      full raw data for the month
    * @param spark      active session for SQL over the temp view
    * @param start_time inclusive lower bound on the last snapshot's end_time
    * @param end_time   exclusive upper bound on the last snapshot's end_time
    * @return merged DataFrame with salesAmount, platformName/platformId/shopType stamped
    */
  def filter_isNotOnSell(frame:DataFrame,spark:SparkSession,start_time:String,end_time:String):DataFrame ={

    // Register the full raw data (minus any precomputed salesAmount) as a temp view.
    // createOrReplaceTempView replaces the deprecated registerTempTable (same behavior).
    frame.drop("salesAmount")
      .createOrReplaceTempView("source_da_1")

//    Previous month's delisted-goods path (kept for reference):
//    val value = spark.read.json(JuMei.isOnsell_false_Url)
//      .select("good_id")
//      .dropDuplicates("good_id")
//    value.registerTempTable("is_onsell_false_1")

    // Goods whose last snapshot ended within the month: monthly count is the delta
    // between the last and first snapshots' cumulative sellCount.
    val data_1 = spark.sql(
      s"""
         |select *,
         |(end_sell - start_sell) as sell_Count,
         |((end_sell - start_sell) * priceText) as salesAmount
         |from(
         |     select *,
         |     add_to_field[0].sellCount start_sell,
         |     add_to_field[size(add_to_field)-1].sellCount end_sell
         |     from source_da_1
         |     where is_onsell = 'true'
         |     and add_to_field[size(add_to_field)-1].end_time >= '${start_time}'
         |     and add_to_field[size(add_to_field)-1].end_time < '${end_time}'
         |     )
      """.stripMargin)
      .drop("sellCount")
      .withColumnRenamed("sell_Count","sellCount")

    // Goods still on sale at month end (end_time == '-1'): cumulative sellCount is used as-is.
    val data_2 = spark.sql(
      """
        |select *,
        |(sellCount * priceText) as salesAmount
        |from
        |source_da_1 a
        |where is_onsell = 'true'
        |and add_to_field[size(add_to_field)-1].end_time = '-1'
      """.stripMargin)

    // Merge the two partitions via a JSON round-trip (their schemas differ: data_1 adds
    // start_sell/end_sell), then stamp the Jumei platform identity columns.
    spark.read.json(data_1.toJSON.rdd.union(data_2.toJSON.rdd))
      .drop("shopType","platformId","platformName")
      .withColumn("platformName",lit("聚美优品"))
      .withColumn("platformId",lit("6"))
      .withColumn("shopType",lit("B"))
  }
}
