package com.o2o.cleaning.month.platform.ebusiness_plat.intime

import com.o2o.cleaning.month.platform.ebusiness_plat.brand_modular.brand_join_res
import com.alibaba.fastjson.{JSON, JSONObject}
import com.o2o.utils.Iargs
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.{DataFrame, SparkSession}

/**
  * @author o2o-rd-0008
  * @since  2020/6/5 16:23
  * Description: Monthly cleaning job for the "intime" (喵街) e-commerce platform.
  * Reads raw goods JSON from OBS, normalizes price/discount fields, joins the
  * category and address dimension tables, then runs the brand-join step.
  */
object IntimeObsData {

  /**
    * Entry point for the monthly "intime" goods pipeline.
    *
    * Steps:
    *   1. Read raw goods JSON from OBS (via the s3a connector) and normalize a few
    *      fields (name, shopName, goodUrl) with fastjson.
    *   2. Compute effective price (`priceText`) and discount rate from the raw
    *      cent-denominated columns, clamping negative prices to 0.
    *   3. Derive sales amount, join the category dimension (CSV) and the address
    *      dimension (JSON), back-filling sentinel values for unmatched rows.
    *   4. Tag platform metadata, dedupe, and hand off to the brand-join step,
    *      which writes the result to `result_goodUrl`.
    */
  def main(args: Array[String]): Unit = {

    val spark = SparkSession.builder()
      .appName(s"${this.getClass.getSimpleName}")
      .config("spark.debug.maxToStringFields", "2000")
      .config("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
      // SECURITY(review): Elasticsearch host and credentials are hard-coded in
      // source; these should come from configuration / secret management.
      .config("es.nodes", "192.168.1.29")
      .config("es.port", "9200")
      .config("cluster.name", "O2OElastic")
      .config("es.net.http.auth.user", "elastic")
      .config("es.net.http.auth.pass", "changeme")
      // NOTE(review): master is pinned to local[*]; for cluster runs this should
      // be supplied by spark-submit rather than the code.
      .master("local[*]")
      .getOrCreate()

    val sc = spark.sparkContext
    // OBS is accessed through the Hadoop S3A connector; keys come from Iargs.
    sc.hadoopConfiguration.set("fs.s3a.access.key", Iargs.OBSACCESS)
    sc.hadoopConfiguration.set("fs.s3a.secret.key", Iargs.OBSSECRET)
    sc.hadoopConfiguration.set("fs.s3a.endpoint", Iargs.OBSENDPOINT)
    sc.setLogLevel("WARN")

    // Batch being processed; adjust per run.
    val year = 2021
    val month = 12
    val platformName = "intime"
    val timeStamp = Iargs.TIMESAMP

    // Raw goods data for this year/month. (Historical runs used other bucket
    // layouts, e.g. s3a://o2o-dataproces-group/zyf/obs-source-... — see VCS history.)
    val goodPath = s"s3a://o2o-sourcedata-${year}/obs-source-${year}/${month}/${platformName}/${platformName}_${year}_${month}"

    // Category dimension is frozen at the 2020/9 snapshot — TODO confirm this is intentional.
    val catePath = s"s3a://o2o-dimension-table/category_table/categoryFile_intime/2020/9/*"

    // Address dimension for the same batch month.
    val addrPath = s"s3a://o2o-dimension-table/address_table/address_table_${year}/${month}/address_platform/${platformName}_address_${year}_${month}/*"

    // Output location consumed by the brand-join step.
    val result_goodUrl = s"s3a://o2o-dataproces-group/zyf/${year}/${month}/${platformName}/good/"

    // Normalize each raw record: surface the mall name as "name", disambiguate
    // shopName with the raw address, and re-put goodUrl through getString to
    // force a plain string value.
    val value: RDD[String] = spark.read.json(goodPath).toJSON.rdd.map { line =>
      val record: JSONObject = JSON.parseObject(line)
      record.put("name", record.getString("mallName"))
      record.put("shopName", record.getString("shopName") + "（" + record.getString("address") + "）")
      record.put("goodUrl", record.getString("goodUrl"))
      record.toString
    }

    spark.read.json(value).createOrReplaceTempView("itemTab")

    // Compute effective price: raw priceText/discount are in cents; discount=-1
    // means "no discount". Negative effective prices are clamped to 0.
    val res = spark.sql(
      """
        |select
        |*,
        |case when discount='-1' then priceText/100 else case when (priceText/100)-(discount/100) < 0 then 0 else (priceText/100)-(discount/100) end end priceText_tmp,
        |discount/10 as discount_tmp
        |from
        |itemTab
      """.stripMargin).drop("priceText").drop("discount").withColumnRenamed("priceText_tmp", "priceText").withColumnRenamed("discount_tmp", "discount")
      .drop("address")

    res.createOrReplaceTempView("cacutab")

    // Sales amount = units sold * effective price; rows without sales or price
    // are excluded from downstream aggregation.
    val caculateDF = spark.sql(
      """
        |select
        |a.*,
        |a.sellCount*a.priceText as salesAmount
        |from
        |cacutab a where a.sellCount > 0 and a.priceText > 0
      """.stripMargin)

    caculateDF.where("priceText > 0").createOrReplaceTempView("tab")

    // Category dimension: derive the four category-level ids from the standard id.
    val cateDF: DataFrame = spark.read.option("header", true).csv(catePath)

    cateDF.createOrReplaceTempView("cateDF")

    spark.sql(
      """
        |select
        |substr(standId,0,5) as firstCategoryId,
        |substr(standId,0,7) as secondCategoryId,
        |standId as thirdCategoryId,
        |concat(standId,'99') as fourthCategoryId,
        |*
        |from
        |cateDF
      """.stripMargin).createOrReplaceTempView("cateTab")

    // Address dimension.
    spark.read.json(addrPath).createOrReplaceTempView("addrTab")

    // Left-join category (on categoryId) and address (on mallId=shopId),
    // back-filling sentinel codes ('10099...' for unknown category, '0'/'-1'
    // for missing address attributes) where no match is found.
    val resultDF = spark.sql(
      """
        |select
        |a.*,
        |case when b.firstCategoryId is not null then b.firstCategoryId else '10099' end firstCategoryId,
        |case when b.secondCategoryId is not null then b.secondCategoryId else '1009999' end secondCategoryId,
        |case when b.thirdCategoryId is not null then b.thirdCategoryId else '100999999' end thirdCategoryId,
        |case when b.fourthCategoryId is not null then b.fourthCategoryId else '10099999999' end fourthCategoryId,
        |case when c.shopId is not null then c.administrative_region  else  '0'                   end administrative_region,
        |case when c.shopId is not null then c.city  else  '0'                                      end  city,
        |case when c.shopId is not null then c.city_grade  else  '0'                                    end  city_grade,
        |case when c.shopId is not null then c.city_origin  else  '0'                               end  city_origin,
        |case when c.shopId is not null then c.district  else  '0'                                  end  district,
        |case when c.shopId is not null then c.district_origin  else  '0'                           end  district_origin,
        |case when c.shopId is not null then c.economic_division  else  '0'                             end  economic_division,
        |case when c.shopId is not null then c.if_city  else '0'                                        end  if_city,
        |case when c.shopId is not null then c.if_district  else  '0'                                   end  if_district,
        |case when c.shopId is not null then c.if_state_level_new_areas  else  '0'                      end  if_state_level_new_areas,
        |case when c.shopId is not null then c.poor_counties  else  '0'                                 end  poor_counties,
        |case when c.shopId is not null then c.province  else  '0'                                  end  province,
        |case when c.shopId is not null then c.regional_ID  else  '0'                              end  regional_ID,
        |case when c.shopId is not null then c.rural_demonstration_counties  else  '0'                  end  rural_demonstration_counties,
        |case when c.shopId is not null then c.rural_ecommerce  else  '0'                               end  rural_ecommerce,
        |case when c.shopId is not null then c.the_belt_and_road_city  else  '0'                        end  the_belt_and_road_city,
        |case when c.shopId is not null then c.the_belt_and_road_province  else  '0'                    end  the_belt_and_road_province,
        |case when c.shopId is not null then c.the_yangtze_river_economic_zone_city  else  '0'          end  the_yangtze_river_economic_zone_city,
        |case when c.shopId is not null then c.the_yangtze_river_economic_zone_province  else  '0'      end  the_yangtze_river_economic_zone_province,
        |case when c.shopId is not null then c.urban_agglomerations  else  '0'                          end  urban_agglomerations,
        |case when c.shopId is not null then c.address  else  '0'                                       end  address,
        |case when c.shopId is not null then c.latitude  else  '-1'                                     end latitude,
        |case when c.shopId is not null then c.longitude  else  '-1'                                    end longitude,
        |case when c.shopId is not null then c.aedzId  else  '-1'                                       end  aedzId,
        |case when c.shopId is not null then c.town  else '-1'                                          end  town
        |from
        |tab a
        |left join
        |cateTab b
        |on a.categoryId=b.categoryId
        |left join
        |addrTab c
        |on a.mallId=c.shopId
        |
      """.stripMargin)

    import org.apache.spark.sql.functions._

    // Tag platform metadata (喵街 = platform 67) and dedupe.
    val frame: DataFrame = resultDF.withColumn("platformId", lit("67")).withColumn("platformName", lit("喵街")).withColumn("timeStamp", lit(timeStamp))
      .withColumnRenamed("brandId", "brandValueId").dropDuplicates()

    // Brand join writes the final result to result_goodUrl.
    val brand = new brand_join_res
    brand.brandJoinResult(frame, result_goodUrl, year, month, platformName, spark)

    // Stops the session and its underlying SparkContext (previously only sc.stop()).
    spark.stop()
  }
}
