package com.o2o.regularservice.brand_modular_202004

import com.alibaba.fastjson.JSON
import com.o2o.regularservice.brand_modular_202004.utils.{brand_modify_util, brand_state_util, laozihao_util, newAdd_brand_util}
import org.apache.spark.sql.{DataFrame, SQLContext, SparkSession}
import org.elasticsearch.spark._

/**
  * Batch job: extracts newly-appearing brands for one platform/month from
  * Elasticsearch, normalizes them (name / country-of-origin / "laozihao"
  * time-honored-brand flag), merges them into the running brand table on
  * S3/OBS, joins the brand table back onto the product documents and writes
  * the enriched documents back to the same ES index.
  */
object brand_join_ES_202004 {

  def main(args: Array[String]): Unit = {
    // SECURITY NOTE(review): ES auth and S3 access/secret keys are hardcoded
    // below. They should be supplied via spark-submit --conf / environment
    // variables instead of being committed to source control.
    val spark = SparkSession.builder()
      .master("local[*]")
      .appName("brand_join_res")
      .config("es.nodes", "192.168.1.29")
      .config("es.port", "9200")
      .config("es.net.http.auth.user", "elastic")
      .config("es.net.http.auth.pass", "changeme")
      .config("cluster.name", "O2OElastic")
      .config("spark.hadoop.fs.s3a.access.key", "GAO7EO9FWKPJ8WFCQDME")
      .config("spark.hadoop.fs.s3a.secret.key", "LZ0xaHBSYKHaJ9ECDbX9f7zin79UZkXfGoNapRPL")
      .config("spark.hadoop.fs.s3a.endpoint", "https://obs.cn-north-1.myhuaweicloud.com")
      .getOrCreate()
    val sc = spark.sparkContext
    sc.setLogLevel("ERROR")
    //=====================================================================
    // Run configuration: edit these per monthly run.
    val year = 2020
    val month = 8
    // Supported platforms: suning, kaola, miya, yunji, rongyigou
    val platformName = "rongyigou"
    val index = s"${year}_${platformName}/${platformName}_${year}_${month}"

    // The result table is written under the FOLLOWING month's path; roll the
    // year over when the current month is December.
    val (nextYear, nextMonth) = if (month == 12) (year + 1, 1) else (year, month + 1)

    // Previous (old) brand table path — this month's directory.
    val old_brand_path = s"s3a://o2o-dataproces-group/li_yinchao/Table/${year}/${month}/${platformName}"
    // Merged brand result path — next month's directory.
    val result_path = s"s3a://o2o-dataproces-group/li_yinchao/Table/${nextYear}/${nextMonth}/${platformName}"
    // Newly-added brands path — this month's directory.
    val brand_newAdd = s"s3a://o2o-dataproces-group/li_yinchao/Table/newAddBrand/${year}/${month}/${platformName}"

    // Pull the classified product documents out of ES as JSON strings.
    // Round-tripping through fastjson normalizes each document's JSON form
    // before spark.read.json infers the schema.
    val sourceDataPath_ES = sc.esJsonRDD(index).values.map { x =>
      JSON.parseObject(x).toString
    }
    val source_data = spark.read.json(sourceDataPath_ES)
    val df_old = spark.read.json(old_brand_path)

    brandJoinResult(source_data, "", df_old, result_path, brand_newAdd, year, month, platformName, spark, index)
  }

  /**
    * Merges newly-extracted brands into the old brand table, writes the merged
    * table to S3, then joins it onto the source documents and saves the result
    * back to Elasticsearch (keyed by `good_id`).
    *
    * @param sourceDataPath   classified source data (product documents)
    * @param sourceResultPath result path of the brand-tagged data (currently unused)
    * @param brandTableOld    previous month's brand table
    * @param brandTableResult output path for the merged brand table
    * @param brandNewAdd      output path for the newly-added brand table
    * @param year             run year
    * @param month            run month
    * @param platformName     platform name (e.g. "suning", "rongyigou")
    * @param spark            active SparkSession
    * @param index            ES index/type to write the joined documents to
    */
  def brandJoinResult(sourceDataPath: DataFrame, sourceResultPath: String, brandTableOld: DataFrame,
                      brandTableResult: String, brandNewAdd: String,
                      year: Int, month: Int, platformName: String, spark: SparkSession, index: String): Unit = {
    // ************************************************************************************************************
    // Runtime label stamped onto new brand rows, e.g. "2020年8月" — do not change format.
    val timeStamp = s"${year}年${month}月"
    // **************************************************************************************************************************
    println("--------------提取新增品牌--------------------------")
    val addBrand = new newAdd_brand_util
    val new_brand = addBrand.new_brand_excat(sourceDataPath, brandTableOld, spark, timeStamp)
    // Count distinct new brands; if there are none, skip the normalization
    // pipeline and join directly against the old brand table.
    val brandNewCount = new_brand.dropDuplicates("brandCcId").count()
    if (brandNewCount > 0) {
      println("--------------修改品牌名称-----------------------------")
      val brand_modify = new brand_modify_util
      val brand_stap_2 = brand_modify.brand_modify_1(new_brand, spark, platformName)
      // Cached because it feeds the country-of-origin step below.
      brand_stap_2.cache()
      println("--------------修改品牌来源国-----------------------------")
      val state = new brand_state_util
      val stateResult = state.brand_state(brand_stap_2, spark, platformName)
      println("--------------修改老字号---------------------------")
      val laozihao = new laozihao_util
      val frame_3 = laozihao.brand_add_laozihaoaddress(stateResult, spark)
      // Persist the fully-processed new-brand table, then re-read it from disk
      // so the union below works from a materialized, schema-stable copy.
      frame_3.repartition(4).write.json(brandNewAdd)

      spark.read.json(brandNewAdd).createOrReplaceTempView("frame_3")
      brandTableOld.createOrReplaceTempView("old_brand_data")
      // Merge new and old brand tables; dropDuplicates keeps one row per brandCcId.
      val result = spark.sql(
        """
          |select brandCcId,brandName,brandName_cn,brandName_en,brandValueId,brand_isLaoZiHao,brand_type,firstCategoryId,platform,timeStamp,brand_state from frame_3
          |union
          |select brandCcId,brandName,brandName_cn,brandName_en,brandValueId,brand_isLaoZiHao,brand_type,firstCategoryId,platform,timeStamp,brand_state from old_brand_data
        """.stripMargin)
        .dropDuplicates("brandCcId")
      result.repartition(1).write.json(brandTableResult)

      // Join the merged brand table onto the source documents and push to ES.
      val mergedBrandTable = spark.read.json(brandTableResult)
      joinAndSaveToEs(sourceDataPath, mergedBrandTable, spark, index)
    } else {
      // No new brands: the old brand table becomes this run's result as-is.
      brandTableOld.repartition(1).write.json(brandTableResult)
      joinAndSaveToEs(sourceDataPath, brandTableOld, spark, index)
    }
  }

  /**
    * Joins `brandTable` onto `sourceData` via the shared join helper and
    * writes the resulting documents to Elasticsearch, using `good_id` as the
    * ES document id so re-runs upsert rather than duplicate.
    */
  private def joinAndSaveToEs(sourceData: DataFrame, brandTable: DataFrame,
                              spark: SparkSession, index: String): Unit = {
    val joined = utils.brand_join_util.brand_join(sourceData, spark, brandTable)
    joined.toJSON.rdd
      .map(line => JSON.parseObject(line))
      .saveToEs(index, Map("es.mapping.id" -> "good_id"))
  }
}
