package com.atguigu.userprofile.ml.app

import java.util.Properties

import com.atguigu.userprofile.bean.TagInfo
import com.atguigu.userprofile.dao.TagInfoDao
import com.atguigu.userprofile.ml.pipeline.MyPipeline
import com.atguigu.userprofile.util.MyPropertiesUtil
import org.apache.spark.SparkConf
import org.apache.spark.sql.{DataFrame, SparkSession}

object BusiGenderOriginApp {

  // Workflow:
  //  1. Extract the data to predict (users whose gender is missing — no labels).
  //  2. Load the trained pipeline model from HDFS.
  //  3. Run the model to produce predictions.
  //  4. Convert the predicted vector/index value back to its original label.
  //  5. Persist the predictions as a tag table (saveTag).
  //
  // args(0): taskId  — id of the tag task (used to look up tag metadata)
  // args(1): taskDate — partition date being processed
  def main(args: Array[String]): Unit = {

    // Environment.
    // NOTE(review): local[*] master is hard-coded — fine for local testing,
    // but in production the master should come from spark-submit.
    val sparkConf: SparkConf = new SparkConf().setAppName("busi_gender_app").setMaster("local[*]")
    val sparkSession: SparkSession = SparkSession.builder().config(sparkConf).enableHiveSupport().getOrCreate()

    val taskId: String = args(0)
    val taskDate: String = args(1)

    // 1. Extraction SQL — build per-user features for users with no gender:
    //    * top1_c1/top2_c1/top3_c1: the user's top-3 level-1 categories by visit count
    //    * male_dur_time / female_dur_time: viewing time spent on category ids
    //      treated as male-leaning (3,4,16) vs female-leaning (8,12,15)
    //    Only users whose dim_user_info.gender is empty/null are selected,
    //    i.e. exactly the population the model must predict.
    val sql =
      s"""
         |  with  user_cate1
         | as
         | (
         | select user_id, category1_id, during_time from dwd_page_log pl
         | join dim_sku_info si  on    page_item=id
         | where page_id='good_detail' and page_item_type='sku_id' and  si.dt='$taskDate'
         |  and  pl.dt='$taskDate'
         |  ),
         |  user_gender
         |  as(
         |    select id ,gender  from dim_user_info where dt='9999-99-99' and ( gender =''  or gender is  null)
         |  )
         |  select user_id, top1_c1 ,top2_c1 ,top3_c1 ,male_dur_time,female_dur_time
         |  from
         |  (
         |  select user_id,  sum(if(rk_ct=1,category1_id,0)) top1_c1,sum(if(rk_ct=2,category1_id,0))  top2_c1 ,sum(if(rk_ct=3,category1_id,0))  top3_c1 ,
         |   sum(if(category1_id in (3,4,16) ,sum_dur_time,0)) male_dur_time,
         |   sum(if(category1_id in (8,12,15),sum_dur_time,0)) female_dur_time
         |  from
         |  (
         |    select  user_id,category1_id, count(*) ct ,sum(during_time) sum_dur_time,
         |    row_number()over(partition by user_id  order by  count(*) desc  )  rk_ct
         |    from user_cate1
         |     group by user_id,category1_id
         | ) user_cate1_ct
         | where rk_ct<=3
         | group by user_id
         | )  user_feature
         |  join user_gender ug on ug.id = user_feature.user_id
        """.stripMargin

    // Source tables (dwd_page_log, dim_sku_info, dim_user_info) live in gmall2021.
    sparkSession.sql("use gmall2021")
    println("1    提数")
    val dataFrame: DataFrame = sparkSession.sql(sql)

    // 2. Load the model from HDFS; paths/db names come from config.properties.
    println("2    要把模型从HDFS中加载出来")
    val properties: Properties = MyPropertiesUtil.load("config.properties")
    val modelPath: String = properties.getProperty("model.path")
    val upDbName: String = properties.getProperty("user-profile.dbname")
    val hdfsPath: String = properties.getProperty("hdfs-store.path")

    val myPipeline: MyPipeline = new MyPipeline().loadModel(modelPath)

    // 3. Predict with the loaded model.
    println("3    用模型进行预测")
    val predictedDataFrame: DataFrame = myPipeline.predict(dataFrame)

    // 4. Map the numeric prediction back to the original label value.
    println("4   把预测结果的矢量值转换为原值")
    // Cache the result that is consumed twice (the debug show below and the
    // insert inside saveTag) so the whole pipeline is not re-executed.
    val predictedDFwithOrigin: DataFrame = myPipeline.convertOrigin(predictedDataFrame).cache()

    predictedDFwithOrigin.select("user_id", "prediction_origin").show(1000, false)

    // 5. Save the predictions as a tag table.
    saveTag(predictedDFwithOrigin, taskId, taskDate, upDbName, hdfsPath, sparkSession)

    sparkSession.stop()
  }

  // Persist predictions as a tag table:
  //  1. Tag definition is entered via the admin page (looked up by taskId).
  //  2. (Re)create the prediction tag table.
  //  3. Shape the prediction result into the tag-table structure (select).
  //  4. Write into the tag table (insert overwrite of one dt partition).
  def saveTag(predictedDFwithOrigin: DataFrame, taskId: String, taskDate: String, upDbName: String, hdfsPath: String, sparkSession: SparkSession): Unit = {
    // 2. (Re)create the tag table so its schema/comment stay in sync with the
    //    tag definition.
    val tagInfo: TagInfo = TagInfoDao.getTagInfoByTask(taskId)
    val tableName: String = "tg_busi_prediction_busigender03"
    sparkSession.sql(s"drop table if exists $upDbName.$tableName")
    val createTableSQL =
      s"""
         |create table  if not exists $upDbName.$tableName
         |(uid string,tag_value string)
         |comment  '${tagInfo.tagName}'
         |partitioned by (dt string)
         |  ROW FORMAT DELIMITED FIELDS TERMINATED BY '\\t'
         |      location  '$hdfsPath/$upDbName/$tableName'
       """.stripMargin

    println(createTableSQL)
    sparkSession.sql(createTableSQL)

    // Expose the prediction result as a temp view for the insert below.
    // createOrReplaceTempView is idempotent across re-runs in the same session.
    predictedDFwithOrigin.createOrReplaceTempView("predicted_table")

    // Fix: the insert target is fully qualified with $upDbName — the table was
    // created in $upDbName, but the old code inserted into the unqualified name
    // under a hard-coded `use user_profile0318`, which broke whenever the
    // configured database differed.
    // The result is: predicted gender for users that lacked one, union-ed with
    // the declared gender of all users NOT in the prediction set.
    val insertSQL =
      s"""
         |insert overwrite table $upDbName.$tableName  partition (dt='$taskDate')
         |   select user_id ,
         |  case prediction_origin  when 'M' then  '男性'
         |                          when 'F' then '女性' end tag_value
         |      from predicted_table
         | union all
         |      select id, if(gender='M','男性','女性' )
         |         from gmall2021.dim_user_info  ui
         |         where dt='9999-99-99'
         |         and (select count(1) as num from predicted_table  pr where  pr.user_id=ui.id)=0
       """.stripMargin

    println(insertSQL)
    sparkSession.sql(insertSQL)
  }

}
