package com.shujia.useraction

import com.shujia.utils.{Constants, SparkMain}
import org.apache.spark.sql.{DataFrame, SaveMode}

class UserActionMerge(day: String, userActionIPTV: UserActionIPTV, userActionMobile: UserActionMobile, userActionSkill: UserActionSkill) extends UserActionBase {

  import org.apache.spark.sql.functions._
  import spark.implicits._

  // Path/day-based loading is not supported for the merged table; use load() instead.
  override def load(path: String, day: String): DataFrame = ???

  /**
   * Normalize the three user-action sources into one common shape —
   * (uid, itemId, itemName) — then aggregate play counts per user/item.
   * The result is stored in the inherited `df` field for a later save().
   */
  override def process(): Unit = {
    val iptvActions: DataFrame = userActionIPTV.load()
    val mobileActions: DataFrame = userActionMobile.load()
    val skillActions: DataFrame = userActionSkill.load()

    // IPTV (exact-search) records already carry an item id; just project the columns.
    val normalizedIptv: DataFrame = iptvActions.select(col("uid"), col("itemId"), col("itemName"))

    // Mobile records: keep only plays longer than 60 (units per upstream schema —
    // presumably seconds; confirm against the mobile source).
    val normalizedMobile: DataFrame = mobileActions
      .where(col("play_time") > 60)
      .select(col("uid"), col("itemId"), col("itemName"))

    /**
     * Skill (fuzzy-search) records have no item id, so:
     * 1. load the media catalogue;
     * 2. resolve itemId by joining on the item name.
     */
    val mediaCatalogue: DataFrame = spark
      .read
      .format("csv")
      .option("sep", "#")
      .schema("id STRING,name STRING,category STRING,tags STRING,actors STRING,directors STRING,hot STRING,score STRING,releasedate STRING,similar STRING,area STRING,language STRING,isnew STRING,mapid STRING,duration STRING,hottags STRING,coldtags STRING")
      .load(Constants.MEDIA_SOURCE_PATH)

    // Inner join: skill actions whose name has no catalogue match are dropped.
    val normalizedSkill: DataFrame = skillActions
      .join(mediaCatalogue, col("itemName") === col("name"))
      .select(col("uid"), col("id").as("itemId"), col("itemName"))

    // Merge the three sources and count plays per (user, item).
    df = normalizedIptv
      .union(normalizedMobile)
      .union(normalizedSkill)
      .groupBy(col("uid"), col("itemId"), col("itemName"))
      .agg(count(col("uid")).as("num"))
  }

  /** Persist the merged action table, replacing any previous output for this day. */
  override def save(): Unit = {
    df.write
      .format(Constants.USER_ACTION_MERGE_FORMAT)
      .mode(SaveMode.Overwrite)
      .save(s"${Constants.USER_ACTION_MERGE_PATH}/$day")
  }

  /** Read back the merged action table previously written for this day. */
  override def load(): DataFrame =
    spark
      .read
      .format(Constants.USER_ACTION_MERGE_FORMAT)
      .load(s"${Constants.USER_ACTION_MERGE_PATH}/$day")
}

object UserActionMerge extends SparkMain {

  /** Job entry point: merge the three per-source action tables for `day`, then persist. */
  override def run(day: String): Unit = {
    val merger: UserActionMerge =
      UserActionMerge(day, UserActionIPTV(day), UserActionMobile(day), UserActionSkill(day))
    merger.process()
    merger.save()
  }

  /** Full factory used by [[run]]: wires all three upstream sources. */
  def apply(day: String, userActionIPTV: UserActionIPTV, userActionMobile: UserActionMobile, userActionSkill: UserActionSkill): UserActionMerge =
    new UserActionMerge(day, userActionIPTV, userActionMobile, userActionSkill)

  /** Load-only factory: upstream sources are null, so only `load()` is safe to call. */
  def apply(day: String): UserActionMerge = new UserActionMerge(day, null, null, null)

  /**
   * Cluster submission:
   * 1. prepare the data
   * 2. submit the jobs
   * spark-submit --master yarn-client --conf spark.sql.shuffle.partitions=2 --class com.shujia.useraction.UserActionIPTV --jars common-1.0.jar action-1.0.jar 2021-11-03
   * spark-submit --master yarn-client --conf spark.sql.shuffle.partitions=2 --class com.shujia.useraction.UserActionMobile --jars common-1.0.jar action-1.0.jar 2021-11-03
   * spark-submit --master yarn-client --conf spark.sql.shuffle.partitions=2 --class com.shujia.useraction.UserActionSkill --jars common-1.0.jar action-1.0.jar 2021-11-03
   * spark-submit --master yarn-client --conf spark.sql.shuffle.partitions=2 --class com.shujia.useraction.UserActionMerge --jars common-1.0.jar action-1.0.jar 2021-11-03
   *
   */
}
