package com.njbdqn.datahandler

import com.njbdqn.util.{HDFSConnection, MySQLConnection}
import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.expressions.Window
import org.apache.spark.sql.functions._

object ALSDataHandler {

  /**
   * UDF converting a user-action name into its numeric rating weight.
   * BROWSE -> 1, COLLECT -> 2, BUYCAR (add to cart) -> 4, anything else -> 8
   * (i.e. an unrecognized action, presumably a purchase, gets the top weight).
   */
  val actToNum = udf { (act: String) =>
    if (act == "BROWSE") 1
    else if (act == "COLLECT") 2
    else if (act == "BUYCAR") 4
    else 8
  }
  case class UserAction(act:String,act_time:String,cust_id:String,good_id:String,browse:String)
  /**
   * Maps every goods id to a contiguous integer id ("gid"), as required by ALS.
   * Reads the goods dimension table from HDFS and numbers the rows ordered by good_id.
   * NOTE: the empty partitionBy() pulls all rows into a single partition — needed
   * here for a globally contiguous numbering, acceptable for a dimension-sized table.
   * The result is cached because it is joined against later.
   */
  def goodToNum(spark: SparkSession) = {
    import spark.implicits._
    val goodWindow = Window.partitionBy().orderBy("good_id")
    val goods = HDFSConnection.readDataFromHDFS(spark, "/myshops/dwd_good")
    goods
      .select($"good_id", row_number().over(goodWindow).alias("gid"))
      .cache()
  }
  /**
   * Maps every customer id to a contiguous integer id ("uid"), as required by ALS.
   * Reads the customs table from MySQL and numbers the rows ordered by cust_id.
   * NOTE: the empty partitionBy() pulls all rows into a single partition — needed
   * here for a globally contiguous numbering, acceptable for a dimension-sized table.
   * The result is cached because it is joined against later.
   */
  def userToNum(spark: SparkSession) = {
    import spark.implicits._
    val custWindow = Window.partitionBy().orderBy("cust_id")
    val customs = MySQLConnection.readMySQL(spark, "customs")
    customs
      .select($"cust_id", row_number().over(custWindow).alias("uid"))
      .cache()
  }

  /**
   * Builds the (gid, uid, score) rating DataFrame used to train the ALS model.
   *
   * Each log line is expected to be five space-separated fields:
   * act, act_time, cust_id, good_id, browse. Malformed lines (fewer than
   * five fields) are dropped instead of crashing the job.
   *
   * @param spark   active SparkSession
   * @param logPath glob of the raw action logs; defaults to the original
   *                hard-coded location so existing callers are unaffected
   */
  def alsData(spark: SparkSession, logPath: String = "file:///d:/log/*.log") = {
    val txt = spark.sparkContext.textFile(logPath).cache()
    import spark.implicits._
    // Score each (customer, goods) pair by summing the weights of every
    // action that customer performed on that goods item.
    val df = txt
      .map(_.split(" "))
      .filter(_.length >= 5) // guard: skip malformed/short lines
      .map(arr => UserAction(arr(0), arr(1), arr(2), arr(3), arr(4)))
      .toDF
      .select($"cust_id", $"good_id", actToNum($"act").alias("score"))
      .groupBy("cust_id", "good_id")
      .agg(sum($"score").alias("score"))
      .cache()
    // Ids may be non-numeric, so join against the contiguous-numbering tables
    // and keep only (gid, uid, score) — the shape ALS training expects.
    df.join(goodToNum(spark), Seq("good_id"), "inner")
      .join(userToNum(spark), Seq("cust_id"), "inner")
      .select("gid", "uid", "score")
  }
}
