import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.mllib.recommendation._
/** Builds an implicit-feedback ALS recommender from the Audioscrobbler
  * music dataset and prints five artist recommendations for one user.
  *
  * Expects these files to be reachable by Spark:
  *   /profiledata_06-May-2005/user_artist_data.txt  (userID artistID playCount)
  *   /profiledata_06-May-2005/artist_data.txt       (artistID<TAB>artistName)
  *   /profiledata_06-May-2005/artist_alias.txt      (badID<TAB>canonicalID)
  */
object Recommend {

  def main(args: Array[String]): Unit = {
    val conf = new SparkConf().setAppName("mySpark")
    conf.setMaster("local")
    val sc = new SparkContext(conf)

    try {
      val rawUserArtistData = sc.textFile("/profiledata_06-May-2005/user_artist_data.txt")
      // Sanity-check the user/artist ID ranges (they must fit in Int for
      // MLlib's Rating). Print the stats rather than discarding them.
      println(rawUserArtistData.map(_.split(' ')(0).toDouble).stats())
      println(rawUserArtistData.map(_.split(' ')(1).toDouble).stats())

      // Parse artistID -> name, skipping malformed lines instead of failing.
      val rawArtistData = sc.textFile("/profiledata_06-May-2005/artist_data.txt")
      val artistByID = rawArtistData.flatMap { line =>
        val (id, name) = line.span(_ != '\t')
        if (name.isEmpty) {
          None
        } else {
          try {
            Some((id.toInt, name.trim))
          } catch {
            case _: NumberFormatException => None
          }
        }
      }

      // Parse the alias file: maps misspelled/variant artist IDs to a
      // canonical ID. Guard against lines with no tab (tokens(1) missing).
      val rawArtistAlias = sc.textFile("/profiledata_06-May-2005/artist_alias.txt")
      val artistAlias = rawArtistAlias.flatMap { line =>
        val tokens = line.split('\t')
        if (tokens.length < 2 || tokens(0).isEmpty) {
          None
        } else {
          Some((tokens(0).toInt, tokens(1).toInt))
        }
      }.collectAsMap()

      // Spot-check two known IDs (6803336 is an alias of 1000010).
      // headOption avoids NoSuchElementException when an ID is absent,
      // and the values are printed rather than thrown away.
      artistByID.lookup(6803336).headOption.foreach(println)
      artistByID.lookup(1000010).headOption.foreach(println)

      // Broadcast the alias map once instead of shipping it with every task.
      val bArtistAlias = sc.broadcast(artistAlias)

      val trainData = rawUserArtistData.map { line =>
        val Array(userID, artistID, count) = line.split(' ').map(_.toInt)
        // Resolve aliased artist IDs to the canonical ID before training.
        val finalArtistID = bArtistAlias.value.getOrElse(artistID, artistID)
        Rating(userID, finalArtistID, count)
      }.cache() // ALS iterates over this RDD many times

      // rank = 10, iterations = 5, lambda = 0.01, alpha = 1.0
      val model = ALS.trainImplicit(trainData, 10, 5, 0.01, 1.0)
      println(model.userFeatures.mapValues(_.mkString(",")).first())

      // Top-5 recommendations for one sample user.
      val recommendations = model.recommendProducts(2093760, 5)
      recommendations.foreach(println)
      val recommendedProductIDs = recommendations.map(_.product).toSet

      // Translate the recommended artist IDs back to human-readable names.
      artistByID.filter { case (id, _) =>
        recommendedProductIDs.contains(id)
      }.values.collect().foreach(println)

      trainData.unpersist()
    } finally {
      sc.stop() // always release Spark resources, even on failure
    }
  }
}
