//import org.apache.spark.broadcast.Broadcast
//import org.apache.spark.{SparkConf, SparkContext}
//import org.apache.spark.mllib.recommendation._
//import org.apache.spark.rdd._
//import org.apache.spark.sql.{DataFrame, Dataset}
//
//import scala.collection.mutable.ArrayBuffer
//import scala.util.Random
//
// NOTE(review): this entire file is commented out (dead code). The notes below are
// review annotations to address before re-enabling; no executable text was changed.
//object MySpark {
//  def main(args: Array[String]) {
//    val conf = new SparkConf().setAppName("mySpark")
//    conf.setMaster("local")
//    val sc =new SparkContext(conf)
//
//    // NOTE(review): hard-coded local paths; parameterize via args before re-enabling.
//    val rawUserArtistData=sc.textFile("/spark-learning/user_artist_data.txt");
//    val rawArtistData=sc.textFile("/spark-learning/artist_data.txt");
//
//    // Parse "id<TAB>name" lines into (id, name); malformed lines are dropped
//    // (empty name via None, non-numeric id via the NumberFormatException case).
//    val artistByID=rawArtistData.flatMap{line=>
//      val (id,name)=line.span(_!='\t')
//      if(name.isEmpty){
//        None
//      }else{
//        try{
//          // name still carries the leading '\t' from span, hence the trim.
//          Some((id.toInt,name.trim))
//        }catch {
//          case e:NumberFormatException=>None
//        }
//      }
//    }
//
//    val rawArtistAlias=sc.textFile("/spark-learning/artist_alias.txt")
//    val artistAlias=rawArtistAlias.flatMap{line=>
//      val tokens=line.split('\t')
//      if(tokens(0).isEmpty){
//        None
//      }else {
//        // NOTE(review): tokens(1) is unguarded — a line with no '\t' (single token)
//        // throws ArrayIndexOutOfBoundsException here; also toInt can throw.
//        Some((tokens(0).toInt,tokens(1).toInt))
//      }
//    }.collectAsMap()
//
//
//    // Broadcast the (badId -> canonicalId) alias map so each task gets one copy.
//    val bArtistAlias=sc.broadcast(artistAlias)
//    // NOTE(review): "trainDate" is a typo — rename to trainData (also at the
//    // ALS.trainImplicit call below).
//    val trainDate=rawUserArtistData.map { line =>
//      // NOTE(review): map(_.toInt) throws on malformed lines; no bad-line handling here.
//      val Array(userID, artistID, count) = line.split(' ').map(_.toInt)
//      // Map aliased artist IDs to their canonical ID; unknown IDs pass through.
//      val finalArtistID= bArtistAlias.value.getOrElse(artistID,artistID)
//      Rating(userID,finalArtistID,count)
//    }.cache()
//
//    // ALS.trainImplicit(ratings, rank=10, iterations=5, lambda=0.01, alpha=1.0)
//    val model=ALS.trainImplicit(trainDate,10,5,0.01,1.0)
//    // NOTE(review): save fails if the "model" path already exists — delete or version it.
//    model.save(sc,"model")
//    model.userFeatures.mapValues(_.mkString(",")).first()
//    model.productFeatures.mapValues(_.mkString(",")).first()
//
//    // NOTE(review): user ID 120 is hard-coded here and below; parameterize.
//    val rawArtistsForUser=rawUserArtistData.map(_.split(' ')).
//      filter{case Array(user,_,_)=>user.toInt==120}
//
//    val existingProducts=
//      rawArtistsForUser.map{case Array(_,artist,_)=>artist.toInt}.
//        collect().toSet
//
//    // Print the names of artists user 120 has already listened to.
//    artistByID.filter {case (id,name) =>
//      existingProducts.contains(id)
//    }.values.collect().foreach(println)
//
//    val recommendations=model.recommendProducts(120,5)
//    recommendations.foreach(println)
//
//    // Print the names of the recommended artists.
//    val recommendedProductIDs=recommendations.map(_.product).toSet
//    artistByID.filter{case(id,name)=>
//      recommendedProductIDs.contains(id)
//    }.values.collect().foreach(println)
//
//
//
//
//    // Compute AUC for evaluation (translated from the original Chinese comment).
//    //计算AUC进行评估
//    // NOTE(review): this helper takes Dataset[String], but everything above uses
//    // RDDs from sc.textFile — API mismatch. Dataset.flatMap also needs an Encoder
//    // (import spark.implicits._), which the commented import list does not provide.
//    def buildArtistAlias(rawArtistAlias: Dataset[String]): Map[Int,Int] = {
//      rawArtistAlias.flatMap { line =>
//        val Array(artist, alias) = line.split('\t')
//        if (artist.isEmpty) {
//          None
//        } else {
//          Some((artist.toInt, alias.toInt))
//        }
//      }.collect().toMap
//    }
//
//    // NOTE(review): dead overload — empty body, so it returns Unit and is never
//    // called. Remove it or implement it; keeping both overloads is confusing.
//    def areaUnderCurve(
//                        positiveData: RDD[Rating],
//                        bAllItemIDs: Broadcast[Array[Int]],
//                        predictFunction:(RDD[(Int,Int)]=>RDD[Rating]))={
//
//    }
//
//    // NOTE(review): this DataFrame version uses count/lit/mean/sum and the $
//    // interpolator — it requires `import org.apache.spark.sql.functions._` and
//    // `import spark.implicits._`, neither of which appears in the commented imports.
//    def areaUnderCurve(
//                        positiveData: DataFrame,
//                        bAllArtistIDs: Broadcast[Array[Int]],
//                        predictFunction: (DataFrame => DataFrame)): Double = {
//
//      // What this actually computes is AUC, per user. The result is actually something
//      // that might be called "mean AUC".
//
//      // Take held-out data as the "positive".
//      // Make predictions for each of them, including a numeric score
//      val positivePredictions = predictFunction(positiveData.select("user", "artist")).
//        withColumnRenamed("prediction", "positivePrediction")
//
//      // BinaryClassificationMetrics.areaUnderROC is not used here since there are really lots of
//      // small AUC problems, and it would be inefficient, when a direct computation is available.
//
//      // Create a set of "negative" products for each user. These are randomly chosen
//      // from among all of the other artists, excluding those that are "positive" for the user.
//      val negativeData = positiveData.select("user", "artist").as[(Int,Int)].
//        groupByKey { case (user, _) => user }.
//        flatMapGroups { case (userID, userIDAndPosArtistIDs) =>
//          val random = new Random()
//          val posItemIDSet = userIDAndPosArtistIDs.map { case (_, artist) => artist }.toSet
//          val negative = new ArrayBuffer[Int]()
//          val allArtistIDs = bAllArtistIDs.value
//          var i = 0
//          // Make at most one pass over all artists to avoid an infinite loop.
//          // Also stop when number of negative equals positive set size
//          // NOTE(review): may produce fewer negatives than positives (unlucky draws
//          // count against the single pass), and duplicates are possible because
//          // `negative` is not a Set — only posItemIDSet membership is checked.
//          while (i < allArtistIDs.length && negative.size < posItemIDSet.size) {
//            val artistID = allArtistIDs(random.nextInt(allArtistIDs.length))
//            // Only add new distinct IDs
//            if (!posItemIDSet.contains(artistID)) {
//              negative += artistID
//            }
//            i += 1
//          }
//          // Return the set with user ID added back
//          negative.map(artistID => (userID, artistID))
//        }.toDF("user", "artist")
//
//      // Make predictions on the rest:
//      val negativePredictions = predictFunction(negativeData).
//        withColumnRenamed("prediction", "negativePrediction")
//
//      // Join positive predictions to negative predictions by user, only.
//      // This will result in a row for every possible pairing of positive and negative
//      // predictions within each user.
//      val joinedPredictions = positivePredictions.join(negativePredictions, "user").
//        select("user", "positivePrediction", "negativePrediction").cache()
//
//      // Count the number of pairs per user
//      val allCounts = joinedPredictions.
//        groupBy("user").agg(count(lit("1")).as("total")).
//        select("user", "total")
//      // Count the number of correctly ordered pairs per user
//      val correctCounts = joinedPredictions.
//        filter($"positivePrediction" > $"negativePrediction").
//        groupBy("user").agg(count("user").as("correct")).
//        select("user", "correct")
//
//      // Combine these, compute their ratio, and average over all users
//      // NOTE(review): inner join drops users with zero correct pairs (AUC 0),
//      // which biases the mean upward; a left join with coalesce(0) would be exact.
//      val meanAUC = allCounts.join(correctCounts, "user").
//        select($"user", ($"correct" / $"total").as("auc")).
//        agg(mean("auc")).
//        as[Double].first()
//
//      joinedPredictions.unpersist()
//
//      meanAUC
//    }
//
//    // Baseline predictor: score every (user, artist) pair by the artist's global
//    // listen count from the training set; artists absent from training get null
//    // prediction via the left_outer join.
//    def predictMostListened(train: DataFrame)(allData: DataFrame): DataFrame = {
//      val listenCounts = train.groupBy("artist").
//        agg(sum("count").as("prediction")).
//        select("artist", "prediction")
//      allData.
//        join(listenCounts, Seq("artist"), "left_outer").
//        select("user", "artist", "prediction")
//    }
//
//
//
//
//  }
//}
