package com.lmq.sparkConcept
import org.apache.spark.SparkContext
import org.apache.spark.SparkContext._
/**
 * A simple Spark app in Scala
 */
object firstApp {
  /**
   * Entry point: reads a purchase-history CSV and prints basic aggregate
   * statistics — total purchases, unique users, total revenue, and the
   * most popular product.
   *
   * @param args command-line arguments (unused)
   */
  def main(args: Array[String]): Unit = {
    val sc = new SparkContext("local[2]", "First App")
    try {
      // Take the raw data in CSV format and convert it into a
      // set of records of the form (user, product, price).
      val data = sc.textFile("data/UserPurchaseHistory.csv")
        .map(line => line.split(","))
        .map(fields => (fields(0), fields(1), fields(2)))
      data.foreach(println)

      // Count the number of purchases.
      val numPurchases = data.count()

      // Count how many unique users made purchases.
      val uniqueUsers = data.map {
        case (user, _, _) => user
      }.distinct().count()

      // Sum up our total revenue.
      val totalRevenue = data.map { case (_, _, price) =>
        price.toDouble
      }.sum()

      // Per-product purchase counts, computed ONCE and reused below
      // (the original built the same reduceByKey pipeline twice).
      val productCounts = data
        .map { case (_, product, _) => (product, 1) }
        .reduceByKey(_ + _)
        .collect()
      println(productCounts.mkString)

      // Sort descending by the count (second tuple element);
      // the head of the sorted array is the most popular product.
      val productsByPopularity = productCounts.sortBy(-_._2)
      val mostPopular = productsByPopularity(0)

      println("Total purchase: " + numPurchases)
      println("Unique users: " + uniqueUsers)
      println("Total revenue: " + totalRevenue)
      // BUG FIX: previously `.format` applied only to the trailing
      // "purchases" literal (operator precedence), so the raw %s/%d
      // placeholders were printed instead of the actual values.
      println(s"Most popular product: ${mostPopular._1} with ${mostPopular._2} purchases")
    } finally {
      // Always release Spark resources, even if the job throws.
      sc.stop()
    }
  }
}
