package chapter03

import org.apache.log4j.{Level, Logger}
import org.apache.spark.{SparkConf, SparkContext}

object Test33_store {

  /**
   * Computes a weighted "hot category" ranking from a user-visit-action log.
   *
   * Each input line is an underscore-separated record. Click, order and payment
   * events are counted per category id, combined with weights
   * (click 0.2, order 0.3, payment 0.5), and the top 10 categories by total
   * score are printed. Intermediate results are printed along the way so the
   * data shape can be inspected.
   */
  def main(args: Array[String]): Unit = {
    Logger.getLogger("org.apache.spark").setLevel(Level.WARN)
    val conf = new SparkConf().setMaster("local[*]").setAppName("store")
    val sc = new SparkContext(conf)

    // Raw action log, split into fields.
    // cache() because this RDD feeds three independent filters below; without
    // it the file would be re-read and re-split for every downstream action.
    val actions = sc.textFile("input/user_visit_action.txt")
      .map(_.split("_"))
      .cache()

    // Click events: fields 6/7 (category id / product id) are "-1" for non-click rows.
    val clickData = actions
      .filter(e => !(e(6).equals("-1") && e(7).equals("-1")))
    // Order events: fields 8/9 are the literal string "null" when absent.
    val orderData = actions
      .filter(e => !(e(8).equals("null") && e(9).equals("null")))
    // Payment events: fields 10/11, same "null" convention.
    val buyData = actions
      .filter(e => !(e(10).equals("null") && e(11).equals("null")))

    // Peek at the click-record structure.
    println(clickData.map(e => e.toList).take(2).toList)

    // Click count per category id, tagged 0 so the weighting step can
    // distinguish the three sources after the union.
    val clickCounts = clickData
      .map(e => e(6))
      .map(e => (e, 1))
      .reduceByKey(_ + _)
      .map(e => (e._1, e._2, 0))
    println(clickCounts.take(2).toList)

    // Order count per category id; field 8 may hold several comma-separated ids. Tagged 1.
    val orderCounts = orderData
      .flatMap(e => e(8).split(","))
      .map(e => (e, 1))
      .reduceByKey(_ + _)
      .map(e => (e._1, e._2, 1))
    println(orderCounts.take(2).toList)

    // Payment count per category id; field 10 may hold several comma-separated ids. Tagged 2.
    val buyCounts = buyData
      .flatMap(e => e(10).split(","))
      .map(e => (e, 1))
      .reduceByKey(_ + _)
      .map(e => (e._1, e._2, 2))
    println(buyCounts.take(2).toList)

    // Merge the three tagged count streams and group by category id.
    val grouped = clickCounts
      .union(orderCounts)
      .union(buyCounts)
      .groupBy(e => e._1)
    println(grouped.take(2).toList)

    // Apply the source weight to each count: click 0.2, order 0.3, payment 0.5.
    val weighted = grouped.mapValues(e => e.map(f => {
      if (f._3 == 0) f._2 * 0.2 else if (f._3 == 1) f._2 * 0.3 else f._2 * 0.5
    }))
    println(weighted.take(2).toList)

    // Total weighted score per category, descending; keep the top 10.
    val top10 = weighted
      .mapValues(e => e.sum)
      .sortBy(e => e._2, false)
      .take(10)
    println(top10.toList)

    // Release the driver's resources; the original leaked the SparkContext.
    sc.stop()
  }
}
