package chapter03

import org.apache.log4j.{Level, Logger}
import org.apache.spark.{SparkConf, SparkContext}

object Test35_Session {
  /**
   * For each category, counts the number of DISTINCT sessions that interacted
   * with it via a click, an order, or a payment, then prints the top 10
   * categories by that count.
   *
   * @param args optional; args(0) overrides the input file path
   *             (defaults to "input/user_visit_action.txt")
   */
  def main(args: Array[String]): Unit = {
    Logger.getLogger("org.apache.spark").setLevel(Level.WARN)
    val conf = new SparkConf().setMaster("local[*]").setAppName("session")
    val sc = new SparkContext(conf)
    try {
      // Input path is overridable from the command line; default preserves the
      // original hard-coded behavior.
      val inputPath = if (args.nonEmpty) args(0) else "input/user_visit_action.txt"
      // Split each log line into its underscore-separated fields.
      val fields = sc.textFile(inputPath).map(_.split("_"))
      // The field RDD feeds three independent filters below — cache it so the
      // input file is read and split only once.
      fields.cache()

      // Click records: fields 6 (category id) and 7 (product id) are "-1"
      // when the action is not a click.
      val clickData = fields.filter(e => !(e(6).equals("-1") && e(7).equals("-1")))
      // Order records: fields 8/9 are "null" when the action is not an order;
      // field 8 holds a comma-separated list of category ids.
      val orderData = fields.filter(e => !(e(8).equals("null") && e(9).equals("null")))
      // Payment records: fields 10/11 are "null" when the action is not a
      // payment; field 10 holds a comma-separated list of category ids.
      val payData = fields.filter(e => !(e(10).equals("null") && e(11).equals("null")))

      // Normalize every action type to (sessionId, categoryId) pairs
      // (field 2 is the session id).
      val clickPairs = clickData.map(e => (e(2), e(6)))
      val orderPairs = orderData.flatMap(e => e(8).split(",").map(cat => (e(2), cat)))
      val payPairs = payData.flatMap(e => e(10).split(",").map(cat => (e(2), cat)))

      // Count distinct sessions per category. distinct() + reduceByKey avoids
      // the original groupBy, which shuffled every session id of a category
      // into one in-memory list before deduplicating with toSet.
      val sessionCountByCategory = clickPairs
        .union(orderPairs)
        .union(payPairs)
        .map { case (session, category) => (category, session) }
        .distinct()
        .map { case (category, _) => (category, 1) }
        .reduceByKey(_ + _)

      // Top 10 categories by distinct-session count, highest first.
      println(sessionCountByCategory.sortBy(_._2, ascending = false).take(10).toList)
    } finally {
      // Always release the SparkContext, even if the job fails.
      sc.stop()
    }
  }
}
