package com.txl.cn.spark04

import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

/**
  * Created by txl on 2018/1/4.
  */
object PvUvDemo {

  /**
    * Computes per-hour PV (page views) and UV (unique visitors) from a
    * tab-separated log file and prints `(hour, (pv, uv))` sorted by hour.
    *
    * Each input line is expected to look like `HH:mm:ss<TAB>userId...`
    * (only the first two tab-separated fields are read).
    *
    * @param args optional; args(0) overrides the input path
    *             (defaults to "data/data.utf8.10000" for backward compatibility)
    */
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf().setAppName("PvUv").setMaster("local[*]")
    val sc = new SparkContext(conf)
    try {
      val path = if (args.nonEmpty) args(0) else "data/data.utf8.10000"
      val data: RDD[String] = sc.textFile(path)

      // Parse each line into ((hour, userId), 1).
      // NOTE(review): assumes field 0 starts with the hour before the first
      // ':' and field 1 is the user id — matches the original parsing.
      val events: RDD[((String, String), Int)] = data.map { line =>
        val fields = line.split("\t")
        val hour   = fields(0).split("[:]")(0)
        val id     = fields(1)
        ((hour, id), 1)
      }
      // Reused below for both PV and UV; cache to avoid re-reading the file.
      events.cache()

      // PV: total hits per hour. reduceByKey combines map-side, avoiding the
      // full-record shuffle that groupBy + mapValues(sum) would incur.
      val pv: RDD[(String, Int)] =
        events.map { case ((hour, _), n) => (hour, n) }.reduceByKey(_ + _)

      // UV: number of distinct user ids per hour.
      val uv: RDD[(String, Int)] =
        events.keys.distinct().map { case (hour, _) => (hour, 1) }.reduceByKey(_ + _)

      // Same output as the original: (hour, (pv, uv)) in ascending hour order.
      val result: RDD[(String, (Int, Int))] = pv.join(uv).sortByKey()
      result.collect().foreach(println)
    } finally {
      sc.stop() // always release the SparkContext, even on failure
    }
  }
}
