package com.xxxx.spark

import org.apache.spark.{SparkConf, SparkContext}

/**
 * @program: day0311
 * @description: PV/UV demo — counts unique visitors (UV) per URL from a tab-separated access log.
 *               (Original motto: "To believe everything in books is worse than having no books.")
 * @author: CoreDao
 * @create: 2021-03-11 14:32
 * */

object Pvuv {

  /** Default input path, used when no path is passed on the command line. */
  private val DefaultInputPath =
    "F:\\MyIDEA\\MyScala\\sxt\\day0311\\src\\main\\resources\\data\\pvuvdata"

  /**
   * Entry point. Computes UV (unique visitors per URL) from a tab-separated
   * access log and prints one line per URL.
   *
   * @param args optional: args(0) overrides the input file path
   *             (defaults to [[DefaultInputPath]] for backward compatibility).
   */
  def main(args: Array[String]): Unit = {
    // Local mode using all available cores — this is a demo/learning job.
    val conf = new SparkConf()
    conf.setMaster("local[*]").setAppName("pvuv")
    val sc = new SparkContext(conf)

    try {
      val inputPath = args.headOption.getOrElse(DefaultInputPath)
      val lineRDD = sc.textFile(inputPath)

      // Earlier PV variant (page views per URL), kept for reference:
      /*lineRDD
        .map(x => (x.split("\t")(5),1))
        .reduceByKey(_+_)
        .foreach(println)*/

      // Earlier UV variant using reduceByKey instead of countByKey, kept for reference:
      /*lineRDD
        .map(x =>{
          val strings = x.split("\t")
          (strings(5),strings(0))
        })
        .distinct()
        .map(x=>(x._1,1))
        .reduceByKey(_+_)
        .foreach(println)*/

      // UV: distinct (url, visitor) pairs, then count pairs per url.
      // NOTE(review): assumes field 0 is the visitor id/IP and field 5 the URL
      // in the tab-separated log — confirm against the data file.
      lineRDD
        .map(_.split("\t"))
        .filter(_.length > 5) // skip malformed/short lines instead of crashing
        .map(fields => (fields(5), fields(0)))
        .distinct()
        .countByKey() // action: brings a Map[String, Long] back to the driver
        .foreach(x => println(s"key is ${x._1}  , val is ${x._2}"))
    } finally {
      // Always release the SparkContext, even if the job fails.
      sc.stop()
    }
  }

}
