package com.zhaosc.spark.core

import scala.io.Source
import org.apache.spark.SparkConf
import org.apache.spark.SparkContext

object FilterCountDemo {

  /**
   * Demo job over a tab-separated log file:
   *   1. On a 50% with-replacement sample (fixed seed 1), find the most
   *      frequent value of column 1 ("the top name") and print it.
   *   2. Over the FULL file, drop every line containing that top name,
   *      then sum column 2 (as Int) per column-1 value and print the
   *      totals as (total, name) pairs in descending order of total.
   *
   * NOTE(review): the input path is hard-coded to a local Windows file
   * and the master is forced to "local" — fine for a demo, not for
   * cluster use. `first()` below throws if the sample is empty, which
   * matches the original `take(1)(0)` behavior.
   */
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf().setAppName("logTest")
      .setMaster("local")
    val sc = new SparkContext(conf)

    val fileRdd = sc.textFile("D:\\zhaoshichao\\workspace\\com.zhaosc.spark\\src\\main\\java\\log.txt")

    // Most frequent column-1 value in the sample. `sortBy(_._2, false)`
    // replaces the original swap + sortByKey(false) + postfix `_2` idiom
    // (the whitespace postfix accessor is deprecated Scala syntax).
    val topName = fileRdd
      .sample(withReplacement = true, fraction = 0.5, seed = 1)
      .map(line => (line.split("\t")(1), 1))
      .reduceByKey(_ + _)
      .sortBy(_._2, ascending = false) // highest count first
      .map(_._1)
      .first() // throws on empty sample, same as take(1)(0)

    println(topName)

    // Aggregate column 2 per name over lines that do NOT mention topName,
    // printing (total, name) in descending order — same output shape as
    // the original swap/sortByKey pipeline.
    fileRdd
      .filter(line => !line.contains(topName))
      .map { line =>
        val paras = line.split("\t")
        (paras(1), paras(2).toInt) // assumes column 2 is numeric — TODO confirm input format
      }
      .reduceByKey(_ + _)
      .sortBy(_._2, ascending = false)
      .map(_.swap) // emit as (total, name) to keep the original printed format
      .foreach(println)

    sc.stop()
  }

}