package sparkCoreNew

import org.apache.spark.{SparkConf, SparkContext}

object TeacherRDD {

  // Names of interest. NOTE: the original filter used substring matching
  // (x.contains("laozhang") || ...), so we preserve that with `exists`/`contains`
  // below rather than exact equality — a token like "laozhangsan" still matches.
  private val TeacherNames = Set(
    "laozhang", "laozhao", "laoduan", "xiaoxu", "laoyang", "laoli", "laoliu"
  )

  /**
   * Counts occurrences of known teacher names in a "/"-separated text file
   * and prints each (name, count) pair.
   *
   * @param args optional; args(0) overrides the default input path
   *             (D:\Code\data\teacher.txt) for backward compatibility.
   */
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf().setMaster("local[*]").setAppName("hello Xue")
    val sc = new SparkContext(conf)
    // Generalized: allow the path on the command line, keep the old hard-coded
    // location as the default so existing invocations behave identically.
    val inputPath = args.headOption.getOrElse("D:\\Code\\data\\teacher.txt")
    try {
      sc.textFile(inputPath)
        .flatMap(_.split("/"))
        // One Set lookup replaces the seven chained `contains` checks.
        .filter(token => TeacherNames.exists(token.contains))
        .map((_, 1))
        .reduceByKey(_ + _)
        // collect() brings results to the driver before printing; a bare
        // rdd.foreach(println) would print on executors in cluster mode.
        .collect()
        .foreach(println)
    } finally {
      // Always release the SparkContext, even if the job fails.
      sc.stop()
    }
  }
}
