import org.apache.spark.{SparkConf, SparkContext}

object Teacher {

  /** Counts occurrences of known teacher names in a "/"-delimited log file.
    *
    * Reads `D:\15code\teacher.log`, splits every line on "/", keeps tokens
    * containing one of the known teacher names, and prints `(token, count)`
    * pairs to stdout.
    *
    * @param args unused command-line arguments
    */
  def main(args: Array[String]): Unit = {
    // Build the Spark context (local mode, all available cores).
    val conf = new SparkConf().setMaster("local[*]").setAppName("hello scala")
    val sc = new SparkContext(conf)
    try {
      // Teacher names of interest; a token is kept if it contains any of them.
      // (Equivalent to the original chained `||` of `contains` checks, but
      // extensible without editing the pipeline.)
      val teachers = Set(
        "laozhang", "laoduan", "laoyang", "laozhao", "xiaoxu", "laoliu", "laoli"
      )

      // Read the data file and build the RDD pipeline.
      val fileRDD = sc.textFile("D:\\15code\\teacher.log")
      fileRDD
        .flatMap(_.split("/"))
        .filter(token => teachers.exists(token.contains))
        .map((_, 1))
        .reduceByKey(_ + _)
        .foreach(println)

      // Optionally persist the results instead of printing:
      // resultRDD.saveAsTextFile("")
    } finally {
      // Always release the SparkContext, even if the job throws.
      sc.stop()
    }
  }

}
