package sparkCoreNew

import org.apache.spark.{SparkConf, SparkContext}

object work11017 {

  /**
   * Category-count job: reads a text file, splits each line on the configured
   * delimiter, keeps the pieces mentioning one of three product categories
   * (toys / clothing / home appliances), counts occurrences per distinct piece,
   * and prints the (piece, count) pairs.
   *
   * @param args optional; `args(0)` may override the input file path
   *             (defaults to the original hard-coded path for backward compatibility).
   */
  def main(args: Array[String]): Unit = {
    // Build the RDD context (local mode, all cores).
    val conf = new SparkConf().setMaster("local[*]").setAppName("hello scala")
    val sc = new SparkContext(conf)
    try {
      // Allow the input path to be supplied on the command line; fall back to the original default.
      val inputPath = args.headOption.getOrElse("D:\\15code\\data\\data10.17.txt")
      val fileRDD = sc.textFile(inputPath)

      fileRDD
        // NOTE(review): "\\\\00A" is the regex \\00A, which matches a literal
        // backslash followed by "00A". If the intended delimiter is the
        // non-breaking space U+00A0, the pattern should be "\u00A0" — confirm
        // against the actual data file before changing.
        .flatMap(_.split("\\\\00A"))
        .filter(record => record.contains("玩具") || record.contains("服装") || record.contains("家电"))
        .map((_, 1))
        .reduceByKey(_ + _)
        .foreach(println)

      // Optionally persist the result instead of printing:
      // countedRDD.saveAsTextFile("output/path")
    } finally {
      // Was missing in the original: SparkContext must be stopped to release
      // its resources (threads, UI port, temp dirs).
      sc.stop()
    }
  }

}
