package sparkCore

import org.apache.spark.{SparkConf, SparkContext}

object FileRDD {
  /** Word count over a text file, keeping only tokens that contain Chinese
    * (CJK Unified Ideograph) characters, printed as (word, count) pairs.
    *
    * The input path may be given as the first command-line argument; it
    * defaults to the original hard-coded location, so existing invocations
    * are unaffected.
    */
  def main(args: Array[String]): Unit = {
    // 1. Build the Spark configuration: local mode using all available cores.
    val conf = new SparkConf().setMaster("local[*]").setAppName("hello scala")
    // 2. Build the SparkContext from the configuration.
    val sc = new SparkContext(conf)

    try {
      // Read the data file as an RDD of lines (path overridable via args).
      val inputPath = args.headOption.getOrElse("D:\\IdeaCodes\\code\\data\\dataStudent.txt")
      val fileRDD = sc.textFile(inputPath)

      // Split each line into tokens and keep tokens containing at least one
      // CJK Unified Ideograph.
      // NOTE(review): lower bound corrected from '\u4000' to '\u4e00' —
      // CJK Unified Ideographs occupy U+4E00..U+9FFF; '\u4000' was a typo
      // that also admitted unrelated blocks (e.g. Yi syllables range start).
      // NOTE(review): the delimiter regex "\\\\00A" matches the literal text
      // `\00A` — presumably the file's field separator; TODO confirm against
      // the actual data format of dataStudent.txt.
      val chineseTokens = fileRDD
        .flatMap(_.split("\\\\00A"))
        .filter(token => token.exists(ch => ch >= '\u4e00' && ch <= '\u9fff'))

      // Classic word count: pair each token with 1, then sum per key.
      val counts = chineseTokens.map((_, 1)).reduceByKey(_ + _)
      counts.foreach(println)
      // TODO: persist results (e.g. counts.saveAsTextFile) — the original
      // left a "save file" placeholder comment here.
    } finally {
      // Always release Spark resources, even if the job fails.
      sc.stop()
    }
  }
}
