package ScalaSpark

import org.apache.spark.{SparkConf, SparkContext}

/**
  * Create an RDD from a local text file and count its total number of
  * characters (sum of line lengths).
  *
  * Usage: the first command-line argument, if present, is the input path
  * (e.g. "file:///path/to/file.txt"); otherwise the original hard-coded
  * path is used for backward compatibility.
  */
object ScalaLocalFile {

  def main(args: Array[String]): Unit = {
    // Run locally with 4 worker threads; no cluster required.
    val conf = new SparkConf().setAppName("ss").setMaster("local[4]")

    val sc = new SparkContext(conf)

    try {
      // Allow the input path to be supplied on the command line;
      // default to the original hard-coded path.
      val path =
        if (args.nonEmpty) args(0)
        else "file:///C:\\Users\\10902\\Desktop\\spark.txt"

      // minPartitions = 1: the input is small, one partition suffices.
      val lines = sc.textFile(path, 1)

      // Sum the length of every line. Unlike reduce, fold(0) is safe on
      // an empty RDD and yields 0 instead of throwing
      // UnsupportedOperationException.
      val count = lines.map(_.length).fold(0)(_ + _)

      println("统计文本文件字数：" + count)
    } finally {
      // Always release the SparkContext, even if the job fails.
      sc.stop()
    }
  }

}
