package cn.spark.study.core

import org.apache.spark.SparkConf
import org.apache.spark.SparkContext

object LetterCountHDFS {

  /**
   * Reads a text file from HDFS and prints the total number of characters
   * across all lines (sum of per-line lengths).
   *
   * @param args optional: args(0) overrides the input path;
   *             defaults to the original hard-coded HDFS location.
   */
  def main(args: Array[String]): Unit = {

    // Backward-compatible generalization: path may be supplied on the
    // command line; the default preserves the original behavior.
    val inputPath =
      if (args.nonEmpty) args(0) else "hdfs://spark1:9000/spark.txt"

    val conf = new SparkConf()
      .setAppName("LetterCountHDFS")
      .setMaster("local")

    val sc = new SparkContext(conf)
    try {
      // 5 = minimum number of partitions for the input file.
      val lines = sc.textFile(inputPath, 5)
      val lineLengths = lines.map(_.length)

      // fold(0)(_ + _) instead of reduce(_ + _): reduce throws
      // UnsupportedOperationException on an empty RDD (empty input file),
      // while fold safely yields 0.
      val totalChars = lineLengths.fold(0)(_ + _)

      println("累加的总字数为 " + totalChars)
    } finally {
      // Always release the SparkContext, even if the job fails,
      // so the local Spark runtime shuts down cleanly.
      sc.stop()
    }
  }
}