package scala

import org.apache.spark.{SparkConf, SparkContext}

/**
  * Basic Spark word-count example for the spark-learn project.
  *
  * Reads a text file, counts lines and "####"-marked titles, prints the
  * first line, and computes word frequencies two ways (reduceByKey and
  * countByValue).
  *
  * Created: 2019-03-14. Last updated: 2019-03-14.
  *
  * @author lds
  * @version v1.0
  * @since jdk1.8
  */
object WordCount {

  /**
    * Entry point: runs several basic Spark actions over a text file —
    * title/line counts, the first line, and two word-count variants.
    *
    * @param args optional; `args(0)` is the input file path. Defaults to
    *             "README.md" for backward compatibility when absent.
    */
  def main(args: Array[String]): Unit = {
    // Generalized: allow the input path on the command line instead of
    // hard-coding "README.md".
    val inputPath = args.headOption.getOrElse("README.md")

    val conf = new SparkConf()
      .setMaster("local")
      .setAppName("WordCount")
    val context = new SparkContext(conf)
    try {
      // Read once and cache: the RDD is reused by several actions below.
      val lines = context.textFile(inputPath).persist()

      // Lines containing "####" are treated as titles.
      val titleCount = lines.filter(_.contains("####")).count()
      println("标题数量：" + titleCount)

      val lineCount = lines.count()
      println("行数：" + lineCount)

      val firstLine = lines.first()
      println("第一行数据：" + firstLine)

      // Word count, variant 1: classic map + reduceByKey (distributed).
      val wordCountRDD = lines
        .flatMap(_.split(" "))
        .map((_, 1))
        .reduceByKey(_ + _)

      // Word count, variant 2: countByValue collects counts to the driver
      // as a local Map — only safe for small vocabularies.
      val countMap = lines.flatMap(_.split(" ")).countByValue()
      countMap.foreach { case (word, count) =>
        println("count:" + word + ":" + count)
      }

      //wordCountRDD.saveAsTextFile("readmeWordCount.txt")
      // NOTE: foreach(println) runs on executors; output appears on the
      // driver console only because the master is "local".
      wordCountRDD.foreach(println)
    } finally {
      // Fix: the original never stopped the SparkContext. Always release
      // it, even when an action throws.
      context.stop()
    }
  }
}
