package com.atguigu0.core

import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

/**
 * WordCount example for local testing. Reads a text file, counts word
 * occurrences, and saves the result as Hadoop-style part files
 * (part-00000, part-00001, ...).
 *
 * @author baojinlong
 * @since 2020/6/10
 */
object WordCountInLocal {

  // Defaults preserved from the original hard-coded local-test paths;
  // used when no CLI arguments are supplied.
  private val DefaultInputPath  = "E:/test-data/input/wordcount.txt"
  private val DefaultOutputPath = "E:/test-data/output/wordcount-result"

  /**
   * Entry point.
   *
   * Optionally accepts paths on the command line:
   *   args(0) = input file path, args(1) = output directory path.
   * Falls back to the hard-coded local defaults when arguments are absent,
   * so invoking with no arguments behaves exactly as before.
   */
  def main(args: Array[String]): Unit = {
    // Example cluster submission (the three deploy modes can coexist):
    // bin/spark-submit --master yarn --class com.atguigu.test.WordCount target/WordCount.jar /word-data.txt /sparkOut2
    val inputPath: String  = if (args.length > 0) args(0) else DefaultInputPath
    val outputPath: String = if (args.length > 1) args(1) else DefaultOutputPath

    // Build the Spark configuration; local[*] runs with all available cores.
    val sparkConf: SparkConf = new SparkConf().setAppName("myWordCount").setMaster("local[*]")
    // Create the SparkContext (the driver's connection to the cluster).
    val sc: SparkContext = new SparkContext(sparkConf)

    try {
      // Read the input file as an RDD of lines.
      val line: RDD[String] = sc.textFile(inputPath)
      // Flatten each line into space-delimited words.
      val word: RDD[String] = line.flatMap(_.split(" "))
      // Pair each word with an initial count of 1.
      val wordAndOne: RDD[(String, Int)] = word.map((_, 1))
      // Sum the counts per word.
      val wordAndCount: RDD[(String, Int)] = wordAndOne.reduceByKey(_ + _)
      // Persist the result; NOTE: fails if the output directory already exists.
      wordAndCount.saveAsTextFile(outputPath)
    } finally {
      // Always release Spark resources, even if the job fails.
      sc.stop()
    }
  }
}
