package com.catmiao.spark.core

import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

/**
 * Minimal Spark word-count example: builds an in-memory RDD of sentences,
 * splits them into words, counts occurrences with `reduceByKey`, and prints
 * the result. Submits against a standalone cluster (see the hard-coded master URL).
 *
 * @author ChengMiao
 * @since 2024/1/4
 */
object WordCountTest {

  def main(args: Array[String]): Unit = {

    // Build the Spark configuration.
    // NOTE(review): the master URL and jar path are hard-coded to a local dev
    // environment — in practice these should come from spark-submit options.
    val sparkConfig = new SparkConf()
      .setMaster("spark://192.168.89.191:7077")
      .setJars(Array("/Users/monster/code/my/gitee/spark_study/spark_core/target/spark_core-1.0-SNAPSHOT.jar"))
      .setAppName("wordCount")

    // Create the Spark context (driver-side entry point to the cluster).
    val sparkContext: SparkContext = new SparkContext(sparkConfig)

    // Ensure the context is stopped even if the job throws, so cluster
    // resources are always released.
    try {
      // Source data: a small in-memory collection parallelized into an RDD.
      val fileRDD: RDD[String] = sparkContext.makeRDD(Array("Hello world", "Hello Spark", "Word Count"))

      // Tokenize each line into words.
      val wordRdd: RDD[String] = fileRDD.flatMap(_.split(" "))

      // Pair each word with an initial count of 1.
      val word2OneRDD: RDD[(String, Int)] = wordRdd.map((_, 1))

      // Aggregate counts per word (shuffle + combine, provided by Spark).
      val word2CountRDD: RDD[(String, Int)] = word2OneRDD.reduceByKey(_ + _)

      // Collect the aggregated result back to the driver's memory.
      // Safe here because the dataset is tiny; avoid collect() on large data.
      val word2Count: Array[(String, Int)] = word2CountRDD.collect()

      // Print each (word, count) pair.
      word2Count.foreach(println)
    } finally {
      // Shut down the Spark connection.
      sparkContext.stop()
    }

    // Example submit command for reference:
    // bin/spark-submit --class org.apache.spark.examples.SparkPi --master 'local[2]' ./examples/jars/spark-examples_2.12-3.0.0.jar  10
  }

}
