package com.catmiao.spark.framework.service

import com.catmiao.spark.framework.common.TService
import com.catmiao.spark.framework.dao.WordCountDao
import org.apache.spark.rdd.RDD

/**
 * @title: WordCountService
 * @projectName spark_study
 * @description: TODO
 * @author ChengMiao
 * @date 2024/3/15 15:56
 */
/**
 * Service layer for the word-count job: orchestrates the analysis
 * pipeline on top of [[WordCountDao]], which handles raw data access.
 */
class WordCountService extends TService {

  // Public so the controller/framework layer can reach the DAO if needed;
  // annotated explicitly per convention for public members.
  val dao: WordCountDao = new WordCountDao

  /**
   * Runs the word-count analysis.
   *
   * Pipeline: read lines -> split into words -> pair each word with 1
   * -> reduce by key to per-word counts -> collect to the driver.
   *
   * NOTE(review): `collect()` materializes the full result on the driver —
   * fine for this sample dataset, but would not scale to large inputs.
   *
   * @return (word, count) pairs for every distinct word in the input file
   */
  def dataAnalysis(): Array[(String, Int)] = {
    // Read the raw lines of the input file via the DAO.
    val fileRDD = dao.readFile(WordCountService.InputPath)

    // Tokenize: one RDD element per whitespace-separated word.
    val wordRdd: RDD[String] = fileRDD.flatMap(_.split(" "))

    // Map each word to a (word, 1) tuple so counts can be summed per key.
    val word2OneRDD: RDD[(String, Int)] = wordRdd.map((_, 1))

    // Aggregate the 1s per word using Spark's shuffle-based reduceByKey.
    val word2CountRDD: RDD[(String, Int)] = word2OneRDD.reduceByKey(_ + _)

    // Bring the aggregated result back to driver memory.
    word2CountRDD.collect()
  }

}

object WordCountService {
  // Path of the sample input file, relative to the working directory.
  // Kept as a named constant instead of a magic string inside the method.
  private val InputPath: String = "datas/1.txt"
}
