package com.at.bigdata.spark.core.framework.service

import com.at.bigdata.spark.core.framework.common.TService
import com.at.bigdata.spark.core.framework.dao.WordCountDao
import org.apache.spark.rdd.RDD

/**
 *
 * @author cdhuangchao3
 * @date 2023/5/20 9:25 PM
 */
class WordCountService extends TService {

  private val dao = new WordCountDao

  /**
   * Runs a word-count analysis over a whitespace-delimited text file and
   * returns the per-word totals collected to the driver.
   *
   * @param path input file path read via [[WordCountDao.readFile]];
   *             defaults to the original hard-coded location `datas/1.txt`,
   *             so existing callers are unaffected
   * @return array of (word, count) pairs, in no guaranteed order
   */
  def dataAnalysis(path: String = "datas/1.txt"): Array[(String, Int)] = {
    // 1. Read the input as an RDD of lines.
    val lines: RDD[String] = dao.readFile(path)

    // 2. Split each line on single spaces and flatten into individual words.
    //    NOTE(review): split(" ") does not collapse runs of whitespace —
    //    consecutive spaces yield empty-string "words"; kept as-is to
    //    preserve existing behavior.
    val words: RDD[String] = lines.flatMap(_.split(" "))

    // 3. Pair each word with an initial count of 1.
    val wordPairs: RDD[(String, Int)] = words.map(word => (word, 1))

    // 4. Sum the counts per word. reduceByKey combines values map-side
    //    before the shuffle, unlike the previous groupBy + reduce which
    //    shuffled every (word, 1) pair and materialized a full value list
    //    per key. Result is identical: one (word, total) pair per word.
    val wordCounts: RDD[(String, Int)] = wordPairs.reduceByKey(_ + _)

    // 5. Collect the aggregated results back to the driver.
    wordCounts.collect()
  }
}
