package com.SparkCore.framwork.service

import com.SparkCore.framwork.common.TService
import com.SparkCore.framwork.dao.WordCountDao
import org.apache.spark.rdd.RDD

/**
 * Service layer: runs the word-count business logic on top of the DAO.
 */
class WordCountService extends TService {

  // Data-access object that provides the raw input lines as an RDD.
  private val wordCountDao = new WordCountDao()

  /**
   * Performs the word-count analysis over the given input file and
   * collects the result to the driver.
   *
   * @param path location of the input text file; defaults to the
   *             previously hard-coded "datas/1.txt" so existing
   *             callers keep working unchanged
   * @return array of (word, count) pairs; ordering is not guaranteed
   */
  def dataAnalysis(path: String = "datas/1.txt"): Array[(String, Int)] = {
    // 1. Read the raw lines of the input file via the DAO.
    val lines = wordCountDao.readFile(path)
    // 2. Split each line on single spaces into individual words.
    val words: RDD[String] = lines.flatMap(_.split(" "))
    // 3. Count occurrences per word. reduceByKey combines on the map
    //    side before shuffling, so only one (word, partialCount) pair
    //    per word leaves each partition — unlike the previous
    //    groupBy + size, which shuffled every single word instance.
    val wordToCount: RDD[(String, Int)] = words.map((_, 1)).reduceByKey(_ + _)
    // 4. Collect the (word, count) pairs back to the driver.
    wordToCount.collect()
  }

}
