package com.dxf.bigdata.D05_spark_again.工程化代码.service

import com.dxf.bigdata.D05_spark_again.工程化代码.common.TService
import com.dxf.bigdata.D05_spark_again.工程化代码.dao.WordCountDao
import org.apache.spark.rdd.RDD

class WordCountService extends TService {

  // Data-access layer that loads the raw text input for the analysis.
  private val dao = new WordCountDao

  /** Counts how often each word occurs in the files under "datas/".
    *
    * Each input line is tokenised on single spaces, every token is paired
    * with 1, and the pairs are summed per word with reduceByKey. The final
    * result is collected to the driver.
    * NOTE(review): collect() materialises all distinct words on the driver —
    * fine for small vocabularies, verify for large inputs.
    *
    * @return (word, count) pairs for every distinct word in the input
    */
  def dataAnalysis(): Array[(String, Int)] = {
    val inputLines: RDD[String] = dao.readFile("datas/*")

    inputLines
      .flatMap(line => line.split(" "))
      .map((_, 1))
      .reduceByKey(_ + _)
      .collect()
  }

}
