package org.wj.algorithm

import org.apache.commons.lang3.StringUtils
import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

/**
 * Computes the average score per student from a whitespace-separated
 * "name score" text file and prints one `(name, average)` pair per key.
 *
 * Usage: ScoreAvg [inputPath]
 *   inputPath — optional; defaults to the original hard-coded sample file.
 *
 * NOTE: a plain `main` method is used instead of `extends App` — Spark's
 * documentation warns that `App`'s delayed initialization can leave fields
 * uninitialized when closures are serialized to executors.
 */
object ScoreAvg {

  def main(args: Array[String]): Unit = {
    // Allow the input path to be overridden; keep the original default for compatibility.
    val inputPath =
      if (args.nonEmpty) args(0)
      else "P:\\Project\\Idea\\bigdata\\spark\\src\\main\\resources\\data\\scores.txt"

    val conf = new SparkConf().setMaster("local").setAppName(this.getClass.getName)
    val sc = new SparkContext(conf)

    try {
      val source: RDD[String] = sc.textFile(inputPath)

      // Parse "name score" lines into (name, score) pairs, skipping blank lines.
      // NOTE(review): assumes the second field is always a valid number — a
      // malformed line will fail the job with NumberFormatException.
      val scores: RDD[(String, Double)] = source
        .filter(StringUtils.isNotBlank(_))
        .map { line =>
          val fields = line.split(" ")
          (fields(0), fields(1).toDouble)
        }

      // combineByKey accumulates (count, sum) per key; mapValues then derives
      // the actual average — the step the original version was missing.
      // The previous .persist() was dropped: the RDD is consumed by exactly
      // one action, so caching it only wasted memory.
      val averages: RDD[(String, Double)] = scores
        .combineByKey(
          (score: Double) => (1, score),
          (acc: (Int, Double), score: Double) => (acc._1 + 1, acc._2 + score),
          (a: (Int, Double), b: (Int, Double)) => (a._1 + b._1, a._2 + b._2)
        )
        .mapValues { case (count, sum) => sum / count }

      // foreach runs on the executors; fine for local mode, where output
      // reaches the driver console.
      averages.foreach(println(_))
    } finally {
      sc.stop() // release Spark resources even if the job fails
    }
  }
}
