package spark

import org.apache.spark.{SparkConf, SparkContext}

/**
 * @className Rdd
 * @author sjw
 * @date 2020/11/16 17:42
 * @description: TODO
 */
object Rdd {

  /** Entry point: runs the averaging demo (switch the call to try word count). */
  def main(args: Array[String]): Unit = {
    // wordCount()  // uncomment to run the word-count example instead
    average()
  }

  /**
   * Builds a [[SparkContext]] from an application name and a master URL.
   *
   * @param appName   name shown in the Spark UI
   * @param masterUrl cluster master, e.g. "local" for in-process execution
   * @return a freshly created SparkContext (caller is responsible for stopping it)
   */
  def initSparkContext(appName: String, masterUrl: String): SparkContext = {
    val conf = new SparkConf()
      .setAppName(appName)
      .setMaster(masterUrl)
    new SparkContext(conf)
  }

  /**
   * Classic word count over a local text file: split each line on spaces,
   * count occurrences per word, and print the (word, count) pairs.
   *
   * @param inputFile file URI to read; defaults to the original demo path.
   *                  Note: `file:` URIs use forward slashes even on Windows
   *                  (the previous `file:\G:\...` form is not a valid URI).
   */
  def wordCount(inputFile: String = "file:///G:/wordcount/1.txt"): Unit = {
    val sparkContext = initSparkContext("wordCount", "local")
    try {
      sparkContext.textFile(inputFile)
        .flatMap(line => line.split(" "))
        .map(word => (word, 1))
        .reduceByKey((a, b) => a + b) // sum the 1s per distinct word
        .foreach(println)
    } finally {
      // Release the context so repeated calls don't leak the local cluster.
      sparkContext.stop()
    }
  }

  /**
   * Computes the per-key average of an in-memory (name, score) dataset and
   * prints (key, (average, count)) pairs.
   *
   * Implementation: map each value to (value, 1), reduce to (sum, count) per
   * key, then divide. The division is done in Double — the original Int/Int
   * division silently truncated the average (e.g. 14/3 printed 4, not 4.67).
   */
  def average(): Unit = {
    val sparkContext = initSparkContext("average", "local")
    try {
      val scores = sparkContext.parallelize(
        Array(("孙靖武", 1), ("李双旭", 3), ("李双旭", 5), ("李双旭", 6), ("孙靖武", 3)))
      scores
        .mapValues(v => (v, 1))                              // (score, occurrence)
        .reduceByKey((x, y) => (x._1 + y._1, x._2 + y._2))   // (sum, count) per key
        .mapValues { case (sum, count) => (sum.toDouble / count, count) }
        .foreach(println)
    } finally {
      // Release the context so repeated calls don't leak the local cluster.
      sparkContext.stop()
    }
  }

}
