package com.gy.spark.core.actions

import org.apache.spark.{SparkConf, SparkContext}

/**
 * Example driver demonstrating the `reduce` RDD *action*.
 *
 * Reads a text file, pairs every line with the count 1, then collapses all
 * pairs into a single tuple on the driver: keys joined with ":" and counts
 * summed. The combined tuple is printed to stdout.
 */
object Operator_reduce {
  def main(args: Array[String]): Unit = {
    // Local-mode context; the app name mirrors this object's class name.
    val sparkConf = new SparkConf()
      .setMaster("local")
      .setAppName(this.getClass.getSimpleName)
    val sparkContext = new SparkContext(sparkConf)

    val pairs = sparkContext.textFile("spark/input/words.txt").map(line => (line, 1))

    // NOTE(review): `reduce` throws on an empty RDD — acceptable here since
    // this is a demonstration of the action itself; verify input is non-empty.
    val combined: (String, Int) = pairs.reduce { (left, right) =>
      (left._1 + ":" + right._1, left._2 + right._2)
    }
    println(combined)

    sparkContext.stop()
  }
}
