package com.weic.spark.scala.p2

import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

/**
 * @Author: BigData-weic
 * @ClassName: _05ActionOps
 * @Date: 2020/12/7 23:07
 * @Description: Demonstrates common Spark RDD actions (collect, take, reduce, countByKey).
 * @Version: 1.0
 */
object _05ActionOps {
	/**
	 * Entry point: builds a small word-count RDD and demonstrates the Spark
	 * actions `collect`, `take`, `reduce`, and `countByKey`, printing each
	 * result to stdout.
	 *
	 * @param args unused command-line arguments
	 */
	def main(args: Array[String]): Unit = {
		// Local Spark context for the demo; `local[*]` uses all available cores.
		val conf = new SparkConf()
			.setAppName("_05ActionOps")
			.setMaster("local[*]")
		val sc = new SparkContext(conf)

		try {
			// Sample input: six short lines, spread across 2 partitions.
			val lines = sc.parallelize(Array(
				"hello you",
				"hello me",
				"hello you",
				"hello you",
				"hello me",
				"hello you"
			), 2)

			// Tokenise on whitespace and pair every word with an initial count of 1.
			val pairs = lines.flatMap(_.split("\\s+")).map((_, 1))

			// Per-key word counts: both the within-partition seqOp and the
			// cross-partition combOp are plain addition, starting from 0.
			val ret: RDD[(String, Int)] = pairs.aggregateByKey(0)(_ + _, _ + _)

			// collect(): materialises the whole result on the driver.
			val collected = ret.collect()
			println(s"collect----->${collected.mkString(",")}")

			// take(n): fetches only the first n elements to the driver.
			val firstOne = ret.take(1)
			println("--take---" + firstOne.mkString("[", ", ", "]"))

			// reduce(): total word occurrences across all keys.
			val totalWords = ret.values.reduce(_ + _)
			println(totalWords)

			// countByKey(): driver-side Map of occurrences per word.
			val countsByKey = pairs.countByKey()
			countsByKey.foreach { case (word, count) =>
				println(s"$word--->$count")
			}
		} finally {
			// Always release the SparkContext, even if an action above fails.
			sc.stop()
		}
	}

}
