package com.atguigu.sparkcore.rdd.kvs

import com.atguigu.sparkcore.util.MySparkContextUtil
import org.apache.spark.{SparkConf, SparkContext}

/**
  * Demonstrates transforming only the values of a pair RDD while keeping keys unchanged.
  *
  * author 剧情再美终是戏
  * mail 13286520398@163.com
  * date 2020/1/7 10:57
  * version 1.0
  **/
object MapValue {

  def main(args: Array[String]): Unit = {
    // Obtain the shared SparkContext from the project utility.
    val sc = MySparkContextUtil.get(args)

    // Build a pair RDD (2 partitions) from sample (key, count) tuples.
    val pairs = Seq(("a", 3), ("a", 2), ("c", 4), ("b", 3), ("c", 6), ("c", 8))
    val pairRdd = sc.parallelize(pairs, 2)

    // mapValues keeps each key untouched and transforms only the value:
    // here every count is prefixed with ">>>".
    val prefixed = pairRdd.mapValues(v => ">>>" + v)

    // Collect to the driver and print as a comma-separated list.
    println(prefixed.collect().mkString(","))

    // Release Spark resources via the project utility.
    MySparkContextUtil.close(sc)
  }

}
