package com.zhl.spark.core.rddtransform

import com.zhl.spark.core.BasicRdd

/**
 * @program: demos
 * @description: Demonstrates double-value (key-value) RDD transformations,
 *               currently reduceByKey with a traced merge function.
 * @author: 刘振华
 * @create: 2020-12-01 10:56
 **/
object DoubleValueRdd extends BasicRdd {
    def main(args: Array[String]): Unit = {
        // Entry point: delegates to the BasicRdd test harness, which is
        // expected to set up the SparkContext and invoke logic().
        test()
    }

    /**
     * Builds a key-value RDD and aggregates values per key with
     * `reduceByKey`, printing each pairwise merge so the shuffle-side
     * combining behavior can be observed, then prints the final result.
     */
    override def logic(): Unit = {
        // Earlier partitioning experiment, kept for reference:
//        sc.makeRDD(List(1,2,3,4),2).map((_,1))
//            .partitionBy(new HashPartitioner(4))
//            .saveAsTextFile("spark//output")
        sc.makeRDD(List(
            ("a",1),("a",2),("a",3),("b",4)
        )).reduceByKey((a,b)=>{
            // Trace each merge step; labels now match the parameter names
            // (the original printed "x=/y=" for parameters named a and b).
            println(s"a=${a},b=${b}")
            a+b
        }).collect().foreach(println)

    }
}
