import org.apache.spark.{SparkConf, SparkContext}

// Configure a local Spark application running with 2 worker threads.
val sparkConf = new SparkConf().setAppName("MyNetworkWordCount").setMaster("local[2]")

val sc = new SparkContext(sparkConf)

try {
  // Four strings split across 2 partitions: ("12", "23") and ("345", "").
  val rdd4 = sc.parallelize(List("12", "23", "345", ""), 2)

  // Note: the per-partition aggregation (seqOp) has a toString in it.
  //
  // seqOp: (acc, elem) => min(acc.length, elem.length).toString
  //   The zero value "" has length 0, so the FIRST step in each partition always
  //   produces "0". Every later step compares the length of the one-character
  //   accumulator string (always 1) against the element's length — i.e. this
  //   tracks string lengths, not string values.
  //   Partition ("12","23"): min(0,2)->"0", then min(1,2)->"1"  => "1"
  //   Partition ("345",""):  min(0,3)->"0", then min(1,0)->"0"  => "0"
  //
  // combOp: plain concatenation of the per-partition results. The order in which
  //   partition results arrive is NOT guaranteed, so the final value may be
  //   "10" on one run and "01" on another — a classic aggregate() gotcha.
  val result = rdd4.aggregate("")(
    (acc, elem) => math.min(acc.length, elem.length).toString,
    (left, right) => left + right
  )

  // Fix: the original discarded the result; surface it so the demo shows output.
  println(s"aggregate result: $result")
} finally {
  // Fix: the original never stopped the context, leaking local executor
  // threads and the Spark UI server. Always release it.
  sc.stop()
}
