package day01
// Did it work?
// Clone
// Second standalone modification of this MyTest.scala file
import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

/**
 * Small local-mode Spark exercise: demonstrates `distinct`, `mapPartitions`,
 * and the partition-order sensitivity of `RDD.aggregate`.
 */
object MyTest {
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf().setAppName("MyTest").setMaster("local[*]")
    val sc = new SparkContext(conf)
    try {
      // distinct() removes the duplicate 2, 6, 3 and 8 values.
      val value = sc.parallelize(Array(1, 2, 3, 4, 5, 6, 7, 2, 6, 8, 3, 8, 9, 10))
      val distinctValues = value.distinct().collect()
      println(distinctValues.toBuffer)

      // mapPartitions applies the function once per partition iterator;
      // here it simply multiplies every element by 10.
      val res: Array[Int] = value.mapPartitions(_.map(_ * 10)).collect()
      println(s"res=${res.toBuffer}")

      // Baseline: an empty string has length 0 (compare with aggregate below).
      val a: String = ""
      println(a.length)

      // Two partitions, each containing one empty and one non-empty string.
      val rdd1: RDD[String] = sc.parallelize(List("", "23", "345", ""), 2)

      // Within each partition the seqOp folds to the min length seen so far
      // (as a string); the combOp then concatenates the per-partition results.
      // Because partition results may arrive in either order, the output can
      // be e.g. "01" or "10" — aggregate is run three times to illustrate
      // that the result is not guaranteed to be stable across runs.
      def minLenAggregate(): String =
        rdd1.aggregate("")(
          (acc, s) => math.min(acc.length, s.length).toString,
          (left, right) => left + right
        )

      val res2 = minLenAggregate()
      println(s"res2= $res2")
      val res3 = minLenAggregate()
      println(s"res3= $res3")
      val res4 = minLenAggregate()
      println(s"res4= $res4")
    } finally {
      // Always release the SparkContext, even if an action above throws.
      sc.stop()
    }
  }
}
