package com.scala

import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

object Scala_Lesson1 {

  /**
   * Demo of basic pair-RDD operations on a local Spark context:
   * an inner join of two keyed RDDs, followed by a filter.
   *
   * @param args unused command-line arguments
   */
  def main(args: Array[String]): Unit = {
    // Local-mode master so the demo runs without a cluster.
    val conf = new SparkConf()
    conf.setMaster("local").setAppName("parallelization")

    val sc = new SparkContext(conf)

    val rdd1 = sc.makeRDD(Array(("a", 1), ("b", 2), ("c", 3), ("d", 4), ("e", 5)))
    val rdd2 = sc.makeRDD(Array(("a", 6), ("g", 7), ("c", 8), ("i", 9), ("j", 10)))

    // Inner join on key: only keys present in both RDDs ("a", "c") survive,
    // each paired with the values from both sides.
    val joined: RDD[(String, (Int, Int))] = rdd1.join(rdd2)
    joined.foreach(println)

    // NOTE: transformations are lazy — without an action below, this filter
    // (and the println inside it) would never execute at all.
    val filtered = rdd1.filter { case (key, _) =>
      println(key) // side effect runs once per element when the action fires
      false        // predicate rejects everything, so `filtered` is empty
    }
    // Force evaluation; prints the keys above, then nothing (empty result).
    filtered.foreach(println)

//    val rdd=sc.makeRDD(Array(1,2,3,4,5,6))
//    println("partition is "+rdd.partitions.size)

    sc.stop()
  }

}
