package spark.example

import org.apache.spark.{SparkConf,SparkContext}
import org.apache.spark.rdd.RDD

/** Demonstrates how different closure styles affect Spark task serialization.
  *
  * Each `doStuff` variant maps the same function over an RDD, but differs in
  * what the generated closure captures:
  *   - [[MyClass]]   passes a method reference, which captures `this`.
  *   - [[MyClass1]]  uses an explicit `this.func1(x)` lambda — also captures `this`.
  *   - [[MyClass2]]  same lambda, but on a singleton `object`.
  *   - [[MyClass3]]  copies the needed field into a local first, so the closure
  *                   captures only that local value, not `this`.
  *
  * NOTE(review): the object name contains a typo ("methond"), but it is the
  * spark-submit entry point, so renaming it would break callers — kept as-is.
  */
object methondReferences {

  /** Entry point: builds a tiny RDD and runs every `doStuff` variant,
    * printing each result joined with "-".
    *
    * @param args command-line arguments (unused)
    */
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf()
      .setAppName("methondReferences Example")
      .set("spark.serializer", "org.apache.spark.serializer.KryoSerializer")

    val sc = new SparkContext(conf)

    // Ensure the context is stopped even if a job fails, so cluster
    // resources are released.
    try {
      val r1 = sc.parallelize(Seq("abc", "cde"))
      val c1 = new MyClass()
      val c2 = new MyClass1()
      val c3 = new MyClass3()
      println(c1.doStuff(r1).collect.mkString("-"))
      println(c2.doStuff(r1).collect.mkString("-"))
      println(c3.doStuff(r1).collect.mkString("-"))
      println(MyClass2.doStuff(r1).collect.mkString("-"))
    } finally {
      sc.stop()
    }
  }

  /** Method-reference variant: `rdd.map(func1)` expands to a closure over
    * `this`, so the whole instance is serialized and shipped to executors.
    * Works here only because the class extends Serializable.
    */
  class MyClass extends Serializable {
    def func1(s: String) : String = s + "100"
    def doStuff(rdd: RDD[String]) : RDD[String] = { rdd.map(func1) }
  }

  /** Explicit-`this` lambda variant: equivalent to [[MyClass]] — the lambda
    * references `this.func1`, so the enclosing instance is still serialized.
    */
  class MyClass1 extends Serializable {
    def func1(s: String) : String = s + "100"
    def doStuff(rdd: RDD[String]) : RDD[String] = { rdd.map(x => this.func1(x)) }
  }

  /** Singleton variant: same lambda shape as [[MyClass1]], but defined on an
    * `object`. The closure still references the (serializable) singleton.
    */
  object MyClass2 extends Serializable {
    def func1(s: String) : String = s + "100"
    def doStuff(rdd: RDD[String]) : RDD[String] = { rdd.map(x => this.func1(x)) }
  }

  /** Local-copy variant: copying `this.field` into a local `val` before the
    * `map` means the closure captures only that String, avoiding serialization
    * of the enclosing instance — the pattern recommended by the Spark
    * programming guide for accessing fields inside closures.
    */
  class MyClass3 extends Serializable {
    val field = "Hello"
    def func1(s: String) : String = s + "100"
    def doStuff(rdd: RDD[String]) : RDD[String] = {
      val field_ = this.field
      rdd.map(x => field_ + x)
    }
  }
}
