package com.study.bigdata.spark.core.rdd.serial

import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

object Spark01_RDD_Oper_Serial {
  def main(args: Array[String]): Unit = {

    // Driver-side setup: a single application running on a local master
    // that uses all available cores.
    val sparkConf = new SparkConf().setMaster("local[*]").setAppName("RDD")
    val context = new SparkContext(sparkConf)

    // TODO operator - action
    // Build a 2-partition RDD and keep only words starting with "S".
    val words: RDD[String] = context.makeRDD(List("Hello", "Hive", "Spark", "Scala"), 2)
    val search = new Search("S")
    search.filterByKey(words).foreach(println)
    /*
    Spark
    Scala
     */
    context.stop()

  }

  // Case classes implement Serializable by default, so a Search instance
  // can be safely captured by the filter closure shipped to executors.
  case class Search(q: String) {
    // Keep only the elements whose text starts with the query prefix `q`.
    def filterByKey(rdd: RDD[String]): RDD[String] =
      rdd.filter(_.startsWith(q))
  }

}
