package com.li.spark.optimization

import org.apache.spark.storage.StorageLevel
import org.apache.spark.{SparkConf, SparkContext}

object KyroSer {

  /**
   * Demo of Spark's Kryo serialization: configures the KryoSerializer,
   * registers the custom [[Person]] type, caches an RDD with a serialized
   * storage level, then keeps the driver alive so the Storage tab of the
   * local web UI (http://localhost:4040) can be inspected.
   *
   * @param args unused command-line arguments
   */
  def main(args: Array[String]): Unit = {
    // Build the Spark configuration for a local, single-JVM run.
    val conf = new SparkConf()
      .setAppName("KyroSer")
      .setMaster("local")
    // Switch from default Java serialization to Kryo, and register the
    // custom type up front — registered classes serialize more compactly
    // because Kryo writes an id instead of the full class name.
    conf.set("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
      .registerKryoClasses(Array(classOf[Person]))
    val sc: SparkContext = new SparkContext(conf)

    // Small in-memory source data; parallelize defaults to memory-backed partitions.
    val dataRDD = sc.parallelize(Seq("hello you", "hello me"))

    dataRDD
      .flatMap(_.split(" "))
      .map(Person(_, 10))
      // MEMORY_ONLY_SER stores partitions in serialized form — this is
      // exactly where the Kryo serializer pays off in cache footprint.
      .persist(StorageLevel.MEMORY_ONLY_SER)
      .foreach(println)

    // Keep the driver alive so the UI stays available. Sleep inside the
    // loop instead of busy-spinning (the original empty while(true) pegged
    // a CPU core, and made the sc.stop() below unreachable dead code).
    try {
      while (true) {
        Thread.sleep(1000L)
      }
    } finally {
      // Now actually reachable: releases Spark resources if the loop is
      // ever broken by an interrupt/exception.
      sc.stop()
    }
  }
}

case class Person(name: String, age: Int) extends Serializable