package com.optimize

import org.apache.spark.storage.StorageLevel
import org.apache.spark.{SparkConf, SparkContext}

import scala.collection.mutable.ArrayBuffer

/**
  *
  * @author ymy.hadoop
  */

/** Immutable sample record used to measure serialized cache size.
  *
  * Marked `final`: extending a case class breaks the generated
  * `equals`/`hashCode` contract, so case classes should not be subclassed.
  */
final case class Student(id: String, name: String, age: Int, gender: String)

object SerializationDemo {

  /** Entry point: runs the Kryo-serialization variant of the demo.
    * (Switch to `test01` to measure default Java serialization instead.)
    */
  def main(args: Array[String]): Unit = test02

  /** Caches 1M `Student` records with Kryo serialization enabled.
    *
    * Fix: the serializer class was misspelled as
    * `org.apache.serializer.KryoSerializer`, which does not exist — Spark
    * would fail with a ClassNotFoundException and the Kryo demo never ran.
    * The correct fully-qualified name is
    * `org.apache.spark.serializer.KryoSerializer`.
    *
    * Registering `Student` with Kryo avoids writing the full class name
    * into every serialized record, shrinking the cached RDD further.
    */
  def test02: Unit = {

    val conf = new SparkConf()
      .setMaster("local[2]")
      .setAppName("SerializationDemo-Kryo")
      // Correct FQCN; the previous value omitted the ".spark" package segment.
      .set("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
      .registerKryoClasses(Array(classOf[Student]))
    val sc = new SparkContext(conf)

    // Build a large in-memory dataset so the serialized-cache size
    // difference vs. Java serialization is visible in the Spark UI.
    val studentArr = new ArrayBuffer[Student]()
    for (i <- 1 to 1000000) {
      studentArr += Student(i + "", i + "a", 10, "male")
    }
    val kryoSerializableSerializer = sc.parallelize(studentArr)
    // MEMORY_ONLY_SER stores serialized bytes; count() forces materialization.
    kryoSerializableSerializer.persist(StorageLevel.MEMORY_ONLY_SER).count()

    // Deliberately block forever so the Spark UI (http://localhost:4040)
    // stays up for inspecting the cached RDD's storage size.
    // NOTE: this makes the sc.stop() below unreachable.
    while (true) {
      Thread.sleep(100000)
    }

    sc.stop()
  }

  /** Baseline: caches 1M `Student` records using Spark's default Java
    * serialization, for comparison against the Kryo variant in `test02`.
    */
  def test01: Unit = {

    val conf = new SparkConf()
      .setMaster("local[2]")
      .setAppName("SerializationDemo")
    val sc = new SparkContext(conf)

    // One million sample records (ids "1".."1000000") so the serialized
    // cache size is large enough to compare in the Spark UI.
    val records = ArrayBuffer.tabulate(1000000) { idx =>
      val n = idx + 1
      Student(n + "", n + "a", 10, "male")
    }

    val javaSerialization = sc.parallelize(records)
    // Serialized in-memory storage; count() forces the cache to materialize.
    javaSerialization.persist(StorageLevel.MEMORY_ONLY_SER).count()

    // Keep the driver (and its UI at http://localhost:4040) alive forever
    // for manual inspection; the sc.stop() below is intentionally unreachable.
    while (true) Thread.sleep(100000)

    sc.stop()
  }
}
