package spark.example

import org.apache.hadoop.io.compress._
import org.apache.spark.{SparkConf, SparkContext}

object saveCompression {

  /** Demonstrates saving the same small RDD as text output once per Hadoop
    * compression codec, plus a key/value sequence file.
    *
    * Run via: spark-submit --class spark.example.saveCompression <jar>
    */
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf()
      .setAppName("save compression")
      .set("spark.testing.memory", "471859200") // low memory floor for local testing
    val sc = new SparkContext(conf)

    try {
      val basePath = "hdfs://master:9000/output/spark_output"

      // One (output-directory suffix, codec) pair per compression format to exercise.
      val codecs: Seq[(String, Class[_ <: CompressionCodec])] = Seq(
        "testgz"           -> classOf[GzipCodec],
        "testbz2"          -> classOf[BZip2Codec],
        "testLz"           -> classOf[Lz4Codec],
        "testSnappy"       -> classOf[SnappyCodec],
        "testDefault"      -> classOf[DefaultCodec],
        "testDeflateCodec" -> classOf[DeflateCodec]
      )

      for ((suffix, codec) <- codecs) {
        // repartition(1) collapses to a single partition so each output
        // directory contains exactly one compressed part file.
        sc.parallelize(Seq(1, 2, 3, 4, 5, 6))
          .repartition(1)
          .saveAsTextFile(s"$basePath/$suffix", codec)
      }

      // BUG FIX: the original passed an empty string as the output path,
      // which fails at runtime. Write next to the other outputs instead.
      sc.parallelize(Seq(("key1", 1), ("key2", 2), ("key3", 3), ("key4", 4)), 1)
        .saveAsSequenceFile(s"$basePath/testSeq")
    } finally {
      sc.stop() // release the context even when a save fails
    }
  }
}
