package com.dxf.bigdata.D05_spark_again.action

import org.apache.spark.api.java.JavaSparkContext.fromSparkContext
import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

/**
 * Demo of round-tripping RDDs through files: data previously written with
 * saveAsTextFile / saveAsObjectFile / saveAsSequenceFile is loaded back via
 * the matching reader (textFile / objectFile / sequenceFile) and printed.
 */
object saveAsxxFile保存和读取文件 {

  def main(args: Array[String]): Unit = {

    val conf = new SparkConf().setMaster("local[*]").setAppName("app")
    // Allow extra retries in case the default driver/UI ports are occupied.
    conf.set("spark.port.maxRetries", "100")
    val sc = new SparkContext(conf)

    // Writer side (run once beforehand to produce the inputs read below):
    //    val pairs: RDD[(String, Int)] = sc.makeRDD(List(("a", 1), ("b", 2), ("a", 3)), 2)
    //    pairs.saveAsTextFile("output4")
    //    pairs.saveAsObjectFile("output5")
    //    // saveAsSequenceFile requires a pair RDD (key-value data)
    //    pairs.saveAsSequenceFile("output6")

    // Reader side — each loader must match the format it was written with.
    val textLines: RDD[String] = sc.textFile("output4")
    val objectPairs: RDD[(String, Int)] = sc.objectFile[(String, Int)]("output5")
    val seqPairs: RDD[(String, Int)] = sc.sequenceFile[String, Int]("output6")

    textLines.collect().foreach(println)
    objectPairs.collect().foreach(println)
    seqPairs.collect().foreach(println)

    // Identity map (demonstrates map with a pattern-match literal),
    // followed by a second print pass over the text lines.
    textLines
      .map { line => line }
      .collect()
      .foreach(println)

    sc.stop()

  }

}
