package com.inspur
import org.apache.spark.SparkConf
import org.apache.spark.SparkContext
object Rdd {
  /**
   * Demonstrates four common ways to create an RDD:
   * from a List, from an Array, from a local file, and (commented out) from HDFS.
   *
   * @param args unused command-line arguments
   */
  def main(args: Array[String]): Unit = {
    // First: instantiate the SparkContext; "local" runs everything in-process.
    val conf = new SparkConf
    conf.setAppName("Test")
    conf.setMaster("local")
    val sc = new SparkContext(conf)
    // Guard the body so the context is always stopped, even if a job fails.
    try {
      // 1. Create from a List collection.
      val lst = List("oh", "my", "god", "hha")
      val rdd1 = sc.parallelize(lst)
      rdd1.foreach(println)
      // 2. Create from an Array.
      val arr = Array(0, 3, 2, 7, 2, 3, 3)
      val rdd2 = sc.parallelize(arr)
      rdd2.foreach(print)
      // 3. Create from a local file system path.
      val file = "file:///e:/words.txt"
      val rdd3 = sc.textFile(file)
      rdd3.foreach(println)
      // 4. Create from HDFS -- this requires running against a cluster
      //    (e.g. in spark-shell cluster mode), so it is left commented out.
      // val hdfs = "hdfs://namenode:8020/user/words.txt"
      // val rdd4 = sc.textFile(hdfs)
      // rdd4.foreach(println)
    } finally {
      // Release the SparkContext so the application shuts down cleanly;
      // the original code never stopped it (resource leak).
      sc.stop()
    }
  }
}