package com.spark.mooc.ch5_rdd.part01_RDDBasics

import org.apache.spark.{SparkConf, SparkContext}

/**
 * @description: Demonstrates three ways to create an RDD: from a local file,
 *               from HDFS, and from a parallelized in-memory collection.
 * @time: 2020/11/27 13:59
 * @author: lhy
 */
object demo01_RDDCreate {
    /**
     * Entry point: builds a local SparkContext and demonstrates three ways to
     * create an RDD. The RDDs are only defined (lazy), never materialized.
     *
     * @param args unused command-line arguments
     */
    def main(args: Array[String]): Unit = {
        val conf: SparkConf = new SparkConf().setAppName("RDD创建").setMaster("local")
        val sc = new SparkContext(conf)
        try {
            // 1. Load data from the local file system
            val lines_file = sc.textFile("file:///root/data.txt/test.txt")
            // 2. Load data from a distributed file system (HDFS)
            val lines_hdfs = sc.textFile("hdfs://localhost:9000/user/hadoop/word.txt")
            // 3. Create an RDD from a parallelized collection (array);
            //    makeRDD is a convenience wrapper with the same effect as parallelize
            val array = Array(1, 2, 3, 4, 5)
            val rdd = sc.parallelize(array)
            val rdd2 = sc.makeRDD(array)
        } finally {
            // Fix: the original never stopped the context, leaking executor and
            // UI resources; always release them even if an action above throws.
            sc.stop()
        }
    }
}
