package com.arnold.guide.sparkSql.demo04

import org.apache.spark.rdd.RDD
import org.apache.spark.sql.{Row, SparkSession}
import org.apache.spark.sql.types.{StringType, StructField, StructType}

/**
  * Created by arnold.zhu on 2017/7/24.
  */
object Demo04_2 {

  // Local-mode session for this standalone demo; stopped in main's finally block.
  private val sparkSession: SparkSession = SparkSession.builder().master("local").appName("LearnSparkSQL").getOrCreate()

  /**
    * Demonstrates building a DataFrame programmatically:
    * read raw text lines, map them to [[Row]]s, and attach an explicit
    * [[StructType]] schema via `createDataFrame`.
    */
  def main(args: Array[String]): Unit = {
    // getResource returns null when the file is absent from the classpath;
    // wrap in Option and fail fast with a clear message instead of an NPE.
    val path: String = Option(Thread.currentThread().getContextClassLoader.getResource("sparksql/demo04/people.txt"))
      .map(_.getPath)
      .getOrElse(sys.error("Classpath resource not found: sparksql/demo04/people.txt"))

    println(path)

    try {
      // 1. Create an RDD of raw CSV lines ("name,age").
      val personRDD: RDD[String] = sparkSession.sparkContext.textFile(path)

      // 2. Define the schema. Age is kept as StringType on purpose so the
      //    resulting DataFrame matches the raw text; cast downstream if needed.
      val schema = StructType(
        Seq(
          StructField("name", StringType, nullable = true),
          StructField("age", StringType, nullable = true)
        )
      )

      // 3. Convert each line to a Row. trim removes the space that typically
      //    follows the comma in the sample file ("Michael, 29").
      //    NOTE(review): assumes every line has at least two comma-separated
      //    fields — malformed lines would throw at action time.
      val rowsRdd = personRDD.map(_.split(",")).map(x => {
        Row(x(0), x(1).trim)
      })

      // 4. Assemble the DataFrame from the Row RDD plus the explicit schema.
      val df = sparkSession.createDataFrame(rowsRdd, schema)

      df.show()
    } finally {
      // Always release the session so local JVM resources are cleaned up,
      // even if the job above fails.
      sparkSession.stop()
    }
  }

}
