package com.arnold.guide.sparkSql.demo04

import org.apache.spark.sql.{DataFrame, SparkSession}

/**
  * Created by arnold.zhu on 2017/7/24.
  */
object Demo04_1 {

  // Lazy so the session is only created when main actually runs,
  // not at object initialization time.
  private lazy val sparkSession: SparkSession =
    SparkSession.builder().master("local").appName("LearnSparkSQL").getOrCreate()

  /**
    * Demonstrates converting an RDD of comma-separated "name,age" lines into a
    * DataFrame via the `Person` case class, then projecting the name column
    * both by positional index and by column name.
    *
    * @param args unused command-line arguments
    */
  def main(args: Array[String]): Unit = {
    // NOTE(review): getResource returns null if the resource is missing,
    // which would NPE here — acceptable for a demo, verify in real code.
    val path: String = Thread.currentThread().getContextClassLoader
      .getResource("sparksql/demo04/people.txt").getPath

    println(path)

    try {
      // Convert the RDD data set into a DataFrame.
      val peopleRdd = sparkSession.sparkContext.textFile(path)

      import sparkSession.implicits._

      val df: DataFrame = peopleRdd
        .map(_.split(","))
        .map(fields => Person(fields(0), fields(1).trim.toInt))
        .toDF()

      df.show()

      // Project a column by positional index on the underlying Row.
      df.map(person => "Name:" + person.get(0)).show()

      // Or look the column up by name with getAs.
      df.map(person => "Name:" + person.getAs[String]("name")).show()
    } finally {
      // Always release Spark resources, even if the job above fails
      // (e.g. toInt throwing on a malformed age field).
      sparkSession.stop()
    }
  }

}
