package com.jinghang.spark_base._020_SQL

import org.apache.spark.sql.{Row, SparkSession}
import org.apache.spark.sql.types.{StringType, StructField, StructType}

/**
  * sparkSession.createDataFrame(rowRDD, schema)
  */
/**
  * Demonstrates building a DataFrame from an RDD with a programmatically
  * specified schema: sparkSession.createDataFrame(rowRDD, schema).
  */
object _040_Rdd_DataFrame {

  def main(args: Array[String]): Unit = {
    val sparkSession = SparkSession.builder()
      // Fixed: appName previously said "_010_BasicDataFrameExample",
      // a copy-paste leftover from another example in this package.
      .appName("_040_Rdd_DataFrame")
      .master("local[1]")
      .getOrCreate()
    sparkSession.sparkContext.setLogLevel("ERROR")

    try {
      runProgrammaticSchemaExample(sparkSession)
    } finally {
      // Always release the session so the local Spark context shuts down
      // cleanly even if the example throws.
      sparkSession.stop()
    }
  }

  /**
    * Reads `people.txt` (CSV-like: "name, age" per line), applies a
    * programmatic schema, registers a temp view and runs a SQL query on it.
    *
    * @param spark active SparkSession used to create the DataFrame and run SQL
    */
  private def runProgrammaticSchemaExample(spark: SparkSession): Unit = {
    // Create an RDD of raw text lines
    val peopleRDD = spark.sparkContext.textFile("data/practiceOperator/people.txt")

    // Generate the schema programmatically. Both columns are kept as
    // StringType to mirror the raw text file; the age is NOT parsed to an
    // integer here.
    val fields = Array(
      StructField("name", StringType, nullable = true),
      StructField("age", StringType, nullable = true)
    )

    val schema = StructType(fields)

    // Convert records of the RDD (people) to Rows.
    // Guard against blank or malformed lines (fewer than 2 fields) that would
    // otherwise throw ArrayIndexOutOfBoundsException.
    val rowRDD = peopleRDD
      .map(_.split(","))
      .filter(_.length >= 2)
      .map(x => Row(x(0), x(1).trim))

    // Apply the schema to the RDD
    val peopleDF = spark.createDataFrame(rowRDD, schema)

    // Creates a temporary view using the DataFrame
    peopleDF.createOrReplaceTempView("people")

    // SQL can be run over a temporary view created using DataFrames
    val results = spark.sql("SELECT name FROM people")

    results.show()

    // The results of SQL queries are DataFrames and support all the normal RDD operations
    // The columns of a row in the result can be accessed by field index or by field name
    //results.map(attributes => "Name: " + attributes(0)).show()
    // +-------------+
    // |        value|
    // +-------------+
    // |Name: Michael|
    // |   Name: Andy|
    // | Name: Justin|
    // +-------------+
    // $example off:programmatic_schema$
  }


}
