package com.zhaosc.spark.sql.df

import org.apache.spark.sql.SparkSession
import org.apache.spark.SparkConf
import org.apache.spark.SparkContext
import org.apache.spark.sql.RowFactory
import org.apache.spark.sql.Row
import org.apache.spark.sql.types.StructType
import org.apache.spark.sql.types.StringType
import org.apache.spark.sql.types.StructField

/**
 * Example: build a DataFrame from an RDD by programmatically specifying
 * the schema (rather than inferring it via reflection / case classes).
 *
 * Reads `Peoples.txt` (expected CSV lines: id,name,age — TODO confirm
 * the file format against the actual data), constructs `Row`s, attaches
 * a string-typed schema, registers a temp view, and runs a SQL query.
 */
object RDD2DataFrameByProgrammatically {

  def main(args: Array[String]): Unit = {

    // Local-mode session for this standalone example.
    val spark = SparkSession
      .builder()
      .appName("RDD2DataFrameByProgrammatically")
      .config("spark.master", "local")
      .getOrCreate()

    // Parse each line into a Row of (id, name, age).
    // NOTE(review): lines with fewer than 3 comma-separated fields will
    // throw ArrayIndexOutOfBoundsException here — acceptable for an
    // example, but confirm the input file has no blank/short lines.
    val rowRDD = spark.sparkContext.textFile("Peoples.txt")
      .map { line =>
        val params = line.split(",")
        // Only the name column is trimmed, matching the original example.
        Row(params(0), params(1).trim, params(2))
      }

    // Schema is built from a space-separated list of column names;
    // every column is modeled as a nullable StringType.
    val schemaString = "id name age"

    val fields = schemaString.split(" ")
      .map(fieldName => StructField(fieldName, StringType, nullable = true))
    val schema = StructType(fields)

    // Attach the explicit schema to the RDD of Rows.
    val peopleDF = spark.createDataFrame(rowRDD, schema)

    // Register a temp view so the DataFrame can be queried with SQL.
    peopleDF.createOrReplaceTempView("people")

    // Fix: `.show()` returns Unit, so binding it to a val named
    // `results` was misleading dead code — just run the query and print.
    spark.sql("SELECT name FROM people").show()

    // Fix: stop the whole session (which also stops its SparkContext)
    // rather than only the underlying context.
    spark.stop()
  }
}