package spark.sql.practice

import org.apache.spark.rdd.RDD
import org.apache.spark.sql.types.{IntegerType, StringType, StructField, StructType}
import org.apache.spark.sql._
import org.apache.spark.{SparkConf, SparkContext}


/** Immutable person record (name + age).
  *
  * NOTE(review): currently unused in this file — presumably intended for
  * reflection-based schema inference (`import sqlContext.implicits._` +
  * `rdd.toDF()`); confirm before removing. Marked `final` per convention:
  * case classes should not be extended.
  */
final case class Person(name: String, age: Int)

object RDD2DataFrame {

  /** Demonstrates building a DataFrame from an `RDD[Row]` plus an explicit
    * `StructType` schema, then writing it back out as a single JSON file.
    *
    * Fixes vs. the previous revision:
    *  - explicit `: Unit =` (procedure syntax is deprecated);
    *  - `sqlContext.read.json` replaces `jsonFile`, which was deprecated in
    *    Spark 1.4 and removed in Spark 2.0;
    *  - the SparkContext is stopped on completion.
    */
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf()
      .setAppName("RDD2DataFrame")
      .setMaster("local[*]")
    val sc = SparkContext.getOrCreate(conf)
    val sqlContext = new SQLContext(sc)

    // Schema-inferred load via the DataFrameReader API (non-deprecated).
    // NOTE(review): this DataFrame is never used afterwards — kept only to
    // demonstrate JSON schema inference; confirm whether it can be dropped.
    val jsonPath = "datas/people.json"
    val jsonDataFrame = sqlContext.read.json(jsonPath)

    // Build an RDD[Row] from an in-memory dataset of (name, age, sex) tuples.
    val rowRDD: RDD[Row] = sc.parallelize(Array(
      ("tom", 19, "man"),
      ("jim", 19, "man"),
      ("le", 29, "women"),
      ("lucy", 15, "women")
    ).map { case (name, age, sex) => Row(name, age, sex) })

    // Explicit schema: field types must line up positionally with the
    // values placed in each Row above (String, Int, String).
    val schema: StructType = StructType(Array(
      StructField("nameT", StringType),
      StructField("ageT", IntegerType),
      StructField("sexT", StringType)
    ))

    val df = sqlContext.createDataFrame(rowRDD, schema)
    df.show()

    // coalesce(1) collapses output to a single part-file; the timestamped
    // directory avoids collisions between runs, and Overwrite guards against
    // a rerun landing in the same millisecond.
    df.coalesce(1).write
      .format("json")
      .mode(SaveMode.Overwrite)
      .save(s"result/spark_sql${System.currentTimeMillis()}")

    // Release the SparkContext and its resources.
    sc.stop()
  }
}
