package com.gy.spark.sparksql.dataframe

import org.apache.spark.SparkConf
import org.apache.spark.SparkContext
import org.apache.spark.sql.{RowFactory, SQLContext, SaveMode}
import org.apache.spark.sql.types.StructType
import org.apache.spark.sql.types.StringType
import org.apache.spark.sql.types.StructField
import org.apache.spark.sql.types.IntegerType


/** Example: build a DataFrame by pairing an RDD[Row] with an explicitly declared schema. */
object CreateDFFromRDDWithStruct {

  def main(args: Array[String]): Unit = {
    // Local-mode Spark context; the app name mirrors this object's class name.
    val sparkConf = new SparkConf().setMaster("local").setAppName(this.getClass.getSimpleName)
    val sparkContext = new SparkContext(sparkConf)
    val sqlCtx = new SQLContext(sparkContext)

    // Parse each comma-separated line into a Row of (id, name, age).
    // NOTE(review): assumes every line has at least 3 fields and field 3 parses
    // as an integer — malformed input will fail at action time. TODO confirm.
    val personRows = sparkContext.textFile("./spark/input/sql/person.txt").map { line =>
      val fields = line.split(",")
      RowFactory.create(fields(0), fields(1), Integer.valueOf(fields(2)))
    }

    // Schema matching the Row layout above; every column is nullable.
    val personSchema = StructType(
      StructField("id", StringType, true) ::
        StructField("name", StringType, true) ::
        StructField("age", IntegerType, true) ::
        Nil
    )

    val personDF = sqlCtx.createDataFrame(personRows, personSchema)
    personDF.show()
    personDF.printSchema()

    // Persist the DataFrame in three formats, replacing any previous output.
    personDF.write.mode(SaveMode.Overwrite).orc("./spark/output/sql/orc")
    personDF.write.mode(SaveMode.Overwrite).csv("./spark/output/sql/csv")
    personDF.write.mode(SaveMode.Overwrite).json("./spark/output/sql/json")

    sparkContext.stop()
  }
}