package com.dtkavin.sparkSQL

import org.apache.spark.sql.{Row, SQLContext}
import org.apache.spark.sql.types.{StringType, StructField, IntegerType, StructType}
import org.apache.spark.{SparkConf, SparkContext}

/**
  * Created by IntelliJ IDEA.
  * Programmer : John Zn
  * Date : 2016/4/22 0022
  * Time : 23:58
  * Description : Defines the Row schema explicitly with StructType instead of deriving it from a case class.
  */
// NOTE(review): empty companion class — nothing in this file instantiates it.
// It appears to exist only so the object below has a companion; confirm it is
// not referenced elsewhere before removing.
class StructTypePerson {

}

object StructTypePerson {
  /**
    * Reads a comma-separated text file, builds a DataFrame from it using an
    * explicitly declared StructType schema (rather than a case class), prints
    * the DataFrame, and writes it back out as JSON.
    *
    * Usage: StructTypePerson [inputPath] [outputPath]
    * Both arguments are optional; the defaults preserve the original
    * hard-coded paths, so existing invocations behave unchanged.
    */
  def main(args: Array[String]): Unit = {
    // Allow the paths to be overridden from the command line.
    val inputPath  = if (args.length > 0) args(0) else "hdfs://spark01:9000/streamsql"
    val outputPath = if (args.length > 1) args(1) else "c:/j.txt"

    val conf = new SparkConf()
    val sc = new SparkContext("local[3]", "StructTypePerson", conf)
    try {
      val sqlc = new SQLContext(sc)

      // Split each input line into its comma-separated fields.
      val mappedRdd = sc.textFile(inputPath).map(_.split(","))

      // Row schema: nullable id (int), name (string), age (int).
      val schema = StructType(
        List(
          StructField("id", IntegerType, nullable = true),
          StructField("name", StringType, nullable = true),
          StructField("age", IntegerType, nullable = true)
        )
      )

      // Skip malformed records (too few fields, or non-numeric id/age)
      // rather than letting one bad line abort the whole job.
      val personRdd = mappedRdd
        .filter(_.length >= 3)
        .flatMap { fields =>
          try Some(Row(fields(0).trim.toInt, fields(1).trim, fields(2).trim.toInt))
          catch { case _: NumberFormatException => None }
        }

      val personDF = sqlc.createDataFrame(personRdd, schema)

      personDF.show()
      personDF.write.json(outputPath)
    } finally {
      // Release cluster resources even when the job fails part-way through.
      sc.stop()
    }
  }
}