package cn.rslee.java.demos.sql

import org.apache.spark.sql.SQLContext
import org.apache.spark.SparkConf
import org.apache.spark.SparkContext

object SparkSqlApplication {

  /** One record of the comma-separated input file: name,age,addr. */
  case class Person(name: String, age: Int, addr: String)

  /**
   * Loads a CSV file of people, registers it as a temp table, and prints
   * name/age/addr for every row ordered by age.
   *
   * @param args optional: args(0) overrides the input file path
   *             (defaults to the original hard-coded path when absent).
   */
  def main(args: Array[String]): Unit = {
    // Generalized: the input path was hard-coded; an optional CLI argument
    // now overrides it, with the original path as the default.
    val inputPath = args.headOption.getOrElse("D:\\data\\SparkSqlDataTest\\data1.txt")

    val conf = new SparkConf().setAppName("Spark Sql Test").setMaster("local")
    val sc = new SparkContext(conf)

    // try/finally guarantees the SparkContext is released even when a stage
    // throws (the original leaked it on any failure before sc.stop()).
    try {
      val sqlContext = new SQLContext(sc)

      import sqlContext._
      import sqlContext.implicits._

      // Parse each line into a Person. Malformed rows (fewer than three
      // fields, or a non-integer age) are skipped instead of aborting the
      // whole job with ArrayIndexOutOfBounds/NumberFormatException.
      val people = sc.textFile(inputPath)
        .map(_.split(","))
        .filter(fields => fields.length >= 3 && fields(1).trim.matches("-?\\d+"))
        .map(fields => Person(fields(0), fields(1).trim.toInt, fields(2)))
        .toDF()

      people.registerTempTable("people")

      val teenagers = sql("SELECT name, age, addr FROM people ORDER BY age")

      // s-interpolation instead of '+' concatenation; output text unchanged.
      teenagers
        .map(t => s"name:${t(0)} age:${t(1)} addr:${t(2)}")
        .collect()
        .foreach(println)
    } finally {
      sc.stop()
    }
  }
}