package com.makainb.test

import org.apache.spark.sql.SQLContext
import org.apache.spark.{SparkContext, SparkConf}

/**
 * Created by kevin on 2015/12/7.
 */

/** Immutable record for one row of `people.txt` (name,age). `final`: case classes should not be extended. */
final case class Person(name: String, age: Int)

object SparkSQL {

  /**
   * Entry point: reads a CSV-ish text file of `name,age` lines, registers it
   * as a temporary SQL table, runs a query against it, prints the matching
   * names and saves the result.
   *
   * @param args optional overrides: args(0) = input path (default "people.txt"),
   *             args(1) = output path (default the original HDFS location)
   */
  def main(args: Array[String]): Unit = {
    // Optional CLI overrides; defaults keep the original hard-coded behavior.
    val inputPath  = args.lift(0).getOrElse("people.txt")
    val outputPath = args.lift(1).getOrElse("hdfs://node1:8020/usr/sparksql")

    val config = new SparkConf().setAppName("spark sql").setMaster("local")
    val sc = new SparkContext(config)
    try {
      val sqlContext = new SQLContext(sc)
      import sqlContext.implicits._

      // Each input line is "name,age"; trim the age field before parsing.
      val people = sc.textFile(inputPath)
        .map(_.split(","))
        .map(p => Person(p(0), p(1).trim().toInt))
        .toDF()
      people.registerTempTable("t_person")

      // NOTE(review): despite the name, this selects people OLDER than 31;
      // the SQL is kept as-is to preserve the sample's behavior.
      val teenagers = sqlContext.sql(" select name from t_person  where age  > 31 ")
      teenagers.map(t => "Name: " + t(0)).collect().foreach(println)
      teenagers.toJavaRDD.saveAsTextFile(outputPath)
    } finally {
      // Always release the SparkContext, even if the job throws.
      sc.stop()
    }
  }

}
