package org.zjt.spark.sql

import org.apache.spark.SparkConf
import org.apache.spark.sql.{SaveMode, SparkSession}
import org.apache.spark.sql.functions.col

/**
  * DESC    Demonstrates Parquet persistence: writes a DataFrame (data and schema)
  *         to a Parquet file and reads it back.
  *
  * @create 2017-05-15 17:57
  **/
object RarquestTest {

  /**
    * Entry point. Reads `./person.json`, shows a derived projection, writes the
    * DataFrame to Parquet, then reads the Parquet back and shows the same projection.
    *
    * Uses an explicit `main` instead of the `App` trait to avoid the trait's
    * delayed-initialization pitfalls.
    */
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf().setAppName("WordCount").setMaster("local")
    // SparkSession is the unified entry point (Spark 2.0+); builder() is the idiomatic factory.
    val spark = SparkSession.builder().config(conf).getOrCreate()

    try {
      // Read the JSON source and show name with age incremented by one.
      val peoples = spark.read.json("./person.json")
      peoples.select(col("name"), col("age").plus(1)).show()

      // Persist as Parquet. Overwrite mode keeps the script idempotent:
      // without it a rerun fails because the output path already exists.
      peoples.write.mode(SaveMode.Overwrite).parquet("person.parquet")

      // Round-trip: read the Parquet file back and show the same projection.
      val parquet = spark.read.parquet("person.parquet")
      parquet.select(col("name"), col("age").plus(1)).show()
    } finally {
      // Always release the local SparkContext and its resources.
      spark.stop()
    }
  }
}
