package ssssscala.ParquetLoadData;

import org.apache.spark.sql.SQLContext
import org.apache.spark.{SparkConf, SparkContext}

/**
  * Author: Mengkunxuan
  * Date: 2018/9/27 9:35
  * Description: Loads a Parquet file with Spark SQL, registers it as a
  * temp table, and prints the `name` column of every row.
  */
object ParquetLoadData {

  /**
    * Entry point: reads a Parquet file into a DataFrame, shows it,
    * registers it as the temp table `users`, selects the `name` column
    * via SQL, and prints each name to stdout.
    *
    * @param args unused command-line arguments
    */
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf().setMaster("local").setAppName("ParquetLoadData")
    val sc = new SparkContext(conf)
    try {
      val sqlContext = new SQLContext(sc)
      // NOTE(review): hard-coded Windows path — assumes the file exists locally.
      val usersDF = sqlContext.read.parquet("C:\\Users\\Administrator\\Desktop\\第一阶段代码\\第76讲-Spark SQL：数据源之通用的load和save操作\\文档\\users.parquet")
      usersDF.show()
      // Register the DataFrame so it can be queried with SQL below.
      usersDF.registerTempTable("users")
      // FIX: original query was "select * name form users", which is invalid
      // SQL ("* name" is malformed and "form" is a typo for "from") and would
      // fail to parse at runtime. Select just the name column instead.
      val userNameDf = sqlContext.sql("select name from users")
      userNameDf.rdd
        .map(row => "name: " + row(0))
        .collect()
        .foreach(println)
    } finally {
      // Always release Spark resources, even if the job above fails.
      sc.stop()
    }
  }
}
