package com.atguigu.sql

import org.apache.spark.SparkConf
import org.apache.spark.sql.{DataFrame, Dataset, SparkSession}

object TestSparkSQL {

  /**
   * Entry point: builds a local [[SparkSession]], loads a JSON file of people,
   * and demonstrates equivalent DataFrame-DSL, typed Dataset, and Spark SQL queries.
   *
   * @param args optional; the first argument overrides the input JSON path
   *             (defaults to the original hard-coded location)
   */
  def main(args: Array[String]): Unit = {

    // Run locally on all available cores; app name shows up in the Spark UI.
    val conf: SparkConf = new SparkConf().setAppName("Spark SQL").setMaster("local[*]")

    // Create (or reuse) the SparkSession driving all SQL/Dataset operations.
    val spark: SparkSession = SparkSession
      .builder()
      .config(conf)
      .getOrCreate()

    // Required for implicit conversions such as $"col" and .as[T] encoders.
    import spark.implicits._

    // Input path is configurable via the first CLI argument; the default keeps
    // the original behavior for callers that pass no arguments.
    val path: String = args.headOption.getOrElse("E:\\user.json")

    // Load the JSON file; Spark infers the schema (JSON integers -> LongType).
    val df: DataFrame = spark.read.json(path)

    // Display the content of the DataFrame to stdout.
    df.show()

    // Demonstrate conversion from the untyped DataFrame to a typed Dataset.
    val ds: Dataset[Person] = df.as[Person]
    ds.show()

    // DataFrame DSL query: people older than 21.
    df.filter($"age" > 21).show()

    // Equivalent query expressed in SQL through a temporary view.
    df.createOrReplaceTempView("persons")
    spark.sql("SELECT * FROM persons where age > 21").show()

    spark.stop()
  }
}

/**
 * Sample case class mapping one record of the input JSON file.
 *
 * @param name the person's name
 * @param age  the person's age; declared as BigInt because Spark's JSON schema
 *             inference produces LongType for integer fields — NOTE(review):
 *             `Long` would also work here, confirm against the data source
 */
final case class Person(name: String, age: BigInt)
