package com.king.spark.sql

import org.apache.spark.rdd.RDD
import org.apache.spark.sql.{DataFrame, Dataset, SparkSession}
import org.apache.spark.{SparkConf, SparkContext}

/**
 * @Author wdl
 * @Date 2022/11/22 16:34
 */
object Spark_SQL_DataFrame {

  /**
   * Reads a JSON file of users into an untyped DataFrame, converts it to a
   * typed Dataset[User1], and prints it to stdout.
   *
   * @param args optional first argument: path to the input JSON file.
   *             Defaults to the original sample path when absent.
   */
  def main(args: Array[String]): Unit = {

    val sparkConf = new SparkConf()
      .setMaster("local[*]")
      .setAppName("Spark_SQL_DataFrame") // was "WordCount" — copy-paste leftover

    // SparkSession creates and owns its own SparkContext; instantiating a
    // SparkContext explicitly beforehand is redundant and makes shutdown
    // ordering error-prone, so only the session is built here.
    val session: SparkSession = SparkSession.builder().config(sparkConf).getOrCreate()
    import session.implicits._

    // Allow the input path to be supplied on the command line; fall back to
    // the original hard-coded location for backward compatibility.
    val inputPath = args.headOption.getOrElse("E:\\work\\big-data-2020\\spark-api\\input\\user.json")

    val frame: DataFrame = session.read.json(inputPath)

    // Typed view over the DataFrame; JSON column names/types must line up
    // with the fields of User1 or the conversion fails at runtime.
    val users: Dataset[User1] = frame.as[User1]
    users.show()

    // Stopping the session also stops the underlying SparkContext.
    session.stop()
  }
}
/**
 * Typed record for one row of the user JSON file.
 *
 * Marked `final`: case classes should not be extended (inherited case
 * classes break equals/hashCode semantics).
 *
 * @param id   user identifier
 * @param name user name
 * @param age  user age
 */
final case class User1(id: BigInt, name: String, age: BigInt)
