package com.atguigu.sparksql.day01

import org.apache.spark.rdd.RDD
import org.apache.spark.sql.{DataFrame, Dataset, Row, SparkSession}
import org.apache.spark.{SparkConf, SparkContext}

/**
 * Author atguigu
 * Date 2020/11/3 14:19
 */
object DF2DS {
    /**
     * Demo: converts an untyped DataFrame into a typed Dataset[Emp]
     * and back into a DataFrame, then shuts the session down.
     *
     * @param args unused command-line arguments
     */
    def main(args: Array[String]): Unit = {
        val spark: SparkSession = SparkSession
            .builder()
            .master("local[*]")
            .appName("DF2DS")
            .getOrCreate()
        // Brings the implicit Encoder[Emp] into scope for .as[Emp] below.
        import spark.implicits._

        // Untyped rows; the schema is inferred from the JSON file.
        // NOTE(review): path is hard-coded to a local Windows drive — confirm for deployment.
        val df: DataFrame = spark.read.json("c:/user.json")

        // DataFrame -> Dataset[Emp]; fails at runtime if the inferred
        // schema does not line up with Emp's fields (name, salary).
        val ds: Dataset[Emp] = df.as[Emp]

        // Dataset -> DataFrame: drops the static Emp type, keeps the schema.
        // Bound to a val so the conversion result is not silently discarded.
        val df2: DataFrame = ds.toDF()

        spark.close()
    }
}
/**
 * Typed record matching the JSON schema read in [[DF2DS]].
 *
 * Marked `final`: case classes should not be extended by other case
 * classes (inheritance between case classes is a known pitfall).
 *
 * @param name   employee name
 * @param salary employee salary
 */
final case class Emp(name: String, salary: Long)
