package com.atguigu.sparksql.day01

import org.apache.spark.rdd.RDD
import org.apache.spark.sql.{DataFrame, SparkSession}

/**
 * Author atguigu
 * Date 2020/11/3 11:26
 */
object RDD2DF {
    def main(args: Array[String]): Unit = {
        // Demo: turn an RDD of case-class instances into a DataFrame
        // using the implicit toDF conversion.
        val spark = SparkSession.builder()
            .master("local[2]")
            .appName("RDD2DF")
            .getOrCreate()
        // Needed for rdd.toDF(...) below.
        import spark.implicits._

        /* Variant 1 (reference only): load a DataFrame from JSON, go back to
           an RDD[Row], and extract one column:
        val userDF: DataFrame = spark.read.json("c:/user.json")
        userDF.createOrReplaceTempView("user")
        spark.sql("select * from user").show
        val rdd: RDD[Row] = userDF.rdd
        rdd.map(_.getAs[Long]("salary")).collect.foreach(println)
        */

        /* Variant 2 (reference only): RDD of primitives -> single-column DF:
        val nums: RDD[Int] = spark.sparkContext.parallelize(List(30, 50, 70, 60, 10, 20))
        nums.toDF("num").show
        */

        // Active variant: RDD[User] -> DataFrame, overriding the inferred
        // column names (age -> "a", name -> "b").
        val users = List(User(10, "zs"), User(20, "zhiling"))
        val userRdd: RDD[User] = spark.sparkContext.makeRDD(users)
        val df: DataFrame = userRdd.toDF("a", "b")
        df.show()

        spark.stop()
    }
}

/** Minimal user record for the RDD-to-DataFrame demo.
  *
  * Marked `final` per convention: case classes should not be extended.
  *
  * @param age  the user's age
  * @param name the user's name
  */
final case class User(age: Int, name: String)
