package com.atguigu0.sql

import org.apache.spark.SparkContext
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.types.{IntegerType, StructField, StructType}
import org.apache.spark.sql.{DataFrame, Row, SparkSession}

/**
 * @description: Example of converting an RDD[Int] into a DataFrame using an
 *               explicit schema (RDD[Row] + StructType + createDataFrame).
 * @time: 2020/6/15 14:17
 * @author: baojinlong
 **/
object RDDToDf {
  /**
   * Demonstrates converting an RDD[Int] into a DataFrame with an explicit schema:
   * wrap each element in a [[Row]], describe the columns with a [[StructType]],
   * then call `spark.createDataFrame(rowRdd, schema)`.
   *
   * Note: this Row/StructType path does NOT require `import spark.implicits._`;
   * that import is only needed for the `rdd.toDF(...)` / `toDS` style conversion.
   */
  def main(args: Array[String]): Unit = {
    // Build the SparkSession in local mode, using all available cores.
    val spark: SparkSession = SparkSession.builder().master("local[*]").appName("xx").getOrCreate()
    // Obtain the underlying SparkContext to create the source RDD.
    val sc: SparkContext = spark.sparkContext
    val rdd: RDD[Int] = sc.parallelize(Array(1, 2, 3, 4))
    // Convert RDD[Int] -> RDD[Row]: createDataFrame's schema-based overload needs Rows.
    val rowRdd: RDD[Row] = rdd.map(Row(_))
    // Schema: a single IntegerType column named "id" (nullable by default).
    val structType: StructType = StructType(StructField("id", IntegerType) :: Nil)
    // Assemble the DataFrame from the Row RDD plus the explicit schema.
    val frame: DataFrame = spark.createDataFrame(rowRdd, structType)
    // show() is side-effecting (prints to stdout), so keep the parentheses.
    frame.show()

    // Release Spark resources.
    spark.stop()
  }

}
