package cn.doitedu.df_rdd

import cn.doitedu.beans.ScalaStudentBean
import cn.doitedu.util.SparkUtil
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.Row
import org.apache.spark.sql.types.{DataTypes, StructType}

/**
 * Demonstrates converting an RDD[String] (CSV lines) into a DataFrame
 * via RDD[Row] plus an explicitly constructed StructType schema.
 *
 * @since 2022-04-12
 * @author HANGGE
 */
object C06_RDD_2_DF_Row {

  /**
   * Entry point: loads a CSV of student scores as an RDD[String],
   * maps each line to a generic [[Row]], attaches an explicit
   * [[StructType]] schema, and materializes a DataFrame.
   */
  def main(args: Array[String]): Unit = {
    val session = SparkUtil.getSession
    // Obtain the SparkContext to load raw (untyped) text data as an RDD.
    val sc = session.sparkContext

    // Load the CSV file line by line.
    // NOTE(review): Windows-style relative path — will not resolve on
    // Linux/macOS; consider "data/scores/Scores.csv" (works on all OSes).
    val rdd: RDD[String] = sc.textFile("data\\scores\\Scores.csv")

    // Convert each CSV line into a generic Row.
    // Field order must match the schema built below:
    //   id, name, age, sex, city, score
    // NOTE(review): toInt/toDouble will throw on a header line or
    // malformed record — assumes the file is clean; confirm.
    val rowRDD: RDD[Row] = rdd.map(line => {
      val arr = line.split(",")
      Row(arr(0).toInt, arr(1), arr(2).toInt, arr(3), arr(4), arr(5).toDouble)
    })

    // Explicit schema describing each Row's fields; createDataFrame(RDD[Row], StructType)
    // pairs the untyped rows with this schema to produce a typed DataFrame.
    val structType = new StructType()
      .add("id", DataTypes.IntegerType)
      .add("name", DataTypes.StringType)
      .add("age", DataTypes.IntegerType)
      .add("sex", DataTypes.StringType)
      .add("city", DataTypes.StringType)
      .add("score", DataTypes.DoubleType)

    val df = session.createDataFrame(rowRDD, structType)
    df.show()
    df.printSchema()

    // Release the Spark session/context; the original leaked it at exit.
    session.close()
  }

}
