package com.shujia.sql

import org.apache.spark.SparkContext
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.{DataFrame, Row, SparkSession}

/**
 * Demonstrates converting between Spark's two core data structures:
 *
 * spark core: RDD
 * spark sql : DataFrame
 *
 * Shows both directions: RDD -> DataFrame (commented example) and
 * DataFrame -> RDD (active example below).
 */
object DF2RDDDemo {
  def main(args: Array[String]): Unit = {
    // Spark SQL entry point
    val ss: SparkSession = SparkSession.builder()
      .config("spark.sql.shuffle.partitions", "1") // keep shuffle output at 1 partition for this local demo
      .master("local")
      .appName("spark source api")
      .getOrCreate()

    // Spark Core entry point (shares the session's underlying context);
    // kept for the commented-out RDD -> DF example below
    val sc: SparkContext = ss.sparkContext


    /**
     * ---------------------------------------------
     * RDD -> DF
     * ---------------------------------------------
     */
//    val lineRDD: RDD[String] = sc.textFile("spark/data/students.txt")
//    val resRDD: RDD[(String, Int)] = lineRDD.map((line: String) => {
//      val info: Array[String] = line.split(",")
//      (info(4), 1)
//    }).reduceByKey(_ + _)
//    resRDD.foreach(println)
//
//    // Converting data structures between Spark modules requires the ss.implicits._ implicit conversions
    import ss.implicits._
    import org.apache.spark.sql.functions._
//    // RDD -> DF
//    val df1: DataFrame = resRDD.toDF.select($"_1" as "clazz",$"_2" as "counts")
//    df1.show()


    /**
     * ---------------------------------------------
     * DF -> RDD
     * ---------------------------------------------
     */
    val df1: DataFrame = ss.read
      .format("csv")
      .option("sep", ",")
      .schema("id STRING,subject_id STRING,score INT")
      .load("spark/data/score.txt")

    // Total score per student; sum over an INT column yields BIGINT, i.e. Scala Long
    val resDF: DataFrame = df1.groupBy("id")
      .agg(sum("score") as "sumScore")
//    resDF.printSchema()

    //DF -> RDD
    val resRDD: RDD[Row] = resDF.rdd
//    resRDD.map((row:Row)=>{
//      // row represents one DataFrame row, carrying both column values and column names
//      val id: String = row.getAs[String]("id")
//      val sumScore: Long = row.getAs[Long]("sumScore")
//      s"学号：$id, 总分：$sumScore"
//    }).foreach(println)

    // Extract each column of the Row via pattern matching.
    // NOTE: a bare partial function throws MatchError on rows that don't
    // match (e.g. null columns); acceptable for this demo's known schema.
    resRDD.map {
      case Row(id: String, sumScore: Long) => s"学号：$id, 总分：$sumScore"
    }.foreach(println)

    // Release Spark resources (was missing in the original)
    ss.stop()
  }
}
