package com.spark.sql

import org.apache.spark.sql.{Column, DataFrame, SparkSession}

/**
 * Demo: pivot a long-format student score table (name, item, score) into a wide
 * table (name, math, english), then unpivot it back to long format using
 * map + explode.
 */
object Demo11Student {
  def main(args: Array[String]): Unit = {
    val spark: SparkSession = SparkSession
      .builder()
      .master("local")
      .appName("student") // fixed typo: was "studnet"
      .getOrCreate()

    spark.sparkContext.setLogLevel("error")

    import org.apache.spark.sql.functions._
    import spark.implicits._

    // Read the raw long-format data: one row per (student, subject, score).
    val stuScoDF: DataFrame = spark
      .read
      .format("csv")
      .option("sep", ",")
      .schema("name STRING,item STRING,score DOUBLE")
      .load("data/stu_sco.txt")

    stuScoDF.show()

    /**
     * 1. Pivot table 1 (long) into table 2 (wide): one row per student with
     *    one column per subject. Rows whose item does not match contribute 0.0
     *    to the sum, so each student keeps exactly their own subject scores.
     */
    val df2: DataFrame = stuScoDF
      .groupBy($"name")
      .agg(
        // Use a numeric 0.0 (not the string "0") so the sum stays DOUBLE
        // without an implicit string-to-double cast.
        sum(when($"item" === "数学", $"score").otherwise(0.0)) as "math",
        sum(when($"item" === "英语", $"score").otherwise(0.0)) as "english"
      )
    df2.show()

    // Build a map column {subject-name -> score column} so explode can turn
    // each wide row back into (item, score) pairs. lit(...) is the idiomatic
    // way to create a constant column (clearer than expr("'...'")).
    val m: Column = map(
      lit("数学"), $"math",
      lit("英语"), $"english"
    )

    // explode on a map yields one row per entry; alias the key/value columns
    // back to (item, score) to recover the original long format.
    df2.select($"name", explode(m) as Array("item", "score"))
      .show()

  }

}
