package com.shujia.spark.sql

import org.apache.spark.sql.{Column, DataFrame, SparkSession}

/**
 * Demo: reshaping a student-score dataset between "long" and "wide" forms.
 *
 * Input file `data/stu_sco.txt` is headerless CSV with rows of
 * (name, item, score), where `item` is the subject name.
 *
 * Transform 1 (long -> wide): one row per student with a column per subject.
 * Transform 2 (wide -> long): back to one row per (student, subject).
 */
object Demo11Studnet {
  def main(args: Array[String]): Unit = {
    val spark: SparkSession = SparkSession
      .builder()
      .master("local")
      .appName("studnet")
      .getOrCreate()
    import spark.implicits._
    import org.apache.spark.sql.functions._

    // Read the raw long-format data; schema supplied explicitly since the
    // CSV has no header row.
    val stuScoDF: DataFrame = spark
      .read
      .format("csv")
      .option("sep", ",")
      .schema("name STRING,item STRING,score DOUBLE")
      .load("data/stu_sco.txt")


    /**
     * 1. Long -> wide: pivot subjects into columns.
     *
     * For each student, sum the score where `item` matches the subject and
     * contribute 0 otherwise, producing one `math` and one `english` column.
     * (Equivalent to a manual pivot via conditional aggregation.)
     */
    val df2: DataFrame = stuScoDF
      .groupBy($"name")
      .agg(
        sum(when($"item" === "数学", $"score").otherwise(0)) as "math",
        sum(when($"item" === "英语", $"score").otherwise(0)) as "english"
      )
    df2.show(100)

    /**
     * 2. Wide -> long: unpivot the subject columns.
     *
     * Build a map literal of {subject-name -> score column}; exploding a map
     * column yields one row per entry with (key, value) columns, which we
     * alias back to (item, score).
     */
    val m: Column = map(
      expr("'数学'"), $"math",
      expr("'英语'"), $"english"
    )

    df2
      .select($"name", explode(m) as Array("item", "score"))
      .show(100)

    // Release the session (and its SparkContext) — the original leaked it.
    spark.stop()
  }

}
