package com.bigdata.exam.cg

import org.apache.spark.rdd.RDD
import org.apache.spark.sql.types.{DoubleType, StringType, StructType}
import org.apache.spark.sql.{DataFrame, Row, SparkSession}

object ExamTotalResult {

  /** Entry point: reads per-student exam scores from a local CSV file and
    * prints summary statistics — total row count, number of passing scores,
    * and a count per score band.
    *
    * Input: one score per line at `data/exam/defen.txt` (path is relative
    * to the working directory — TODO confirm against the run configuration).
    */
  def main(args: Array[String]): Unit = {
    val spark: SparkSession = SparkSession.builder()
      .master("local[1]")
      .appName("baseScore")
      .getOrCreate()

    // Quiet the console output; Log4j matches level names case-insensitively,
    // but the conventional spelling is uppercase.
    spark.sparkContext.setLogLevel("ERROR")

    // Single-column schema: each input line is parsed as one Double score.
    val schema: StructType = new StructType()
      .add("score", DoubleType)

    // `.csv(path)` already implies the csv format, so the previous redundant
    // `.format("csv")` call was dropped.
    val baseScore: DataFrame = spark
      .read
      .schema(schema)
      .csv("data/exam/defen.txt")

    baseScore.createOrReplaceTempView("t_score")

    // Total number of score rows.
    spark.sql(
      """
        |select count(*) from t_score
        |""".stripMargin).show(100, false)

    // Number of passing scores (>= 60).
    spark.sql(
      """
        |select sum(if(score >= 60, 1 , 0)) from t_score
        |""".stripMargin).show(100, false)

    println("-------------------------")
    // Score distribution: each UNION ALL branch counts one fixed band.
    // Rows appear in band order (0-39, 40-49, ..., 90-100), one count per row.
    spark.sql(
      """
        |select count(1) as stu_num from t_score where score between 0 and 39 union all
        |select count(1) as stu_num from t_score where score between 40 and 49 union all
        |select count(1) as stu_num from t_score where score between 50 and 59 union all
        |select count(1) as stu_num from t_score where score between 60 and 69 union all
        |select count(1) as stu_num from t_score where score between 70 and 79 union all
        |select count(1) as stu_num from t_score where score between 80 and 89 union all
        |select count(1) as stu_num from t_score where score between 90 and 100
        |""".stripMargin).show(100, false)

    // Release the local Spark context; previously the session was leaked.
    spark.stop()
  }
}
