package cn.whuc.sparksql

import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.sql.{DataFrame, Dataset, SparkSession}

object Demo_Begin {

  /** Parses one CSV line of the form "name,course,score" into a [[CourseScore]].
    *
    * NOTE(review): assumes every line of Data01.txt is well-formed (three
    * comma-separated fields, numeric score) — TODO confirm there is no header
    * or blank line, since `split`/`toInt` would throw on malformed input.
    */
  private def parseCourseScore(line: String): CourseScore = {
    val fields: Array[String] = line.split(",")
    CourseScore(fields(0), fields(1), fields(2).toInt)
  }

  def main(args: Array[String]): Unit = {
    val conf: SparkConf = new SparkConf().setAppName("sql").setMaster("local[*]")
    val ss: SparkSession = SparkSession.builder().config(conf).getOrCreate()

    // Brings toDF/toDS and the implicit Encoders into scope.
    import ss.implicits._

    // Ensure the session is stopped even if an action below throws.
    try {
      val sc: SparkContext = ss.sparkContext

      val rdd: RDD[String] = sc.textFile("input/Data01.txt")

      // Untyped DataFrame: all three columns stay Strings here (score is not
      // converted), matching the original schema.
      val df: DataFrame = rdd.map { line =>
        val fields: Array[String] = line.split(",")
        (fields(0), fields(1), fields(2))
      }.toDF("name", "course", "score")
      df.show(10)

      // Parse each line exactly once and reuse the typed RDD for both the
      // DataFrame and the Dataset (removes the duplicated map pipelines).
      val scores: RDD[CourseScore] = rdd.map(parseCourseScore)

      val df1: DataFrame = scores.toDF()
      df1.show(10)

      val ds1: Dataset[CourseScore] = scores.toDS()
      ds1.show(10)

      // Session-scoped view backing the SQL query below.
      df1.createTempView("course")
      ss.sql("select * from course where score>=90 ").show(10)
    } finally {
      ss.stop()
    }
  }
}

/** One student's result for a single course: who, which course, and the score. */
case class CourseScore(name: String, course: String, score: Int)
