import org.apache.spark.SparkContext
import org.apache.spark.sql.{DataFrame, Dataset, SparkSession}
import org.junit.Test


class SqarkSQL extends Serializable {

  // Shared local session for all tests; getOrCreate reuses one session per JVM.
  val spark: SparkSession = SparkSession.builder().master("local[*]").appName("testsql").getOrCreate()

  /** Builds a DataFrame from a raw text RDD (tuples + explicit column names)
    * and queries it through a temp view.
    */
  @Test
  def testSqarkSql(): Unit = {
    import spark.implicits._
    val sparkContext: SparkContext = spark.sparkContext

    // Each line is "id,name,age" — parse into a typed tuple, then name the columns.
    val df: DataFrame = sparkContext.textFile("data/student.txt").map { x =>
      val arr = x.split(",")
      (arr(0).toInt, arr(1), arr(2).toInt)
    }.toDF("id", "name", "age")

    df.cache()
    df.printSchema()

    // createOrReplaceTempView: plain createTempView throws if the view already
    // exists, which breaks any second test run against the shared session.
    df.createOrReplaceTempView("student")

    val frame: DataFrame = spark.sql("select * from student where id >=2")
    frame.show()
  }

  /** Same pipeline as testSqarkSql, but mapped through the Student case class so
    * the schema is inferred from its fields; also persists the result as CSV.
    */
  @Test
  def sqarkSqlCaseClass(): Unit = {
    import spark.implicits._
    val sparkContext: SparkContext = spark.sparkContext

    val df: DataFrame = sparkContext.textFile("data/student.txt").map { x =>
      val arr = x.split(",")
      Student(arr(0).toInt, arr(1), arr(2).toInt)
    }.toDF()

    df.cache()
    df.printSchema()

    // Idempotent view registration (see note in testSqarkSql).
    df.createOrReplaceTempView("student")

    val frame = spark.sql("select * from student where id >=2")
    frame.show()
    // mode("overwrite") so a leftover output directory from a previous run
    // doesn't fail the write with "path already exists".
    frame.write.mode("overwrite").csv("data/student.csv")
  }

  /** Reads the file through the Dataset API (spark.read.textFile) instead of the
    * RDD API, registers it as a view, and shows the SQL query result.
    */
  @Test
  def readSpark(): Unit = {
    import spark.implicits._
    val file: Dataset[String] = spark.read.textFile("data/student.txt")
    val df: DataFrame = file.map { x =>
      val arr: Array[String] = x.split(",")
      Student(arr(0).toInt, arr(1), arr(2).toInt)
    }.toDF()

    df.printSchema()

    // The original query was left unfinished ("SELECT * FROM" with no table name,
    // a guaranteed ParseException) and no view was ever registered — complete the
    // statement and surface the result.
    df.createOrReplaceTempView("student")
    spark.sql("SELECT * FROM student").show()
  }

}



/** Row schema for lines of data/student.txt ("int,string,int" CSV records).
  * NOTE(review): the third field is named `sex` here, but the raw-RDD test maps
  * the same column to "age" via toDF("id","name","age") — confirm which name is
  * actually intended before relying on the inferred column name.
  */
case class Student(id:Int,name:String,sex:Int)