package com.imooc.spark

import org.apache.spark.sql.SparkSession

/**
  * DataFrame中的操作
  */
object DataFrameCase {

  /**
    * Entry point: loads a pipe-delimited student file into a DataFrame and
    * demonstrates common DataFrame operations (filter, sort, join, ...).
    *
    * @param args optional; args(0) may override the input file path
    *             (defaults to the original hard-coded location).
    */
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder()
      .appName("DataFrameCase")
      .master("local[2]")
      .getOrCreate()

    // Allow the input path to be supplied on the command line; fall back to
    // the original default so existing invocations keep working.
    val inputPath = args.headOption.getOrElse("D:\\tmp\\data-test\\student.txt")

    try {
      import spark.implicits._

      // Parse the pipe-delimited file ("id|name|phone|email") into a
      // DataFrame of Student rows. Defined once to avoid duplicating the
      // parsing pipeline for each DataFrame we build.
      // NOTE(review): assumes every line has 4 fields and a numeric id —
      // malformed lines will throw; confirm input is clean.
      def loadStudents() =
        spark.sparkContext
          .textFile(inputPath)
          .map(_.split("\\|"))
          .map(fields => Student(fields(0).toInt, fields(1), fields(2), fields(3)))
          .toDF()

      val studentDF = loadStudents()

      //studentDF.show(30,false)

      //studentDF.take(10).foreach(println)

      //studentDF.first() //=studentDF.head()

      //studentDF.head(10)

      // Two equivalent ways to find rows whose name is empty or the literal
      // string "NULL": SQL-expression filter vs. Column-based filter.
      studentDF.filter("name='' OR name='NULL'").show()
      studentDF.filter(studentDF.col("name") === "" || studentDF.col("name") === "NULL").show()

      //studentDF.filter("SUBSTR(name,0,1)='M'").show()

      //spark.sql("show functions").show(1000)  // list all SQL functions; names are case-insensitive

      //studentDF.sort(studentDF.col("name").desc).show()

      //studentDF.sort(studentDF.col("name"),studentDF.col("id")).show()

      //studentDF.sort(studentDF.col("name").asc,studentDF.col("id").desc).show()

      // Second DataFrame over the same file, used for the join example below.
      val studentDF2 = loadStudents()

      //studentDF.join(studentDF2, studentDF.col("id") === studentDF2.col("id")).show()
    } finally {
      // Always release local Spark resources, even if an operation fails.
      spark.stop()
    }
  }

  /** Schema of one record in the pipe-delimited student file. */
  final case class Student(id: Int, name: String, phone: String, email: String)
}
