package com.imooc.spark

import com.imooc.spark.DataFrameRDDApp.SetLogger
import org.apache.spark.sql.SparkSession

/**
  * Created by zghgchao 2017/12/23 15:05
  * DataFrame中的其他操作
  */
/**
  * Demonstrates common DataFrame operations: show / take / head / first,
  * select, filter (SQL expressions), sort, column aliasing, and self-join.
  *
  * Input: a pipe-delimited student file with 4 fields per line:
  *   id|name|phone|email
  */
object DataFrameCase {
  def main(args: Array[String]): Unit = {
    SetLogger()
    val spark = SparkSession.builder()
      .master("local[2]")
      .appName("DataFrameCase") // keep app name consistent with this object
      .getOrCreate()

    // Input path may be overridden via the first program argument;
    // defaults to the original sample data location.
    val dataPath = if (args.nonEmpty) args(0) else "src/data/student.data"

    // RDD ==> DataFrame
    val studentRDD = spark.sparkContext.textFile(dataPath)

    // Implicit conversions are required for RDD.toDF ==> DataFrame
    import spark.implicits._
    val studentDF = studentRDD.map(_.split("\\|"))
      .map(line =>
        Student(line(0).toInt, line(1), line(2), line(3))
      ).toDF()

    // show() defaults to 20 rows with truncate = true (column width capped);
    // here we show up to 30 rows without truncation.
//    studentDF.show()
    studentDF.show(30, false)

    println("--------------take-------------")
    studentDF.take(10).foreach(println)

    println("--------------head-------------")
    // head(n) returns Array[Row]; print each row rather than the array's
    // toString (which would only print an opaque array reference).
    studentDF.head(3).foreach(println)

    println("--------------first-------------")
    println(studentDF.first())

    println("--------------select-------------")
    studentDF.select("email").show(false)

    println("--------------filter-------------")
    // Rows whose name is empty or the literal string "NULL"
    studentDF.filter("name ='' OR name='NULL'").show(false)

    println("---------------names starting with 'M'-------------------")
    // SQL built-in substr(string, start, length); positions are 1-based.
    studentDF.filter("substr(name,1,1)='M'").show()


    studentDF.sort("name", "id").show()

    studentDF.sort(studentDF.col("id").asc, studentDF.col("name").desc).show()

    studentDF.select(studentDF.col("name").as("student_name")).show()

    val studentDF2 = studentRDD.map(_.split("\\|")).map(line => Student(line(0).toInt, line(1), line(2), line(3))).toDF()
    // Join conditions on columns must use triple equals "==="
    studentDF.join(studentDF2, studentDF.col("id") === studentDF2.col("id")).show()

    spark.stop()
  }

  /** One record of the pipe-delimited student file. */
  case class Student(id: Int, name: String, phone: String, email: String)

}
