package com.wu.spark

import org.apache.spark.sql.SparkSession



/**
  * Demonstrates common DataFrame API operations (show, filter, select, sort, join).
  */

object DataFrameCase {

  /** Default input file: pipe-delimited rows of `id|name|phone|email`. */
  private val DefaultPath =
    "C://Users//wudl//Documents//ideaWorkSpaces2018.4.13//SparkSQL//Sources//student.data"

  /**
    * Entry point. Loads the student file into a DataFrame and exercises the
    * DataFrame API: show, take/count/head/first, filter, select-with-alias,
    * sort, and a self-join.
    *
    * @param args optional first argument overrides the input file path
    *             (defaults to [[DefaultPath]], preserving previous behavior)
    */
  def main(args: Array[String]): Unit = {
    val path = args.headOption.getOrElse(DefaultPath)

    val spark = SparkSession.builder()
      .appName("DataFrameCase")
      .master("local[2]")
      .getOrCreate()

    try {
      // Implicit conversions are required for .toDF() on an RDD of case classes.
      import spark.implicits._

      // Build the DataFrame once per call; the original duplicated this
      // whole parse pipeline for the join below. Lines with fewer than four
      // fields are skipped instead of throwing ArrayIndexOutOfBoundsException.
      def loadStudents() =
        spark.sparkContext
          .textFile(path)
          .map(_.split("\\|"))
          .collect { case Array(id, name, iphone, emil, _*) =>
            Student(id.toInt, name, iphone, emil)
          }
          .toDF()

      val df = loadStudents()

      df.show()          // default: first 20 rows, values truncated
      df.show(30, false) // up to 30 rows, untruncated

      println("------------------------------------------------------")
      // The original computed these values and discarded them; print them so
      // the statements actually have an observable effect.
      println(s"take(5).length = ${df.take(5).length}")
      println(s"count = ${df.count()}")
      println(s"head  = ${df.head()}")
      println(s"first = ${df.first()}")
      println("------------------------------------------------------")

      // SQL-style predicate: name is a single space OR equals 'zhangsan18'.
      df.filter("name=' ' OR name = 'zhangsan18' ").show()

      // Select with a column alias.
      df.select(df.col("name").as("myname"), df.col("id")).show()

      // Single- and multi-column descending sorts.
      df.sort(df.col("id").desc).show()
      df.sort(df.col("id").desc, df.col("emil").desc).show()

      // Self-join on id (second DataFrame built from the same source file).
      val df2 = loadStudents()
      df.join(df2, df.col("id") === df2.col("id")).show(30, false)
    } finally {
      // Always release the SparkSession, even if an action above fails.
      spark.stop()
    }
  }

  /**
    * One record of the student file.
    * NOTE(review): `iphone`/`emil` look like typos for phone/email, but they
    * are kept verbatim because they become DataFrame column names.
    */
  case class Student(id: Int, name: String, iphone: String, emil: String)

}
