package com.wu.spark

import org.apache.spark.sql.SparkSession

object DataFrameApp {

  /** Entry point: demonstrates basic Spark SQL DataFrame operations
    * (projection, filtering, derived columns, aggregation, ordering)
    * over a JSON input file.
    *
    * @param args optional; `args(0)` overrides the default input path
    */
  def main(args: Array[String]): Unit = {

    // Keep the original hard-coded path as the default, but allow the
    // caller to pass a different JSON file as the first program argument.
    val inputPath = args.headOption.getOrElse(
      "C://Users//wudl//Documents//ideaWorkSpaces2018.4.13//SparkSQL//src//t.json")

    val spark = SparkSession.builder()
      .appName("DataFrameApp")
      .master("local[2]")
      .getOrCreate()

    // try/finally guarantees the SparkSession is released even when a
    // DataFrame action below throws (e.g. the input file is missing).
    try {
      // Schema is inferred from the JSON on load; print it for inspection.
      val df = spark.read.format("json").load(inputPath)
      df.printSchema()

      // Column projection.
      df.select("name").show()
      df.select("name", "age").show()

      // Row filtering.
      df.filter(df.col("age") > 30).show()

      // Derived column with an alias.
      df.select(df.col("name"), (df.col("age") + 50).as("newage")).show()

      // Aggregation: count rows per distinct age.
      df.groupBy("age").count().show()
      df.select("name", "age").groupBy("age").count().show()

      // Ordering (descending by age).
      df.orderBy(df.col("age").desc).show()
    } finally {
      // stop() is the documented way to shut down a SparkSession.
      spark.stop()
    }
  }
}
