package org.example

import org.apache.log4j.{Level, Logger}
import org.apache.spark.sql.{DataFrame, SQLContext}
import org.apache.spark.{SparkConf, SparkContext}


object scala02 {

  /** Demo entry point: loads two JSON files into DataFrames, then demonstrates
    * inner/left joins, schema printing, filtering, column arithmetic, grouping,
    * and per-row iteration.
    *
    * @param args optional overrides: args(0) = path to People.json,
    *             args(1) = path to Price.json. When absent, the original
    *             hard-coded defaults are used, preserving prior behavior.
    */
  def main(args: Array[String]): Unit = {
    // Silence noisy framework logging so the demo's console output stays readable.
    Logger.getLogger("org.apache.spark").setLevel(Level.ERROR)
    Logger.getLogger("org.eclipse.jetty.server").setLevel(Level.ERROR)

    // Generalization: input paths can now be supplied on the command line;
    // the defaults keep the original hard-coded behavior intact.
    val peoplePath = if (args.length > 0) args(0) else "D:\\Scala_project\\20220323\\demo08\\People.json"
    val pricePath  = if (args.length > 1) args(1) else "D:\\Scala_project\\20220323\\demo08\\Price.json"

    println("==============初始化==================")
    val conf = new SparkConf().setMaster("local").setAppName("scala02")
    val sc = new SparkContext(conf)
    try {
      // NOTE(review): SQLContext is deprecated since Spark 2.0; prefer
      // SparkSession if the project's Spark version allows it — confirm.
      val sqlContext = new SQLContext(sc)

      println("===================引入json数据=====================")
      val df = sqlContext.read.json(peoplePath)
      val df_price = sqlContext.read.json(pricePath)

      println("======================查看df数据==========================")
      df.show()

      // Inner join: keeps only rows where People.id matches Price.errNo.
      println("====================关联之后的结果===================")
      val df_join: DataFrame = df_price.join(df, df("id") === df_price("errNo"))
      df_join.show()

      // Left join: keeps every Price row, null-filling unmatched People columns.
      println("================joinType的结果============================")
      val df_left_join: DataFrame = df_price.join(df, df("id") === df_price("errNo"), "left")
      df_left_join.show()

      df.printSchema()
      df.filter(df.col("age").gt(32)).show()          // rows with age > 32
      df.select(df.col("name"), df.col("age").plus(1)).show() // age incremented by 1
      df.groupBy(df.col("age")).count().show()        // count of rows per age

      println("================使用lamdar表达式对数据进行遍历==============")
      df.select(df.col("name"), df.col("age"), df.col("id")).foreach(e => println(e))
    } finally {
      // Bug fix: the original never stopped the SparkContext, leaking the
      // local executor/UI resources on every run.
      sc.stop()
    }
  }

}
