package com.fwmagic.spark.other.operation

import org.apache.spark.sql.SparkSession

/**
  * DataFrame: untyped (weakly-typed) operations example.
  */
object UntypedOperation {

  /**
    * Entry point. Reads employee/department JSON data, then demonstrates
    * untyped DataFrame operations: filter, join, groupBy + aggregation,
    * and a simple projection.
    *
    * @param args optional overrides: args(0) = employee JSON path,
    *             args(1) = department JSON path. When absent, the original
    *             hard-coded resource paths are used.
    */
  def main(args: Array[String]): Unit = {
    // Generalized: allow callers to supply paths; defaults preserve prior behavior.
    val employeePath =
      if (args.length > 0) args(0)
      else "/Users/fangwei/learn/mycode/workspace/fwmagic-spark/src/main/resources/employee.json"
    val departmentPath =
      if (args.length > 1) args(1)
      else "/Users/fangwei/learn/mycode/workspace/fwmagic-spark/src/main/resources/department.json"

    val spark = SparkSession.builder()
      .appName("UntypedOperation")
      .master("local[*]")
      .getOrCreate()

    // Implicit conversions (e.g. the $"col" syntax) and built-in SQL functions.
    import org.apache.spark.sql.functions._
    import spark.implicits._

    try {
      // Read the JSON files as DataFrames (schema inferred from the data).
      val employee = spark.read.json(employeePath)
      val department = spark.read.json(departmentPath)

      // Employees older than 40, joined to their department, then average
      // salary per (name, gender). The join columns are qualified per frame:
      // unqualified $"depId"/$"id" would be ambiguous if either name existed
      // in both DataFrames.
      employee
        .where("age > 40")
        .join(department, employee("depId") === department("id"))
        .groupBy(employee("name"), employee("gender"))
        .agg(avg(employee("salary")))
        .show()

      println("=========================")

      // Simple projection plus the same age filter.
      employee
        .select($"name", $"age", $"depId", $"salary")
        .where("age > 40")
        .show()
    } finally {
      // Fix: the original never stopped the session, leaking its resources.
      spark.stop()
    }
  }

}
