package com.fwmagic.spark.other.core

import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.functions._

object OtherFunction {

  /**
   * Demonstrates a handful of Spark SQL built-in functions on a DataFrame:
   * date (`current_date`, `current_timestamp`), math (`round`), random
   * (`rand`) and string functions (`concat`, `concat_ws`).
   *
   * @param args optional overrides: args(0) = employee JSON path,
   *             args(1) = department JSON path. When absent, the original
   *             hard-coded development paths are used, so existing
   *             invocations keep working unchanged.
   */
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder()
      .appName("Other Function")
      .master("local[*]")
      .getOrCreate()

    // Allow the input locations to be supplied on the command line instead of
    // being tied to one developer's machine; fall back to the original paths.
    val employeePath = args.lift(0).getOrElse(
      "/Users/fangwei/learn/mycode/workspace/fwmagic-spark/src/main/resources/employee.json")
    val departmentPath = args.lift(1).getOrElse(
      "/Users/fangwei/learn/mycode/workspace/fwmagic-spark/src/main/resources/department.json")

    // Ensure the SparkSession is released even if reading or showing fails.
    try {
      // Read the employee JSON file into a DataFrame.
      val employee = spark.read.json(employeePath)

      // Read the department JSON file (loaded as in the original demo; it is
      // not referenced by the query below).
      val department = spark.read.json(departmentPath)

      // Built-in functions exercised below:
      //   date:   current_date, current_timestamp
      //   math:   round
      //   random: rand
      //   string: concat, concat_ws
      // Full list:
      // http://spark.apache.org/docs/latest/api/scala/index.html#org.apache.spark.sql.functions$
      employee.select(
        employee("name"),
        current_date(),
        current_timestamp(),
        rand(),
        round(employee("salary"), 3),
        concat(employee("name"), employee("depId")),
        concat_ws(" | ", employee("name"), employee("depId"))
      ).show()
    } finally {
      spark.stop()
    }
  }

}
