package day7

import org.apache.spark.sql.expressions.UserDefinedAggregateFunction
import org.apache.spark.sql.{DataFrame, SparkSession}

object Test1 {
  /**
   * Demo: registering a custom single-row (scalar) UDF in Spark SQL.
   *
   * Reads a JSON file of people, registers a `convert_sex` UDF that maps a
   * numeric sex code to a display string, and shows the query result.
   *
   * @param args optional; args(0) may supply the input path (defaults to the
   *             original hard-coded path for backward compatibility)
   */
  def main(args: Array[String]): Unit = {
    // Goal: learn how to create a custom single-row (scalar) function.
    val spark = SparkSession.builder().master("local[*]").appName("cheshi").getOrCreate()

    // Input path can be overridden via the first program argument so the demo
    // is not tied to one machine's filesystem layout.
    val inputPath = if (args.nonEmpty) args(0) else "file:///D:\\data\\a.json"

    val df1: DataFrame = spark.read.json(inputPath)

    df1.createTempView("t_person")

    // Register the scalar UDF: code 1 -> "男" (male), anything else -> "女" (female).
    // NOTE(review): Spark infers JSON numbers as bigint; the Int parameter relies
    // on Spark's implicit coercion for the SQL call — confirm against the schema.
    spark.udf.register("convert_sex", (i: Int) => {
      if (i == 1) "男" else "女"
    })

    val df2 = spark.sql(
      """
        |select name,age,convert_sex(sex) sex from t_person
        |
        |""".stripMargin)

    df2.show()

    // Release the SparkSession's resources.
    spark.stop()
  }
}
