package com.shujia.spark.sql

import org.apache.spark.sql.expressions.UserDefinedFunction
import org.apache.spark.sql.{DataFrame, SparkSession}

object Demo8UDF {
  /**
   * Demonstrates a Spark user-defined function (UDF) used two ways:
   * first through the DataFrame DSL, then registered by name and
   * invoked from a Spark SQL query over a temp view.
   */
  def main(args: Array[String]): Unit = {
    val spark: SparkSession = SparkSession
      .builder()
      .appName("dsl")
      .master("local")
      .config("spark.sql.shuffle.partitions", 1)
      .getOrCreate()
    import spark.implicits._
    import org.apache.spark.sql.functions._

    try {
      val studentDF: DataFrame = spark
        .read
        .format("csv")
        .option("sep", ",")
        .schema("id STRING,name STRING,age INT,sex STRING,clazz STRING")
        .load("data/students.txt")

      // Custom substring UDF for use in the DataFrame DSL.
      // Null-safe and length-clamped: the original str.substring(pos, pos + len)
      // threw NullPointerException on null column values and
      // StringIndexOutOfBoundsException on values shorter than pos + len.
      // Returns null (SQL NULL) for null/out-of-range input, matching the
      // convention of Spark's built-in string functions.
      val sub_string: UserDefinedFunction = udf((str: String, pos: Int, len: Int) =>
        if (str == null || pos < 0 || pos > str.length) null
        else str.substring(pos, math.min(str.length, pos + len))
      )
      // lit(...) is the idiomatic way to pass constant UDF arguments;
      // the original expr("1") / expr("2") parsed SQL just to build a literal.
      studentDF.select(sub_string($"clazz", lit(1), lit(2))).show()

      studentDF.createOrReplaceTempView("students")
      // Register the same UDF under a name so SQL queries can call it.
      spark.udf.register("sub_string", sub_string)
      spark.sql(
        """
          |select sub_string(clazz,1,2) from
          |students
          |""".stripMargin).show()
    } finally {
      // Always release the SparkSession; the original leaked it until JVM exit.
      spark.stop()
    }
  }
}
