package cn.darksoul3.spark.udf

import org.apache.spark.sql.{Dataset, SparkSession}

object UDF {

  /**
   * Demo entry point: builds a local Spark session, registers a SQL UDF
   * named "location" that joins a province and a city around a separator,
   * and runs it over a tiny in-memory (province, city) dataset.
   */
  def main(args: Array[String]): Unit = {
    val spark: SparkSession = SparkSession
      .builder()
      .appName("ReadTsv")
      .master("local[*]")
      .getOrCreate()
    // Needed for the implicit Encoder[(String, String)] used by createDataset.
    import spark.implicits._

    // Two sample (province, city) rows.
    val pairs: Dataset[(String, String)] =
      spark.createDataset(List(("北京市", "朝阳区"), ("天津市", "和平区")))
    pairs.toDF("p", "c").createTempView("v_address")

    // UDF body: province + separator + city.
    val joinLocation: (String, String, String) => String =
      (sep, province, city) => s"$province$sep$city"
    spark.udf.register("location", joinLocation)

    // Apply the UDF from SQL and print the result table to stdout.
    spark.sql("select location(',',p,c) loc from v_address").show()
    spark.stop()
  }
}
