package com.larry.spark.sql

import com.larry.spark.rdd.transform.RDD_Oper_sortByKey_1.User
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.{DataFrame, SparkSession}
import org.apache.spark.{SparkConf, SparkContext}

object Sql_Oper_UDF {

  /**
   * Demonstrates registering and using a Spark SQL UDF.
   *
   * Reads `input/user.json` into a DataFrame, registers a UDF named
   * `prefix` that prepends `"name:"` to a string column, and runs a
   * SQL query applying it to the `username` column before printing
   * the result and shutting Spark down.
   */
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf().setMaster("local[*]").setAppName("sql")

    // Create the SparkSession — the entry point for Spark SQL.
    val spark: SparkSession = SparkSession.builder().config(conf).getOrCreate()

    // Brings in encoders / the $-column syntax; conventional Spark boilerplate.
    import spark.implicits._

    // Read the JSON source into a DataFrame.
    val df: DataFrame = spark.read.json("input/user.json")

    // Register the user-defined function: prepends "name:" to its argument.
    spark.udf.register("prefix", (x: String) => "name:" + x)

    // Expose the DataFrame as a temp view so plain SQL can query it.
    df.createOrReplaceTempView("user")

    spark.sql("select prefix(username),age from user").show()

    // Release Spark resources.
    spark.stop()
  }
}
//case class User(id:Int,name:String,age:Int)