package com.shujia.sql

import org.apache.spark.sql.SQLContext
import org.apache.spark.{SparkConf, SparkContext}

/**
  * Demonstrates registering and using a UDF and a UDAF with Spark SQL
  * (Spark 1.x-style API: SQLContext / registerTempTable).
  *
  * Reads `spark/data/students.txt` (CSV: id,name,age,gender,clazz),
  * builds a DataFrame, then:
  *  - registers a scalar UDF `stringAdd` that joins two strings with "-"
  *  - registers the aggregate UDAF `StringCount` (defined elsewhere in this project)
  * and runs a sample query for each.
  */
object Demo5UDF {
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf().setMaster("local[4]").setAppName("app")
    // Keep the shuffle partition count small for a local demo (default is 200).
    conf.set("spark.sql.shuffle.partitions", "2")

    val sc = new SparkContext(conf)

    val sqlContext = new SQLContext(sc)

    try {
      val studentRDD = sc.textFile("spark/data/students.txt")

      // Needed for the RDD-to-DataFrame .toDF conversion below.
      import sqlContext.implicits._

      val studentDF = studentRDD.map(line => {
        // Each line is comma-separated: id,name,age,gender,clazz.
        // NOTE(review): assumes every line has at least 5 fields and a numeric
        // age — malformed lines would throw here; confirm input is clean.
        val split = line.split(",")
        val id = split(0)
        val name = split(1)
        val age = split(2).toInt
        val gender = split(3)
        val clazz = split(4)

        (id, name, age, gender, clazz)
      }).toDF("id", "name", "age", "gender", "clazz") // specify column names

      // Expose the DataFrame to SQL under the name "student".
      studentDF.registerTempTable("student")

      /**
        * UDF: a scalar function usable directly in SQL.
        * stringAdd(a1, a2) returns "a1-a2".
        */
      sqlContext.udf.register("stringAdd", (a1: String, a2: String) => a1 + "-" + a2)

      sqlContext.sql("select stringAdd(id,name) from student").show()

      /**
        * UDAF: a user-defined aggregate function, applied per group.
        * StringCount is a UserDefinedAggregateFunction defined elsewhere.
        */
      sqlContext.udf.register("StringCount", new StringCount)

      sqlContext.sql("select clazz ,StringCount(clazz) from student group by clazz").show()
    } finally {
      // Release the driver's resources even if a query above fails;
      // the original code never stopped the SparkContext.
      sc.stop()
    }
  }
}
